From 478828f417caa3572700f7081bcdc83d46deaf19 Mon Sep 17 00:00:00 2001
From: Sergiy Matusevych
Date: Thu, 30 Nov 2023 16:54:17 -0800
Subject: [PATCH 01/44] allow special values outside of the range

---
 .../os/linux/runtime/linux-runtime-tunables.jsonc  | 2 +-
 mlos_bench/mlos_bench/tests/conftest.py            | 2 +-
 mlos_bench/mlos_bench/tests/tunables/conftest.py   | 6 +++---
 .../mlos_bench/tests/tunables/tunables_str_test.py | 2 +-
 mlos_bench/mlos_bench/tunables/tunable.py          | 9 ++++-----
 5 files changed, 10 insertions(+), 11 deletions(-)

diff --git a/mlos_bench/mlos_bench/config/environments/os/linux/runtime/linux-runtime-tunables.jsonc b/mlos_bench/mlos_bench/config/environments/os/linux/runtime/linux-runtime-tunables.jsonc
index 5b5f46aa17d..970ddfd745b 100644
--- a/mlos_bench/mlos_bench/config/environments/os/linux/runtime/linux-runtime-tunables.jsonc
+++ b/mlos_bench/mlos_bench/config/environments/os/linux/runtime/linux-runtime-tunables.jsonc
@@ -7,7 +7,7 @@
             "type": "int",
             "meta": {"name_prefix": "/proc/sys/kernel/"},
             "default": 500000,
-            "range": [-1, 1000000],
+            "range": [0, 1000000],
             "special": [-1]
         },
         "sched_latency_ns": {
diff --git a/mlos_bench/mlos_bench/tests/conftest.py b/mlos_bench/mlos_bench/tests/conftest.py
index e3e29658984..f1de3dd0044 100644
--- a/mlos_bench/mlos_bench/tests/conftest.py
+++ b/mlos_bench/mlos_bench/tests/conftest.py
@@ -59,7 +59,7 @@
         "description": "Cost of migrating the thread to another core",
         "type": "int",
         "default": -1,
-        "range": [-1, 500000],
+        "range": [0, 500000],
         "special": [-1]
     },
     "kernel_sched_latency_ns": {
diff --git a/mlos_bench/mlos_bench/tests/tunables/conftest.py b/mlos_bench/mlos_bench/tests/tunables/conftest.py
index 9dc22fd0f70..95de20d9b84 100644
--- a/mlos_bench/mlos_bench/tests/tunables/conftest.py
+++ b/mlos_bench/mlos_bench/tests/tunables/conftest.py
@@ -36,7 +36,7 @@ def tunable_categorical() -> Tunable:
 @pytest.fixture
 def tunable_int() -> Tunable:
     """
-    A test fixture that produces an interger Tunable object with limited range.
+    A test fixture that produces an integer Tunable object with limited range.
     Returns
     -------
@@ -47,8 +47,8 @@ def tunable_int() -> Tunable:
         "description": "Cost of migrating the thread to another core",
         "type": "int",
         "default": 40000,
-        "range": [-1, 500000],
-        "special": [-1]
+        "range": [0, 500000],
+        "special": [-1]  # Special value outside of the range
     })
diff --git a/mlos_bench/mlos_bench/tests/tunables/tunables_str_test.py b/mlos_bench/mlos_bench/tests/tunables/tunables_str_test.py
index 3482532629c..672b16ab732 100644
--- a/mlos_bench/mlos_bench/tests/tunables/tunables_str_test.py
+++ b/mlos_bench/mlos_bench/tests/tunables/tunables_str_test.py
@@ -29,7 +29,7 @@ def test_tunable_groups_str(tunable_groups: TunableGroups) -> None:
     "kernel_sched_migration_cost_ns": {
         "type": "int",
         "default": -1,
-        "range": [-1, 500000],
+        "range": [0, 500000],
         "special": [-1]
     }
 }
diff --git a/mlos_bench/mlos_bench/tunables/tunable.py b/mlos_bench/mlos_bench/tunables/tunable.py
index b10a3933a0b..372dcfd3b30 100644
--- a/mlos_bench/mlos_bench/tunables/tunable.py
+++ b/mlos_bench/mlos_bench/tunables/tunable.py
@@ -32,7 +32,7 @@ class TunableDict(TypedDict, total=False):
     default: TunableValue
     values: Optional[List[Optional[str]]]
     range: Optional[Union[Sequence[int], Sequence[float]]]
-    special: Optional[Union[List[int], List[str]]]
+    special: Optional[Union[List[int], List[float]]]
     meta: Dict[str, Any]
@@ -76,7 +76,7 @@ def __init__(self, name: str, config: TunableDict):
         assert len(config_range) == 2, f"Invalid range: {config_range}"
         config_range = (config_range[0], config_range[1])
         self._range = config_range
-        self._special = config.get("special")
+        self._special = set(config.get("special") or [])
         self._current_value = None
         self._sanity_check()
         self.value = self._default
@@ -92,7 +92,7 @@ def _sanity_check(self) -> None:
             raise ValueError("Range must be None for the categorical type")
         if len(set(self._values)) != len(self._values):
             raise ValueError("Values must be unique for the categorical type")
-        if self._special is not None:
+        if self._special:
             raise ValueError("Special values must be None for the categorical type")
     elif self.is_numerical:
         if self._values is not None:
@@ -270,8 +270,7 @@ def is_valid(self, value: TunableValue) -> bool:
             return value in self._values
         elif self.is_numerical and self._range:
             if isinstance(value, (int, float)):
-                # TODO: allow special values outside of range?
-                return bool(self._range[0] <= value <= self._range[1])  # or value == self._default
+                return bool(self._range[0] <= value <= self._range[1]) or value in self._special
             else:
                 raise ValueError(f"Invalid value type for tunable {self}: {value}={type(value)}")
         else:

From 8de456cd07538dd473092388751d6fdd01b0dc4f Mon Sep 17 00:00:00 2001
From: Sergiy Matusevych
Date: Thu, 30 Nov 2023 16:59:12 -0800
Subject: [PATCH 02/44] add unit tests to check the assignment of the special
 value

---
 .../tests/tunables/tunables_assign_test.py | 26 +++++++++++++++++++
 1 file changed, 26 insertions(+)

diff --git a/mlos_bench/mlos_bench/tests/tunables/tunables_assign_test.py b/mlos_bench/mlos_bench/tests/tunables/tunables_assign_test.py
index 9ceae74df62..66083112abe 100644
--- a/mlos_bench/mlos_bench/tests/tunables/tunables_assign_test.py
+++ b/mlos_bench/mlos_bench/tests/tunables/tunables_assign_test.py
@@ -162,3 +162,29 @@ def test_tunable_assign_null_to_float(tunable_float: Tunable) -> None:
         tunable_float.value = None
     with pytest.raises(TypeError):
         tunable_float.numerical_value = None  # type: ignore[assignment]
+
+
+def test_tunable_assign_special(tunable_int: Tunable) -> None:
+    """
+    Check the assignment of a special value outside of the range (but declared `special`).
+    """
+    tunable_int.numerical_value = -1
+    assert tunable_int.numerical_value == -1
+
+
+def test_tunable_assign_special_with_coercion(tunable_int: Tunable) -> None:
+    """
+    Check the assignment of a special value outside of the range (but declared `special`).
+    Check coercion from float to int.
+    """
+    tunable_int.numerical_value = -1.0
+    assert tunable_int.numerical_value == -1
+
+
+def test_tunable_assign_special_with_coercion_str(tunable_int: Tunable) -> None:
+    """
+    Check the assignment of a special value outside of the range (but declared `special`).
+    Check coercion from string to int.
+    """
+    tunable_int.numerical_value = "-1"
+    assert tunable_int.numerical_value == -1

From 93ee12271a1a41f53eab2e30f04e31df25107b99 Mon Sep 17 00:00:00 2001
From: Sergiy Matusevych
Date: Thu, 30 Nov 2023 17:01:37 -0800
Subject: [PATCH 03/44] more unit tests for tunable values assignment

---
 .../mlos_bench/tests/tunables/tunables_assign_test.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/mlos_bench/mlos_bench/tests/tunables/tunables_assign_test.py b/mlos_bench/mlos_bench/tests/tunables/tunables_assign_test.py
index 66083112abe..3e5ae07ec55 100644
--- a/mlos_bench/mlos_bench/tests/tunables/tunables_assign_test.py
+++ b/mlos_bench/mlos_bench/tests/tunables/tunables_assign_test.py
@@ -172,6 +172,14 @@ def test_tunable_assign_special(tunable_int: Tunable) -> None:
     assert tunable_int.numerical_value == -1
+
+
+def test_tunable_assign_special_fail(tunable_int: Tunable) -> None:
+    """
+    Assign a value that is neither special nor in range and fail.
+    """
+    with pytest.raises(ValueError):
+        tunable_int.numerical_value = -2
+
+
 def test_tunable_assign_special_with_coercion(tunable_int: Tunable) -> None:
     """
     Check the assignment of a special value outside of the range (but declared `special`).
     Check coercion from float to int.
From f6ba2eb1929e1c2b0d477a571df66717a4aa48a6 Mon Sep 17 00:00:00 2001
From: Sergiy Matusevych
Date: Thu, 30 Nov 2023 17:04:18 -0800
Subject: [PATCH 04/44] minor type issues fixed

---
 mlos_bench/mlos_bench/tests/tunables/tunables_assign_test.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mlos_bench/mlos_bench/tests/tunables/tunables_assign_test.py b/mlos_bench/mlos_bench/tests/tunables/tunables_assign_test.py
index 3e5ae07ec55..ec3e40b8f35 100644
--- a/mlos_bench/mlos_bench/tests/tunables/tunables_assign_test.py
+++ b/mlos_bench/mlos_bench/tests/tunables/tunables_assign_test.py
@@ -194,5 +194,5 @@ def test_tunable_assign_special_with_coercion_str(tunable_int: Tunable) -> None:
     Check the assignment of a special value outside of the range (but declared `special`).
     Check coercion from string to int.
     """
-    tunable_int.numerical_value = "-1"
+    tunable_int.value = "-1"
     assert tunable_int.numerical_value == -1

From 9a775148b5a2413401a24ba796ca3da03d6e3afd Mon Sep 17 00:00:00 2001
From: Sergiy Matusevych
Date: Thu, 30 Nov 2023 17:22:34 -0800
Subject: [PATCH 05/44] making ConfigSpace tests break

---
 mlos_bench/mlos_bench/optimizers/convert_configspace.py      | 4 +++-
 .../mlos_bench/tests/tunables/tunable_to_configspace_test.py | 2 +-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/mlos_bench/mlos_bench/optimizers/convert_configspace.py b/mlos_bench/mlos_bench/optimizers/convert_configspace.py
index 03bb2c30727..46bc6e377b5 100644
--- a/mlos_bench/mlos_bench/optimizers/convert_configspace.py
+++ b/mlos_bench/mlos_bench/optimizers/convert_configspace.py
@@ -98,4 +98,6 @@ def tunable_values_to_configuration(tunables: TunableGroups) -> Configuration:
         A ConfigSpace Configuration.
     """
     configspace = tunable_groups_to_configspace(tunables)
-    return Configuration(configspace, values={tunable.name: tunable.value for (tunable, _group) in tunables})
+    return Configuration(configspace, values={
+        tunable.name: tunable.value for (tunable, _group) in tunables
+    })
diff --git a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py
index 3b7085f5ff9..82e205d2cd8 100644
--- a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py
+++ b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py
@@ -36,7 +36,7 @@ def configuration_space() -> ConfigurationSpace:
     spaces = ConfigurationSpace(space={
         "vmSize": ["Standard_B2s", "Standard_B2ms", "Standard_B4ms"],
         "idle": ["halt", "mwait", "noidle"],
-        "kernel_sched_migration_cost_ns": (-1, 500000),
+        "kernel_sched_migration_cost_ns": (0, 500000),
         "kernel_sched_latency_ns": (0, 1000000000),
     })

From 6513119291be7dc79b85a50fd72e40360c29e4e3 Mon Sep 17 00:00:00 2001
From: Sergiy Matusevych
Date: Mon, 4 Dec 2023 16:50:53 -0800
Subject: [PATCH 06/44] create special ConfigSpace hyperparameters for
 tunables with special values; implement TunableGroups to ConfigSpace
 conversion.
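
A numerical tunable with special values is split into three hyperparameters:
one for the regular range, a categorical one for the special values, and a
categorical switch that selects between the two. Roughly, the idea is
(a sketch only; names and weights here are illustrative, see
`_tunable_to_configspace()` below for the actual code):

    cost = UniformIntegerHyperparameter("range", lower=0, upper=500000)
    spec = CategoricalHyperparameter("special", choices=[-1])
    kind = CategoricalHyperparameter("type", choices=["special", "range"])
    # EqualsCondition clauses then activate exactly one of cost/spec
    # depending on the sampled value of kind.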
TODO: convert from ConfigSpace back to TunableGroups
---
 .../optimizers/convert_configspace.py     | 103 ++++++++++++------
 mlos_bench/mlos_bench/tunables/tunable.py |  29 ++++-
 2 files changed, 99 insertions(+), 33 deletions(-)

diff --git a/mlos_bench/mlos_bench/optimizers/convert_configspace.py b/mlos_bench/mlos_bench/optimizers/convert_configspace.py
index 46bc6e377b5..b0c4956d8c9 100644
--- a/mlos_bench/mlos_bench/optimizers/convert_configspace.py
+++ b/mlos_bench/mlos_bench/optimizers/convert_configspace.py
@@ -8,24 +8,27 @@
 import logging

-from typing import Optional
-
-from ConfigSpace.hyperparameters import Hyperparameter
-from ConfigSpace import UniformIntegerHyperparameter
-from ConfigSpace import UniformFloatHyperparameter
-from ConfigSpace import CategoricalHyperparameter
-from ConfigSpace import ConfigurationSpace, Configuration
-
-from mlos_bench.tunables.tunable import Tunable
+from typing import Dict, Optional
+
+from ConfigSpace import (
+    Configuration,
+    ConfigurationSpace,
+    CategoricalHyperparameter,
+    UniformIntegerHyperparameter,
+    UniformFloatHyperparameter,
+    EqualsCondition,
+)
+from mlos_bench.tunables.tunable import Tunable, TunableValue
 from mlos_bench.tunables.tunable_groups import TunableGroups

 _LOG = logging.getLogger(__name__)


-def _tunable_to_hyperparameter(
-        tunable: Tunable, group_name: Optional[str] = None, cost: int = 0) -> Hyperparameter:
+def _tunable_to_configspace(
+        tunable: Tunable, group_name: Optional[str] = None, cost: int = 0) -> ConfigurationSpace:
     """
-    Convert a single Tunable to an equivalent ConfigSpace Hyperparameter object.
+    Convert a single Tunable to an equivalent set of ConfigSpace Hyperparameter objects,
+    wrapped in a ConfigurationSpace for composability.

     Parameters
     ----------
     tunable : Tunable
@@ -38,25 +41,53 @@

     Returns
     -------
-    hyperparameter : Hyperparameter
-        A ConfigSpace Hyperparameter object that corresponds to the Tunable.
+    cs : ConfigurationSpace
+        A ConfigurationSpace object that corresponds to the Tunable.
""" meta = {"group": group_name, "cost": cost} # {"lower": "", "upper": "", "scaling": ""} + if tunable.type == "categorical": - return CategoricalHyperparameter( - tunable.name, choices=tunable.categories, - default_value=tunable.default, meta=meta) - elif tunable.type == "int": - return UniformIntegerHyperparameter( - tunable.name, lower=tunable.range[0], upper=tunable.range[1], - default_value=tunable.default, meta=meta) + return ConfigurationSpace({ + tunable.name: CategoricalHyperparameter( + name=tunable.name, choices=tunable.categories, + default_value=tunable.default, meta=meta) + }) + + if tunable.type == "int": + hp_type = UniformIntegerHyperparameter elif tunable.type == "float": - return UniformFloatHyperparameter( - tunable.name, lower=tunable.range[0], upper=tunable.range[1], - default_value=tunable.default, meta=meta) + hp_type = UniformFloatHyperparameter else: raise TypeError(f"Undefined Parameter Type: {tunable.type}") + if not tunable.special: + return ConfigurationSpace({ + tunable.name: hp_type( + name=tunable.name, lower=tunable.range[0], upper=tunable.range[1], + default_value=tunable.default if tunable.in_range(tunable.default) else None, + meta=meta) + }) + + cs = ConfigurationSpace( + name=tunable.name, + space={ + "range": hp_type( + name="range", lower=tunable.range[0], upper=tunable.range[1], + default_value=tunable.default if tunable.in_range(tunable.default) else None, + meta=meta), + "special": CategoricalHyperparameter( + name="special", choices=tunable.special, + default_value=tunable.default if tunable.default in tunable.special else None, + meta=meta), + "type": CategoricalHyperparameter( + name="type", choices=["special", "range"], default_value="special", + weights=[0.1, 0.9]), # TODO: make weights configurable + } + ) + cs.add_condition(EqualsCondition(cs["special"], cs["type"], "special")) + cs.add_condition(EqualsCondition(cs["range"], cs["type"], "range")) + return cs + def tunable_groups_to_configspace(tunables: TunableGroups, seed: Optional[int] = None) -> ConfigurationSpace: """ @@ -76,10 +107,11 @@ def tunable_groups_to_configspace(tunables: TunableGroups, seed: Optional[int] = A new ConfigurationSpace instance that corresponds to the input TunableGroups. """ space = ConfigurationSpace(seed=seed) - space.add_hyperparameters([ - _tunable_to_hyperparameter(tunable, group.name, group.get_current_cost()) - for (tunable, group) in tunables - ]) + for (tunable, group) in tunables: + space.add_configuration_space( + prefix="", + configuration_space=_tunable_to_configspace( + tunable, group.name, group.get_current_cost())) return space @@ -97,7 +129,16 @@ def tunable_values_to_configuration(tunables: TunableGroups) -> Configuration: Configuration A ConfigSpace Configuration. 
""" + values: Dict[str, TunableValue] = {} + for (tunable, _group) in tunables: + if tunable.special: + if tunable.value in tunable.special: + values[f"{tunable.name}:type"] = "special" + values[f"{tunable.name}:special"] = tunable.value + else: + values[f"{tunable.name}:type"] = "range" + values[f"{tunable.name}:range"] = tunable.value + else: + values[tunable.name] = tunable.value configspace = tunable_groups_to_configspace(tunables) - return Configuration(configspace, values={ - tunable.name: tunable.value for (tunable, _group) in tunables - }) + return Configuration(configspace, values=values) diff --git a/mlos_bench/mlos_bench/tunables/tunable.py b/mlos_bench/mlos_bench/tunables/tunable.py index 372dcfd3b30..033596a5cd2 100644 --- a/mlos_bench/mlos_bench/tunables/tunable.py +++ b/mlos_bench/mlos_bench/tunables/tunable.py @@ -76,7 +76,7 @@ def __init__(self, name: str, config: TunableDict): assert len(config_range) == 2, f"Invalid range: {config_range}" config_range = (config_range[0], config_range[1]) self._range = config_range - self._special = set(config.get("special") or []) + self._special = config.get("special") or [] self._current_value = None self._sanity_check() self.value = self._default @@ -270,12 +270,25 @@ def is_valid(self, value: TunableValue) -> bool: return value in self._values elif self.is_numerical and self._range: if isinstance(value, (int, float)): - return bool(self._range[0] <= value <= self._range[1]) or value in self._special + return self.in_range(value) or value in self._special else: raise ValueError(f"Invalid value type for tunable {self}: {value}={type(value)}") else: raise ValueError(f"Invalid parameter type: {self._type}") + def in_range(self, value: Union[int, float, str, None]) -> bool: + """ + Check if the value is within the range of the tunable. + Do *NOT* check for special values. + Return False if the tunable or value is categorical or None. + """ + return ( + isinstance(value, (float, int)) and + self.is_numerical and + self._range is not None and + bool(self._range[0] <= value <= self._range[1]) + ) + @property def category(self) -> Optional[str]: """ @@ -328,6 +341,18 @@ def name(self) -> str: """ return self._name + @property + def special(self) -> Union[List[int], List[float]]: + """ + Get the special values of the tunable. Return an empty list if there are none. + + Returns + ------- + special : [int] | [float] + A list of special values of the tunable. Can be empty. 
+ """ + return self._special + @property def type(self) -> str: """ From ccfab5345019abcc60a2468055b0b85f8769eb76 Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Mon, 4 Dec 2023 17:12:58 -0800 Subject: [PATCH 07/44] make configspace unit tests typecheck --- .../tunables/tunable_to_configspace_test.py | 29 ++++++++++--------- mlos_bench/mlos_bench/tunables/tunable.py | 2 +- 2 files changed, 17 insertions(+), 14 deletions(-) diff --git a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py index 82e205d2cd8..003a1d7f8a3 100644 --- a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py +++ b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py @@ -8,16 +8,19 @@ import pytest -from ConfigSpace import UniformIntegerHyperparameter -from ConfigSpace import UniformFloatHyperparameter -from ConfigSpace import CategoricalHyperparameter -from ConfigSpace import ConfigurationSpace - +from ConfigSpace import ( + ConfigurationSpace, + CategoricalHyperparameter, + UniformIntegerHyperparameter, + UniformFloatHyperparameter, +) from mlos_bench.tunables.tunable import Tunable from mlos_bench.tunables.tunable_groups import TunableGroups -from mlos_bench.optimizers.convert_configspace import _tunable_to_hyperparameter -from mlos_bench.optimizers.convert_configspace import tunable_groups_to_configspace +from mlos_bench.optimizers.convert_configspace import ( + _tunable_to_configspace, + tunable_groups_to_configspace, +) # pylint: disable=redefined-outer-name @@ -78,27 +81,27 @@ def _cmp_tunable_hyperparameter_float( assert cs_param.default_value == tunable.value -def test_tunable_to_hyperparameter_categorical(tunable_categorical: Tunable) -> None: +def test_tunable_to_configspace_categorical(tunable_categorical: Tunable) -> None: """ Check the conversion of Tunable to CategoricalHyperparameter. """ - cs_param = _tunable_to_hyperparameter(tunable_categorical) + cs_param = _tunable_to_configspace(tunable_categorical)[tunable_categorical.name] _cmp_tunable_hyperparameter_categorical(tunable_categorical, cs_param) -def test_tunable_to_hyperparameter_int(tunable_int: Tunable) -> None: +def test_tunable_to_configspace_int(tunable_int: Tunable) -> None: """ Check the conversion of Tunable to UniformIntegerHyperparameter. """ - cs_param = _tunable_to_hyperparameter(tunable_int) + cs_param = _tunable_to_configspace(tunable_int)[tunable_int.name] _cmp_tunable_hyperparameter_int(tunable_int, cs_param) -def test_tunable_to_hyperparameter_float(tunable_float: Tunable) -> None: +def test_tunable_to_configspace_float(tunable_float: Tunable) -> None: """ Check the conversion of Tunable to UniformFloatHyperparameter. 
""" - cs_param = _tunable_to_hyperparameter(tunable_float) + cs_param = _tunable_to_configspace(tunable_float)[tunable_float.name] _cmp_tunable_hyperparameter_float(tunable_float, cs_param) diff --git a/mlos_bench/mlos_bench/tunables/tunable.py b/mlos_bench/mlos_bench/tunables/tunable.py index 033596a5cd2..87b435e7833 100644 --- a/mlos_bench/mlos_bench/tunables/tunable.py +++ b/mlos_bench/mlos_bench/tunables/tunable.py @@ -76,7 +76,7 @@ def __init__(self, name: str, config: TunableDict): assert len(config_range) == 2, f"Invalid range: {config_range}" config_range = (config_range[0], config_range[1]) self._range = config_range - self._special = config.get("special") or [] + self._special: Union[List[int], List[float]] = config.get("special") or [] self._current_value = None self._sanity_check() self.value = self._default From d7de0828fd4876146d64e1736fb9951a60f0d99c Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Mon, 4 Dec 2023 19:03:01 -0800 Subject: [PATCH 08/44] working on TunableGroups to ConfigurationSpace conversion unit tests --- .../optimizers/convert_configspace.py | 52 +++++++------- .../tunables/tunable_to_configspace_test.py | 72 ++++++++----------- 2 files changed, 58 insertions(+), 66 deletions(-) diff --git a/mlos_bench/mlos_bench/optimizers/convert_configspace.py b/mlos_bench/mlos_bench/optimizers/convert_configspace.py index b0c4956d8c9..483410743c5 100644 --- a/mlos_bench/mlos_bench/optimizers/convert_configspace.py +++ b/mlos_bench/mlos_bench/optimizers/convert_configspace.py @@ -44,7 +44,7 @@ def _tunable_to_configspace( cs : ConfigurationSpace A ConfigurationSpace object that corresponds to the Tunable. """ - meta = {"group": group_name, "cost": cost} # {"lower": "", "upper": "", "scaling": ""} + meta = {"group": group_name, "cost": cost} # {"scaling": ""} if tunable.type == "categorical": return ConfigurationSpace({ @@ -68,24 +68,28 @@ def _tunable_to_configspace( meta=meta) }) - cs = ConfigurationSpace( - name=tunable.name, - space={ - "range": hp_type( - name="range", lower=tunable.range[0], upper=tunable.range[1], - default_value=tunable.default if tunable.in_range(tunable.default) else None, - meta=meta), - "special": CategoricalHyperparameter( - name="special", choices=tunable.special, - default_value=tunable.default if tunable.default in tunable.special else None, - meta=meta), - "type": CategoricalHyperparameter( - name="type", choices=["special", "range"], default_value="special", - weights=[0.1, 0.9]), # TODO: make weights configurable - } - ) - cs.add_condition(EqualsCondition(cs["special"], cs["type"], "special")) - cs.add_condition(EqualsCondition(cs["range"], cs["type"], "range")) + cs = ConfigurationSpace({ + "range": hp_type( + name=tunable.name + ":range", + lower=tunable.range[0], upper=tunable.range[1], + default_value=tunable.default if tunable.in_range(tunable.default) else None, + meta=meta), + "special": CategoricalHyperparameter( + name=tunable.name + ":special", + choices=tunable.special, + default_value=tunable.default if tunable.default in tunable.special else None, + meta=meta), + "type": CategoricalHyperparameter( + name=tunable.name + ":type", + choices=["special", "range"], default_value="special", + weights=[0.1, 0.9]), # TODO: Make weights configurable + }) + + cs.add_condition(EqualsCondition( + cs[tunable.name + ":special"], cs[tunable.name + ":type"], "special")) + cs.add_condition(EqualsCondition( + cs[tunable.name + ":range"], cs[tunable.name + ":type"], "range")) + return cs @@ -109,7 +113,7 @@ def 
tunable_groups_to_configspace(tunables: TunableGroups, seed: Optional[int] = space = ConfigurationSpace(seed=seed) for (tunable, group) in tunables: space.add_configuration_space( - prefix="", + prefix="", delimiter="", configuration_space=_tunable_to_configspace( tunable, group.name, group.get_current_cost())) return space @@ -133,11 +137,11 @@ def tunable_values_to_configuration(tunables: TunableGroups) -> Configuration: for (tunable, _group) in tunables: if tunable.special: if tunable.value in tunable.special: - values[f"{tunable.name}:type"] = "special" - values[f"{tunable.name}:special"] = tunable.value + values[tunable.name + ":type"] = "special" + values[tunable.name + ":special"] = tunable.value else: - values[f"{tunable.name}:type"] = "range" - values[f"{tunable.name}:range"] = tunable.value + values[tunable.name + ":type"] = "range" + values[tunable.name + ":range"] = tunable.value else: values[tunable.name] = tunable.value configspace = tunable_groups_to_configspace(tunables) diff --git a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py index 003a1d7f8a3..75872d596b9 100644 --- a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py +++ b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py @@ -8,19 +8,13 @@ import pytest -from ConfigSpace import ( - ConfigurationSpace, - CategoricalHyperparameter, - UniformIntegerHyperparameter, - UniformFloatHyperparameter, -) +from ConfigSpace import ConfigurationSpace, CategoricalHyperparameter + from mlos_bench.tunables.tunable import Tunable from mlos_bench.tunables.tunable_groups import TunableGroups -from mlos_bench.optimizers.convert_configspace import ( - _tunable_to_configspace, - tunable_groups_to_configspace, -) +from mlos_bench.optimizers.convert_configspace import _tunable_to_configspace +from mlos_bench.optimizers.convert_configspace import tunable_groups_to_configspace # pylint: disable=redefined-outer-name @@ -39,53 +33,49 @@ def configuration_space() -> ConfigurationSpace: spaces = ConfigurationSpace(space={ "vmSize": ["Standard_B2s", "Standard_B2ms", "Standard_B4ms"], "idle": ["halt", "mwait", "noidle"], - "kernel_sched_migration_cost_ns": (0, 500000), + "kernel_sched_migration_cost_ns:range": (0, 500000), + "kernel_sched_migration_cost_ns:special": [-1], + "kernel_sched_migration_cost_ns:type": ["special", "range"], "kernel_sched_latency_ns": (0, 1000000000), }) spaces["vmSize"].default_value = "Standard_B4ms" spaces["idle"].default_value = "halt" - spaces["kernel_sched_migration_cost_ns"].default_value = -1 + spaces["kernel_sched_migration_cost_ns:range"].default_value = 250000 + spaces["kernel_sched_migration_cost_ns:special"].default_value = -1 + spaces["kernel_sched_migration_cost_ns:type"].default_value = "special" spaces["kernel_sched_latency_ns"].default_value = 2000000 return spaces def _cmp_tunable_hyperparameter_categorical( - tunable: Tunable, cs_param: CategoricalHyperparameter) -> None: + tunable: Tunable, space: ConfigurationSpace) -> None: """ Check if categorical Tunable and ConfigSpace Hyperparameter actually match. 
""" - assert isinstance(cs_param, CategoricalHyperparameter) - assert set(cs_param.choices) == set(tunable.categories) - assert cs_param.default_value == tunable.value + param = space[tunable.name] + assert isinstance(param, CategoricalHyperparameter) + assert set(param.choices) == set(tunable.categories) + assert param.default_value == tunable.value -def _cmp_tunable_hyperparameter_int( - tunable: Tunable, cs_param: UniformIntegerHyperparameter) -> None: +def _cmp_tunable_hyperparameter_numerical( + tunable: Tunable, space: ConfigurationSpace) -> None: """ Check if integer Tunable and ConfigSpace Hyperparameter actually match. """ - assert isinstance(cs_param, UniformIntegerHyperparameter) - assert (cs_param.lower, cs_param.upper) == tuple(tunable.range) - assert cs_param.default_value == tunable.value - - -def _cmp_tunable_hyperparameter_float( - tunable: Tunable, cs_param: UniformFloatHyperparameter) -> None: - """ - Check if float Tunable and ConfigSpace Hyperparameter actually match. - """ - assert isinstance(cs_param, UniformFloatHyperparameter) - assert (cs_param.lower, cs_param.upper) == tuple(tunable.range) - assert cs_param.default_value == tunable.value + param = space[tunable.name + (":range" if tunable.special else "")] + assert (param.lower, param.upper) == tuple(tunable.range) + if tunable.in_range(tunable.value): + assert param.default_value == tunable.value def test_tunable_to_configspace_categorical(tunable_categorical: Tunable) -> None: """ Check the conversion of Tunable to CategoricalHyperparameter. """ - cs_param = _tunable_to_configspace(tunable_categorical)[tunable_categorical.name] + cs_param = _tunable_to_configspace(tunable_categorical) _cmp_tunable_hyperparameter_categorical(tunable_categorical, cs_param) @@ -93,22 +83,22 @@ def test_tunable_to_configspace_int(tunable_int: Tunable) -> None: """ Check the conversion of Tunable to UniformIntegerHyperparameter. """ - cs_param = _tunable_to_configspace(tunable_int)[tunable_int.name] - _cmp_tunable_hyperparameter_int(tunable_int, cs_param) + cs_param = _tunable_to_configspace(tunable_int) + _cmp_tunable_hyperparameter_numerical(tunable_int, cs_param) def test_tunable_to_configspace_float(tunable_float: Tunable) -> None: """ Check the conversion of Tunable to UniformFloatHyperparameter. 
""" - cs_param = _tunable_to_configspace(tunable_float)[tunable_float.name] - _cmp_tunable_hyperparameter_float(tunable_float, cs_param) + cs_param = _tunable_to_configspace(tunable_float) + _cmp_tunable_hyperparameter_numerical(tunable_float, cs_param) _CMP_FUNC = { - "int": _cmp_tunable_hyperparameter_int, - "float": _cmp_tunable_hyperparameter_float, - "categorical": _cmp_tunable_hyperparameter_categorical + "int": _cmp_tunable_hyperparameter_numerical, + "float": _cmp_tunable_hyperparameter_numerical, + "categorical": _cmp_tunable_hyperparameter_categorical, } @@ -119,9 +109,7 @@ def test_tunable_groups_to_hyperparameters(tunable_groups: TunableGroups) -> Non """ space = tunable_groups_to_configspace(tunable_groups) for (tunable, _group) in tunable_groups: - cs_param = space[tunable.name] - assert cs_param.default_value == tunable.value - _CMP_FUNC[tunable.type](tunable, cs_param) + _CMP_FUNC[tunable.type](tunable, space) def test_tunable_groups_to_configspace( From 465a0f52483b0048ed6c543ed199216a6070d3c8 Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Mon, 4 Dec 2023 19:10:00 -0800 Subject: [PATCH 09/44] make tg to cs tests pass --- .../tests/tunables/tunable_to_configspace_test.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py index 75872d596b9..911d5f484db 100644 --- a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py +++ b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py @@ -8,7 +8,7 @@ import pytest -from ConfigSpace import ConfigurationSpace, CategoricalHyperparameter +from ConfigSpace import ConfigurationSpace, CategoricalHyperparameter, EqualsCondition from mlos_bench.tunables.tunable import Tunable from mlos_bench.tunables.tunable_groups import TunableGroups @@ -44,8 +44,16 @@ def configuration_space() -> ConfigurationSpace: spaces["kernel_sched_migration_cost_ns:range"].default_value = 250000 spaces["kernel_sched_migration_cost_ns:special"].default_value = -1 spaces["kernel_sched_migration_cost_ns:type"].default_value = "special" + spaces["kernel_sched_migration_cost_ns:type"].probabilities = (0.1, 0.9) spaces["kernel_sched_latency_ns"].default_value = 2000000 + spaces.add_condition(EqualsCondition( + spaces["kernel_sched_migration_cost_ns:special"], + spaces["kernel_sched_migration_cost_ns:type"], "special")) + spaces.add_condition(EqualsCondition( + spaces["kernel_sched_migration_cost_ns:range"], + spaces["kernel_sched_migration_cost_ns:type"], "range")) + return spaces From 7771eb3f89a8f13b33ca267856d541da307bf788 Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Wed, 6 Dec 2023 17:11:03 -0800 Subject: [PATCH 10/44] all unit tests pass --- .../optimizers/convert_configspace.py | 20 ++++++----- .../tunables/tunable_to_configspace_test.py | 33 +++++++++++-------- 2 files changed, 31 insertions(+), 22 deletions(-) diff --git a/mlos_bench/mlos_bench/optimizers/convert_configspace.py b/mlos_bench/mlos_bench/optimizers/convert_configspace.py index 483410743c5..3c0458c591a 100644 --- a/mlos_bench/mlos_bench/optimizers/convert_configspace.py +++ b/mlos_bench/mlos_bench/optimizers/convert_configspace.py @@ -68,27 +68,29 @@ def _tunable_to_configspace( meta=meta) }) + # Create three hyperparameters: one for regular values, + # one for special values, and one to choose between the two. 
+    cs = ConfigurationSpace({
         "range": hp_type(
-            name=tunable.name + ":range",
+            name=tunable.name,
             lower=tunable.range[0], upper=tunable.range[1],
             default_value=tunable.default if tunable.in_range(tunable.default) else None,
             meta=meta),
         "special": CategoricalHyperparameter(
-            name=tunable.name + ":special",
+            name="special:" + tunable.name,
             choices=tunable.special,
             default_value=tunable.default if tunable.default in tunable.special else None,
             meta=meta),
         "type": CategoricalHyperparameter(
-            name=tunable.name + ":type",
+            name="__type:" + tunable.name,
             choices=["special", "range"], default_value="special",
             weights=[0.1, 0.9]),  # TODO: Make weights configurable
     })

     cs.add_condition(EqualsCondition(
-        cs[tunable.name + ":special"], cs[tunable.name + ":type"], "special"))
+        cs["special:" + tunable.name], cs["__type:" + tunable.name], "special"))
     cs.add_condition(EqualsCondition(
-        cs[tunable.name + ":range"], cs[tunable.name + ":type"], "range"))
+        cs[tunable.name], cs["__type:" + tunable.name], "range"))

     return cs
@@ -137,11 +139,11 @@ def tunable_values_to_configuration(tunables: TunableGroups) -> Configuration:
     for (tunable, _group) in tunables:
         if tunable.special:
             if tunable.value in tunable.special:
-                values[tunable.name + ":type"] = "special"
-                values[tunable.name + ":special"] = tunable.value
+                values["__type:" + tunable.name] = "special"
+                values["special:" + tunable.name] = tunable.value
             else:
-                values[tunable.name + ":type"] = "range"
-                values[tunable.name + ":range"] = tunable.value
+                values["__type:" + tunable.name] = "range"
+                values[tunable.name] = tunable.value
         else:
             values[tunable.name] = tunable.value
     configspace = tunable_groups_to_configspace(tunables)
diff --git a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py
index 911d5f484db..b7ba6822e32 100644
--- a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py
+++ b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py
@@ -8,7 +8,13 @@

 import pytest

-from ConfigSpace import ConfigurationSpace, CategoricalHyperparameter, EqualsCondition
+from ConfigSpace import (
+    ConfigurationSpace,
+    CategoricalHyperparameter,
+    UniformIntegerHyperparameter,
+    UniformFloatHyperparameter,
+    EqualsCondition,
+)

 from mlos_bench.tunables.tunable import Tunable
 from mlos_bench.tunables.tunable_groups import TunableGroups
@@ -33,26 +39,26 @@ def configuration_space() -> ConfigurationSpace:
     spaces = ConfigurationSpace(space={
         "vmSize": ["Standard_B2s", "Standard_B2ms", "Standard_B4ms"],
         "idle": ["halt", "mwait", "noidle"],
-        "kernel_sched_migration_cost_ns:range": (0, 500000),
-        "kernel_sched_migration_cost_ns:special": [-1],
-        "kernel_sched_migration_cost_ns:type": ["special", "range"],
+        "kernel_sched_migration_cost_ns": (0, 500000),
+        "special:kernel_sched_migration_cost_ns": [-1],
+        "__type:kernel_sched_migration_cost_ns": ["special", "range"],
         "kernel_sched_latency_ns": (0, 1000000000),
     })

     spaces["vmSize"].default_value = "Standard_B4ms"
     spaces["idle"].default_value = "halt"
-    spaces["kernel_sched_migration_cost_ns:range"].default_value = 250000
-    spaces["kernel_sched_migration_cost_ns:special"].default_value = -1
-    spaces["kernel_sched_migration_cost_ns:type"].default_value = "special"
-    spaces["kernel_sched_migration_cost_ns:type"].probabilities = (0.1, 0.9)
+    spaces["kernel_sched_migration_cost_ns"].default_value = 250000
+    spaces["special:kernel_sched_migration_cost_ns"].default_value = -1
+    spaces["__type:kernel_sched_migration_cost_ns"].default_value = "special"
+    spaces["__type:kernel_sched_migration_cost_ns"].probabilities = (0.1, 0.9)
     spaces["kernel_sched_latency_ns"].default_value = 2000000

     spaces.add_condition(EqualsCondition(
-        spaces["kernel_sched_migration_cost_ns:special"],
-        spaces["kernel_sched_migration_cost_ns:type"], "special"))
+        spaces["special:kernel_sched_migration_cost_ns"],
+        spaces["__type:kernel_sched_migration_cost_ns"], "special"))
     spaces.add_condition(EqualsCondition(
-        spaces["kernel_sched_migration_cost_ns:range"],
-        spaces["kernel_sched_migration_cost_ns:type"], "range"))
+        spaces["kernel_sched_migration_cost_ns"],
+        spaces["__type:kernel_sched_migration_cost_ns"], "range"))

     return spaces
@@ -73,7 +79,8 @@ def _cmp_tunable_hyperparameter_numerical(
     """
     Check if integer Tunable and ConfigSpace Hyperparameter actually match.
     """
-    param = space[tunable.name + (":range" if tunable.special else "")]
+    param = space[tunable.name]
+    assert isinstance(param, (UniformIntegerHyperparameter, UniformFloatHyperparameter))
     assert (param.lower, param.upper) == tuple(tunable.range)
     if tunable.in_range(tunable.value):
         assert param.default_value == tunable.value

From f69ec0a709efbbccd6b7775b1d95a5d292d13975 Mon Sep 17 00:00:00 2001
From: Sergiy Matusevych
Date: Thu, 7 Dec 2023 11:48:22 -0800
Subject: [PATCH 11/44] make a bit more complex setup for special values of
 the tunables

---
 mlos_bench/mlos_bench/tests/conftest.py                      | 2 +-
 .../mlos_bench/tests/tunables/tunable_to_configspace_test.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/mlos_bench/mlos_bench/tests/conftest.py b/mlos_bench/mlos_bench/tests/conftest.py
index f1de3dd0044..9f646ca8f92 100644
--- a/mlos_bench/mlos_bench/tests/conftest.py
+++ b/mlos_bench/mlos_bench/tests/conftest.py
@@ -60,7 +60,7 @@
         "type": "int",
         "default": -1,
         "range": [0, 500000],
-        "special": [-1]
+        "special": [-1, 0]
     },
     "kernel_sched_latency_ns": {
         "description": "Initial value for the scheduler period",
diff --git a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py
index b7ba6822e32..6b793e623c7 100644
--- a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py
+++ b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py
@@ -40,7 +40,7 @@ def configuration_space() -> ConfigurationSpace:
         "vmSize": ["Standard_B2s", "Standard_B2ms", "Standard_B4ms"],
         "idle": ["halt", "mwait", "noidle"],
         "kernel_sched_migration_cost_ns": (0, 500000),
-        "special:kernel_sched_migration_cost_ns": [-1],
+        "special:kernel_sched_migration_cost_ns": [-1, 0],
         "__type:kernel_sched_migration_cost_ns": ["special", "range"],
         "kernel_sched_latency_ns": (0, 1000000000),
     })

From 21360116ce6f01d57003f3eab60c63939e924fa9 Mon Sep 17 00:00:00 2001
From: Sergiy Matusevych
Date: Thu, 7 Dec 2023 14:38:20 -0800
Subject: [PATCH 12/44] fix a few more unit tests

---
 mlos_bench/mlos_bench/tests/optimizers/mock_opt_test.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/mlos_bench/mlos_bench/tests/optimizers/mock_opt_test.py b/mlos_bench/mlos_bench/tests/optimizers/mock_opt_test.py
index 5f186d4596f..0edd8ba81c1 100644
--- a/mlos_bench/mlos_bench/tests/optimizers/mock_opt_test.py
+++ b/mlos_bench/mlos_bench/tests/optimizers/mock_opt_test.py
@@ -23,19 +23,19 @@ def mock_configurations_no_defaults() -> list:
         ({
             "vmSize": "Standard_B4ms",
             "idle": "halt",
-            "kernel_sched_migration_cost_ns": 13111,
+            "kernel_sched_migration_cost_ns": 13112,
             "kernel_sched_latency_ns": 796233790,
         }, 88.88),
         ({
             "vmSize": "Standard_B2ms",
             "idle": "halt",
-            "kernel_sched_migration_cost_ns": 117025,
+            "kernel_sched_migration_cost_ns": 117026,
            "kernel_sched_latency_ns": 149827706,
         }, 66.66),
        ({
            "vmSize": "Standard_B4ms",
            "idle": "halt",
-            "kernel_sched_migration_cost_ns": 354784,
+            "kernel_sched_migration_cost_ns": 354785,
            "kernel_sched_latency_ns": 795285932,
        }, 99.99),
    ]

From 950f95600fafbb59dee6ec5e655f0e3593e6e25d Mon Sep 17 00:00:00 2001
From: Sergiy Matusevych
Date: Thu, 7 Dec 2023 18:07:01 -0800
Subject: [PATCH 13/44] handling tunables with special values in the optimizer

---
 .../optimizers/convert_configspace.py |  8 ++++++
 .../optimizers/mlos_core_optimizer.py | 25 +++++++++++++------
 2 files changed, 26 insertions(+), 7 deletions(-)

diff --git a/mlos_bench/mlos_bench/optimizers/convert_configspace.py b/mlos_bench/mlos_bench/optimizers/convert_configspace.py
index 3c0458c591a..a4de12a63ce 100644
--- a/mlos_bench/mlos_bench/optimizers/convert_configspace.py
+++ b/mlos_bench/mlos_bench/optimizers/convert_configspace.py
@@ -148,3 +148,11 @@ def tunable_values_to_configuration(tunables: TunableGroups) -> Configuration:
     configspace = tunable_groups_to_configspace(tunables)
     return Configuration(configspace, values=values)
+
+
+def configspace_data_to_tunable_values(data: dict) -> dict:
+    """
+    Remove the fields that correspond to special values in ConfigSpace.
+    In particular, remove `__type:*` keys and trim `special:` prefixes.
+    """
+    return {k.split(":", 1)[0]: v for (k, v) in data.items() if not k.startswith("__")}
diff --git a/mlos_bench/mlos_bench/optimizers/mlos_core_optimizer.py b/mlos_bench/mlos_bench/optimizers/mlos_core_optimizer.py
index 93128aac1df..9bc04023aad 100644
--- a/mlos_bench/mlos_bench/optimizers/mlos_core_optimizer.py
+++ b/mlos_bench/mlos_bench/optimizers/mlos_core_optimizer.py
@@ -20,10 +20,12 @@

 from mlos_bench.environments.status import Status
+from mlos_bench.services.base_service import Service
 from mlos_bench.tunables.tunable_groups import TunableGroups
 from mlos_bench.optimizers.base_optimizer import Optimizer
-from mlos_bench.optimizers.convert_configspace import tunable_groups_to_configspace
-from mlos_bench.services.base_service import Service
+from mlos_bench.optimizers.convert_configspace import (
+    tunable_groups_to_configspace, configspace_data_to_tunable_values
+)

 _LOG = logging.getLogger(__name__)
@@ -103,9 +105,6 @@ def bulk_register(self, configs: Sequence[dict], scores: Sequence[Optional[float
         df_status_completed = df_status.apply(Status.is_completed)
         df_configs = df_configs[df_status_completed]
         df_scores = df_scores[df_status_completed]
-        # External data can have incorrect types (e.g., all strings).
-        for (tunable, _group) in self._tunables:
-            df_configs[tunable.name] = df_configs[tunable.name].astype(tunable.dtype)
         self._opt.register(df_configs, df_scores)
         if _LOG.isEnabledFor(logging.DEBUG):
             (score, _) = self.get_best_observation()
@@ -135,6 +134,16 @@ def _to_df(self, configs: Sequence[dict]) -> pd.DataFrame:
                 df_configs[tunable.name] = tunable.default
             else:
                 df_configs[tunable.name].fillna(tunable.default, inplace=True)
+            # External data can have incorrect types (e.g., all strings).
+            df_configs[tunable.name] = df_configs[tunable.name].astype(tunable.dtype)
+            # Add columns for tunables with special values.
+            if tunable.special:
+                is_special = df_configs[tunable.name].apply(tunable.special.__contains__)
+                df_configs["__type:" + tunable.name] = "range"
+                df_configs.loc[is_special, "__type:" + tunable.name] = "special"
+                df_configs["special:" + tunable.name] = df_configs[tunable.name]
+                df_configs.loc[~is_special, "special:" + tunable.name] = None
+                df_configs.loc[is_special, tunable.name] = None
         # By default, hyperparameters in ConfigurationSpace are sorted by name:
         df_configs = df_configs[sorted(tunables_names)]
         _LOG.debug("Loaded configs:\n%s", df_configs)
@@ -146,7 +155,8 @@ def suggest(self) -> TunableGroups:
         df_config = self._opt.suggest(defaults=self._start_with_defaults)
         self._start_with_defaults = False
         _LOG.info("Iteration %d :: Suggest:\n%s", self._iter, df_config)
-        return self._tunables.copy().assign(df_config.loc[0].to_dict())
+        return self._tunables.copy().assign(
+            configspace_data_to_tunable_values(df_config.loc[0].to_dict()))

     def register(self, tunables: TunableGroups, status: Status,
                  score: Optional[Union[float, dict]] = None) -> Optional[float]:
@@ -154,6 +164,7 @@
         if status.is_completed():
             # By default, hyperparameters in ConfigurationSpace are sorted by name:
             df_config = pd.DataFrame(dict(sorted(tunables.get_param_values().items())), index=[0])
+            # TODO: add special columns here!!!
             _LOG.debug("Score: %s Dataframe:\n%s", score, df_config)
             self._opt.register(df_config, pd.Series([score], dtype=float))
         self._iter += 1
@@ -163,7 +174,7 @@ def get_best_observation(self) -> Union[Tuple[float, TunableGroups], Tuple[None,
         df_config = self._opt.get_best_observation()
         if len(df_config) == 0:
             return (None, None)
-        params = df_config.iloc[0].to_dict()
+        params = configspace_data_to_tunable_values(df_config.iloc[0].to_dict())
         _LOG.debug("Best observation: %s", params)
         score = params.pop("score") * self._opt_sign  # mlos_core always uses the `score` column
         return (score, self._tunables.copy().assign(params))

From 6186828423fadd75749e0b122443c6999a4e8f23 Mon Sep 17 00:00:00 2001
From: Sergiy Matusevych
Date: Fri, 8 Dec 2023 17:19:38 -0800
Subject: [PATCH 14/44] fixed df generation; mlos_core does not handle the new
 data well

---
 .../optimizers/convert_configspace.py       | 93 +++++++++++++----
 .../optimizers/mlos_core_optimizer.py       | 24 ++---
 .../optimizers/opt_bulk_register_test.py    |  2 +-
 .../tunables/tunable_to_configspace_test.py | 26 +++---
 .../bayesian_optimizers/smac_optimizer.py   |  1 +
 5 files changed, 102 insertions(+), 44 deletions(-)

diff --git a/mlos_bench/mlos_bench/optimizers/convert_configspace.py b/mlos_bench/mlos_bench/optimizers/convert_configspace.py
index a4de12a63ce..456d05fe259 100644
--- a/mlos_bench/mlos_bench/optimizers/convert_configspace.py
+++ b/mlos_bench/mlos_bench/optimizers/convert_configspace.py
@@ -8,7 +8,7 @@

 import logging

-from typing import Dict, Optional
+from typing import Dict, Optional, Tuple

 from ConfigSpace import (
     Configuration,
     ConfigurationSpace,
     CategoricalHyperparameter,
     UniformIntegerHyperparameter,
     UniformFloatHyperparameter,
     EqualsCondition,
 )
@@ -70,27 +70,22 @@ def _tunable_to_configspace(

     # Create three hyperparameters: one for regular values,
     # one for special values, and one to choose between the two.
+    (special_name, type_name) = special_param_names(tunable.name)
     cs = ConfigurationSpace({
-        "range": hp_type(
-            name=tunable.name,
-            lower=tunable.range[0], upper=tunable.range[1],
+        tunable.name: hp_type(
+            name=tunable.name, lower=tunable.range[0], upper=tunable.range[1],
             default_value=tunable.default if tunable.in_range(tunable.default) else None,
             meta=meta),
-        "special": CategoricalHyperparameter(
-            name="special:" + tunable.name,
-            choices=tunable.special,
+        special_name: CategoricalHyperparameter(
+            name=special_name, choices=tunable.special,
             default_value=tunable.default if tunable.default in tunable.special else None,
             meta=meta),
-        "type": CategoricalHyperparameter(
-            name="__type:" + tunable.name,
-            choices=["special", "range"], default_value="special",
+        type_name: CategoricalHyperparameter(
+            name=type_name, choices=["special", "range"], default_value="special",
             weights=[0.1, 0.9]),  # TODO: Make weights configurable
     })
-
-    cs.add_condition(EqualsCondition(
-        cs["special:" + tunable.name], cs["__type:" + tunable.name], "special"))
-    cs.add_condition(EqualsCondition(
-        cs[tunable.name], cs["__type:" + tunable.name], "range"))
+    cs.add_condition(EqualsCondition(cs[special_name], cs[type_name], "special"))
+    cs.add_condition(EqualsCondition(cs[tunable.name], cs[type_name], "range"))

     return cs
@@ -133,11 +133,12 @@ def tunable_values_to_configuration(tunables: TunableGroups) -> Configuration:
     values: Dict[str, TunableValue] = {}
     for (tunable, _group) in tunables:
         if tunable.special:
+            (special_name, type_name) = special_param_names(tunable.name)
             if tunable.value in tunable.special:
-                values["__type:" + tunable.name] = "special"
-                values["special:" + tunable.name] = tunable.value
+                values[type_name] = "special"
+                values[special_name] = tunable.value
             else:
-                values["__type:" + tunable.name] = "range"
+                values[type_name] = "range"
                 values[tunable.name] = tunable.value
         else:
             values[tunable.name] = tunable.value
@@ -149,10 +150,63 @@ def configspace_data_to_tunable_values(data: dict) -> dict:
     """
     Remove the fields that correspond to special values in ConfigSpace.
-    In particular, remove `__type:*` keys and trim `special:` prefixes.
+    In particular, remove `!type__` keys and trim `!special` suffixes.
     """
-    return {k.split(":", 1)[0]: v for (k, v) in data.items() if not k.startswith("__")}
+    return {
+        special_param_name_strip(k): v
+        for (k, v) in data.items() if not special_param_name_is_temp(k)
+    }
+
+
+def special_param_names(name: str) -> Tuple[str, str]:
+    """
+    Generate the names of the auxiliary hyperparameters that correspond
+    to a tunable that can have special values.
+
+    Parameters
+    ----------
+    name : str
+        The name of the tunable parameter.
+
+    Returns
+    -------
+    special_name : str
+        The name of the hyperparameter that corresponds to the special value.
+    type_name : str
+        The name of the hyperparameter that chooses between the regular and the special values.
+    """
+    return (name + "!special", name + "!type__")
+
+
+def special_param_name_is_temp(name: str) -> bool:
+    """
+    Check if name corresponds to a temporary ConfigSpace parameter.
+
+    Parameters
+    ----------
+    name : str
+        The name of the hyperparameter.
+
+    Returns
+    -------
+    is_special : bool
+        True if the name corresponds to a temporary ConfigSpace hyperparameter.
+    """
+    return name.endswith("__")
+
+
+def special_param_name_strip(name: str) -> str:
+    """
+    Remove the temporary suffix from a special parameter name.
+
+    Parameters
+    ----------
+    name : str
+        The name of the hyperparameter.
+
+    Returns
+    -------
+    stripped_name : str
+        The name of the hyperparameter without the temporary suffix.
+    """
+    return name.split("!", 1)[0]
diff --git a/mlos_bench/mlos_bench/optimizers/mlos_core_optimizer.py b/mlos_bench/mlos_bench/optimizers/mlos_core_optimizer.py
index 9bc04023aad..ce088a5527a 100644
--- a/mlos_bench/mlos_bench/optimizers/mlos_core_optimizer.py
+++ b/mlos_bench/mlos_bench/optimizers/mlos_core_optimizer.py
@@ -10,7 +10,7 @@
 import os

 from types import TracebackType
-from typing import Optional, Sequence, Tuple, Type, Union
+from typing import Dict, Optional, Sequence, Tuple, Type, Union
 from typing_extensions import Literal

 import pandas as pd
@@ -20,10 +21,12 @@
 from mlos_bench.environments.status import Status
 from mlos_bench.services.base_service import Service
+from mlos_bench.tunables.tunable import TunableValue
 from mlos_bench.tunables.tunable_groups import TunableGroups
 from mlos_bench.optimizers.base_optimizer import Optimizer
+
 from mlos_bench.optimizers.convert_configspace import (
-    tunable_groups_to_configspace, configspace_data_to_tunable_values
+    tunable_groups_to_configspace, configspace_data_to_tunable_values, special_param_names
 )

 _LOG = logging.getLogger(__name__)
@@ -111,7 +113,7 @@
-    def _to_df(self, configs: Sequence[dict]) -> pd.DataFrame:
+    def _to_df(self, configs: Sequence[Dict[str, TunableValue]]) -> pd.DataFrame:
         """
         Select from past trials only the columns required in this experiment and
         impute default values for the tunables that are missing in the dataframe.
@@ -127,7 +129,7 @@
             A dataframe with past trials data, with missing values imputed.
         """
         df_configs = pd.DataFrame(configs)
-        tunables_names = self._tunables.get_param_values().keys()
+        tunables_names = list(self._tunables.get_param_values().keys())
         missing_cols = set(tunables_names).difference(df_configs.columns)
         for (tunable, _group) in self._tunables:
             if tunable.name in missing_cols:
                 df_configs[tunable.name] = tunable.default
             else:
                 df_configs[tunable.name].fillna(tunable.default, inplace=True)
             # External data can have incorrect types (e.g., all strings).
             df_configs[tunable.name] = df_configs[tunable.name].astype(tunable.dtype)
             # Add columns for tunables with special values.
             if tunable.special:
+                (special_name, type_name) = special_param_names(tunable.name)
+                tunables_names += [special_name, type_name]
                 is_special = df_configs[tunable.name].apply(tunable.special.__contains__)
-                df_configs["__type:" + tunable.name] = "range"
-                df_configs.loc[is_special, "__type:" + tunable.name] = "special"
-                df_configs["special:" + tunable.name] = df_configs[tunable.name]
-                df_configs.loc[~is_special, "special:" + tunable.name] = None
+                df_configs[type_name] = "range"
+                df_configs.loc[is_special, type_name] = "special"
+                df_configs[special_name] = df_configs[tunable.name]
+                df_configs.loc[~is_special, special_name] = None
                 df_configs.loc[is_special, tunable.name] = None
         # By default, hyperparameters in ConfigurationSpace are sorted by name:
         df_configs = df_configs[sorted(tunables_names)]
         _LOG.debug("Loaded configs:\n%s", df_configs)
@@ -164,9 +166,7 @@ def register(self, tunables: TunableGroups, status: Status,
         score = super().register(tunables, status, score)  # With _opt_sign applied
         if status.is_completed():
-            # By default, hyperparameters in ConfigurationSpace are sorted by name:
-            df_config = pd.DataFrame(dict(sorted(tunables.get_param_values().items())), index=[0])
-            # TODO: add special columns here!!!
+            df_config = self._to_df([tunables.get_param_values()])
             _LOG.debug("Score: %s Dataframe:\n%s", score, df_config)
             self._opt.register(df_config, pd.Series([score], dtype=float))
         self._iter += 1
diff --git a/mlos_bench/mlos_bench/tests/optimizers/opt_bulk_register_test.py b/mlos_bench/mlos_bench/tests/optimizers/opt_bulk_register_test.py
index e9967776955..4e5582fa709 100644
--- a/mlos_bench/mlos_bench/tests/optimizers/opt_bulk_register_test.py
+++ b/mlos_bench/mlos_bench/tests/optimizers/opt_bulk_register_test.py
@@ -123,7 +123,7 @@ def test_update_mock_min(mock_opt: MockOptimizer, mock_configs: List[dict],
     assert mock_opt.suggest().get_param_values() == {
         "vmSize": "Standard_B4ms",
         "idle": "halt",
-        "kernel_sched_migration_cost_ns": 13111,
+        "kernel_sched_migration_cost_ns": 13112,
         'kernel_sched_latency_ns': 796233790,
     }
diff --git a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py
index 6b793e623c7..9cc76975535 100644
--- a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py
+++ b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py
@@ -18,9 +18,11 @@

 from mlos_bench.tunables.tunable import Tunable
 from mlos_bench.tunables.tunable_groups import TunableGroups
-
-from mlos_bench.optimizers.convert_configspace import _tunable_to_configspace
-from mlos_bench.optimizers.convert_configspace import tunable_groups_to_configspace
+from mlos_bench.optimizers.convert_configspace import (
+    _tunable_to_configspace,
+    tunable_groups_to_configspace,
+    special_param_names,
+)

 # pylint: disable=redefined-outer-name
@@ -36,7 +38,7 @@ def configuration_space() -> ConfigurationSpace:
     configuration_space : ConfigurationSpace
         A new ConfigurationSpace object for testing.
""" + (ksm_special, ksm_type) = special_param_names("kernel_sched_migration_cost_ns") + spaces = ConfigurationSpace(space={ "vmSize": ["Standard_B2s", "Standard_B2ms", "Standard_B4ms"], "idle": ["halt", "mwait", "noidle"], "kernel_sched_migration_cost_ns": (0, 500000), - "special:kernel_sched_migration_cost_ns": [-1, 0], - "__type:kernel_sched_migration_cost_ns": ["special", "range"], + ksm_special: [-1, 0], + ksm_type: ["special", "range"], "kernel_sched_latency_ns": (0, 1000000000), }) spaces["vmSize"].default_value = "Standard_B4ms" spaces["idle"].default_value = "halt" spaces["kernel_sched_migration_cost_ns"].default_value = 250000 - spaces["special:kernel_sched_migration_cost_ns"].default_value = -1 - spaces["__type:kernel_sched_migration_cost_ns"].default_value = "special" - spaces["__type:kernel_sched_migration_cost_ns"].probabilities = (0.1, 0.9) + spaces[ksm_special].default_value = -1 + spaces[ksm_type].default_value = "special" + spaces[ksm_type].probabilities = (0.1, 0.9) spaces["kernel_sched_latency_ns"].default_value = 2000000 spaces.add_condition(EqualsCondition( - spaces["special:kernel_sched_migration_cost_ns"], - spaces["__type:kernel_sched_migration_cost_ns"], "special")) + spaces[ksm_special], spaces[ksm_type], "special")) spaces.add_condition(EqualsCondition( - spaces["kernel_sched_migration_cost_ns"], - spaces["__type:kernel_sched_migration_cost_ns"], "range")) + spaces["kernel_sched_migration_cost_ns"], spaces[ksm_type], "range")) return spaces diff --git a/mlos_core/mlos_core/optimizers/bayesian_optimizers/smac_optimizer.py b/mlos_core/mlos_core/optimizers/bayesian_optimizers/smac_optimizer.py index 78d661be730..3b9c5bbac8c 100644 --- a/mlos_core/mlos_core/optimizers/bayesian_optimizers/smac_optimizer.py +++ b/mlos_core/mlos_core/optimizers/bayesian_optimizers/smac_optimizer.py @@ -334,6 +334,7 @@ def _to_configspace_configs(self, configurations: pd.DataFrame) -> List[ConfigSp configurations : list List of ConfigSpace configurations. """ + # FIXME: BUG!!! Need to generate proper dict keys for ConfigSpace.Configuration objects. 
return [ ConfigSpace.Configuration(self.optimizer_parameter_space, values=config.to_dict()) for (_, config) in configurations.iterrows() From 003f50d5aacbaee345c3acc9f9aef0c4cc3319c9 Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Mon, 8 Jan 2024 15:21:38 -0800 Subject: [PATCH 15/44] bugfix: make int columns nullable if they have special values --- mlos_bench/mlos_bench/optimizers/mlos_core_optimizer.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mlos_bench/mlos_bench/optimizers/mlos_core_optimizer.py b/mlos_bench/mlos_bench/optimizers/mlos_core_optimizer.py index ce088a5527a..509dbd6f794 100644 --- a/mlos_bench/mlos_bench/optimizers/mlos_core_optimizer.py +++ b/mlos_bench/mlos_bench/optimizers/mlos_core_optimizer.py @@ -145,6 +145,9 @@ def _to_df(self, configs: Sequence[Dict[str, TunableValue]]) -> pd.DataFrame: is_special = df_configs[tunable.name].apply(tunable.special.__contains__) df_configs[type_name] = "range" df_configs.loc[is_special, type_name] = "special" + if tunable.type == "int": + # Make int column NULLABLE: + df_configs[tunable.name] = df_configs[tunable.name].astype("Int64") df_configs[special_name] = df_configs[tunable.name] df_configs.loc[~is_special, special_name] = None df_configs.loc[is_special, tunable.name] = None From d2ef5695723e02c18fb30776e8aa6f1fafd632a3 Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Mon, 8 Jan 2024 16:13:02 -0800 Subject: [PATCH 16/44] minor fix: special can be an empty list instead of null --- mlos_bench/mlos_bench/tunables/tunable.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mlos_bench/mlos_bench/tunables/tunable.py b/mlos_bench/mlos_bench/tunables/tunable.py index 479f8481e73..06d2abeadeb 100644 --- a/mlos_bench/mlos_bench/tunables/tunable.py +++ b/mlos_bench/mlos_bench/tunables/tunable.py @@ -92,8 +92,8 @@ def _sanity_check(self) -> None: raise ValueError(f"Range must be None for the categorical type tunable {self}") if len(set(self._values)) != len(self._values): raise ValueError(f"Values must be unique for the categorical type tunable {self}") - if self._special is not None: - raise ValueError(f"Special values must be None for the categorical type tunable {self}") + if self._special: + raise ValueError(f"Categorical tunable cannot have special values: {self}") elif self.is_numerical: if self._values is not None: raise ValueError(f"Values must be None for the numerical type tunable {self}") From d21964c3bb7ba6184d45b10982ed83404f7a47d5 Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Mon, 8 Jan 2024 17:05:03 -0800 Subject: [PATCH 17/44] fix the configspace_data_to_tunable_values() to deal with the specials --- .../optimizers/convert_configspace.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/mlos_bench/mlos_bench/optimizers/convert_configspace.py b/mlos_bench/mlos_bench/optimizers/convert_configspace.py index 456d05fe259..29ca46b2f5c 100644 --- a/mlos_bench/mlos_bench/optimizers/convert_configspace.py +++ b/mlos_bench/mlos_bench/optimizers/convert_configspace.py @@ -151,10 +151,19 @@ def configspace_data_to_tunable_values(data: dict) -> dict: Remove the fields that correspond to special values in ConfigSpace. In particular, remove `!type__` keys and trim `!special` suffixes. 
""" - return { - special_param_name_strip(k): v - for (k, v) in data.items() if not special_param_name_is_temp(k) - } + data = data.copy() + specials = [ + special_param_name_strip(k) + for k in data.keys() if special_param_name_is_temp(k) + ] + for k in specials: + (special_name, type_name) = special_param_names(k) + if data[type_name] == "special": + data[k] = data[special_name] + if special_name in data: + del data[special_name] + del data[type_name] + return data def special_param_names(name: str) -> Tuple[str, str]: From 3adff5775d369d17ba9529e5ebd3f0c5b919c75e Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Mon, 8 Jan 2024 17:31:06 -0800 Subject: [PATCH 18/44] make special/regular weights uniform to make FLAML work --- mlos_bench/mlos_bench/optimizers/convert_configspace.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mlos_bench/mlos_bench/optimizers/convert_configspace.py b/mlos_bench/mlos_bench/optimizers/convert_configspace.py index 29ca46b2f5c..bc4f1e46351 100644 --- a/mlos_bench/mlos_bench/optimizers/convert_configspace.py +++ b/mlos_bench/mlos_bench/optimizers/convert_configspace.py @@ -82,7 +82,7 @@ def _tunable_to_configspace( meta=meta), type_name: CategoricalHyperparameter( name=type_name, choices=["special", "range"], default_value="special", - weights=[0.1, 0.9]), # TODO: Make weights configurable + weights=[0.5, 0.5]), # TODO: Make weights configurable; FLAML requires uniform weights. }) cs.add_condition(EqualsCondition(cs[special_name], cs[type_name], "special")) cs.add_condition(EqualsCondition(cs[tunable.name], cs[type_name], "range")) From b8928998fff39228111115d265eb2b589d040898 Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Mon, 8 Jan 2024 17:34:46 -0800 Subject: [PATCH 19/44] fix teh unit test --- .../mlos_bench/tests/tunables/tunable_to_configspace_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py index 9cc76975535..b4b2d39c70d 100644 --- a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py +++ b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py @@ -54,7 +54,7 @@ def configuration_space() -> ConfigurationSpace: spaces["kernel_sched_migration_cost_ns"].default_value = 250000 spaces[ksm_special].default_value = -1 spaces[ksm_type].default_value = "special" - spaces[ksm_type].probabilities = (0.1, 0.9) + spaces[ksm_type].probabilities = (0.5, 0.5) # FLAML requires distribution to be uniform spaces["kernel_sched_latency_ns"].default_value = 2000000 spaces.add_condition(EqualsCondition( From fe4e6dfa84c7bd1416d2ea509c5b4b8c02d51f63 Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Mon, 8 Jan 2024 17:44:41 -0800 Subject: [PATCH 20/44] fix values in unit tests --- .../mlos_bench/tests/optimizers/toy_optimization_loop_test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mlos_bench/mlos_bench/tests/optimizers/toy_optimization_loop_test.py b/mlos_bench/mlos_bench/tests/optimizers/toy_optimization_loop_test.py index 69eee102a83..e652abffd7e 100644 --- a/mlos_bench/mlos_bench/tests/optimizers/toy_optimization_loop_test.py +++ b/mlos_bench/mlos_bench/tests/optimizers/toy_optimization_loop_test.py @@ -81,7 +81,7 @@ def test_mock_optimization_loop(mock_env_no_noise: MockEnv, assert tunables.get_param_values() == { "vmSize": "Standard_B2ms", "idle": "halt", - "kernel_sched_migration_cost_ns": 117025, + 
"kernel_sched_migration_cost_ns": 117026, "kernel_sched_latency_ns": 149827706, } @@ -96,7 +96,7 @@ def test_mock_optimization_loop_no_defaults(mock_env_no_noise: MockEnv, assert tunables.get_param_values() == { "vmSize": "Standard_B2s", "idle": "halt", - "kernel_sched_migration_cost_ns": 49122, + "kernel_sched_migration_cost_ns": 49123, "kernel_sched_latency_ns": 234760738, } From 538b917367c01c93afb0f2a5a5a2bc78b61d632a Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Mon, 8 Jan 2024 18:15:32 -0800 Subject: [PATCH 21/44] make SMAC optimizer work with special values --- .../tests/optimizers/toy_optimization_loop_test.py | 6 +++--- mlos_core/mlos_core/optimizers/optimizer.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/mlos_bench/mlos_bench/tests/optimizers/toy_optimization_loop_test.py b/mlos_bench/mlos_bench/tests/optimizers/toy_optimization_loop_test.py index e652abffd7e..4df86df5bd3 100644 --- a/mlos_bench/mlos_bench/tests/optimizers/toy_optimization_loop_test.py +++ b/mlos_bench/mlos_bench/tests/optimizers/toy_optimization_loop_test.py @@ -123,12 +123,12 @@ def test_smac_optimization_loop(mock_env_no_noise: MockEnv, Toy optimization loop with mock environment and SMAC optimizer. """ (score, tunables) = _optimize(mock_env_no_noise, smac_opt) - expected_score = 73.59 + expected_score = 70.33 expected_tunable_values = { "vmSize": "Standard_B2s", "idle": "mwait", - "kernel_sched_migration_cost_ns": 319025, - "kernel_sched_latency_ns": 499339615, + "kernel_sched_migration_cost_ns": 297669, + "kernel_sched_latency_ns": 290365137, } assert score == pytest.approx(expected_score, 0.01) assert tunables.get_param_values() == expected_tunable_values diff --git a/mlos_core/mlos_core/optimizers/optimizer.py b/mlos_core/mlos_core/optimizers/optimizer.py index aa8ba7f847b..8437aacac8c 100644 --- a/mlos_core/mlos_core/optimizers/optimizer.py +++ b/mlos_core/mlos_core/optimizers/optimizer.py @@ -136,7 +136,7 @@ def suggest(self, context: Optional[pd.DataFrame] = None, defaults: bool = False "Suggest returned a configuration with the wrong number of parameters." if self._space_adapter: configuration = self._space_adapter.transform(configuration) - assert len(configuration.columns) == len(self.parameter_space.values()), \ + assert set(configuration).issubset(set(self.parameter_space)), \ "Space adapter transformed configuration with the wrong number of parameters." 
return configuration From 851c7c73872a6a87b02947eef09b8eed347cc0a0 Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Tue, 9 Jan 2024 12:11:40 -0800 Subject: [PATCH 22/44] make FLAML optimizer work with special values and conditionals --- .../optimizers/toy_optimization_loop_test.py | 4 +- .../mlos_core/optimizers/flaml_optimizer.py | 38 ++++++++++++++++--- mlos_core/mlos_core/optimizers/optimizer.py | 2 +- 3 files changed, 36 insertions(+), 8 deletions(-) diff --git a/mlos_bench/mlos_bench/tests/optimizers/toy_optimization_loop_test.py b/mlos_bench/mlos_bench/tests/optimizers/toy_optimization_loop_test.py index 4df86df5bd3..86aa7a400be 100644 --- a/mlos_bench/mlos_bench/tests/optimizers/toy_optimization_loop_test.py +++ b/mlos_bench/mlos_bench/tests/optimizers/toy_optimization_loop_test.py @@ -111,8 +111,8 @@ def test_flaml_optimization_loop(mock_env_no_noise: MockEnv, assert tunables.get_param_values() == { "vmSize": "Standard_B2s", "idle": "halt", - "kernel_sched_migration_cost_ns": 50132, - "kernel_sched_latency_ns": 22674895, + "kernel_sched_migration_cost_ns": -1, + "kernel_sched_latency_ns": 13718105, } diff --git a/mlos_core/mlos_core/optimizers/flaml_optimizer.py b/mlos_core/mlos_core/optimizers/flaml_optimizer.py index 0744c2e08c2..d583a8717c8 100644 --- a/mlos_core/mlos_core/optimizers/flaml_optimizer.py +++ b/mlos_core/mlos_core/optimizers/flaml_optimizer.py @@ -134,11 +134,20 @@ def _target_function(self, config: dict) -> Union[dict, None]: result: Union[dict, None] Dictionary with a single key, `score`, if config already evaluated; `None` otherwise. """ - cs_config: ConfigSpace.Configuration = ConfigSpace.Configuration(self.optimizer_parameter_space, values=config) + cs_config: ConfigSpace.Configuration = ConfigSpace.Configuration( + self.optimizer_parameter_space, values=config, allow_inactive_with_values=True) + # FLAML ignores ConfigSpace conditionals when proposing new configurations. + # We have to manually remove inactive hyperparameters from FLAML suggestion here. 
+ cs_config = ConfigSpace.Configuration( + self.optimizer_parameter_space, values={ + key: cs_config[key] + for key in self.optimizer_parameter_space.get_active_hyperparameters(cs_config) + } + ) if cs_config in self.evaluated_samples: return {'score': self.evaluated_samples[cs_config].score} - self._suggested_config = config + self._suggested_config = dict(cs_config) # Cleaned-up version of config return None # Returning None stops the process def _get_next_config(self) -> dict: @@ -164,7 +173,7 @@ def _get_next_config(self) -> dict: points_to_evaluate: list = [] evaluated_rewards: list = [] if len(self.evaluated_samples) > 0: - points_to_evaluate = [s.config for s in self.evaluated_samples.values()] + points_to_evaluate = [self._config_to_dict(conf) for conf in self.evaluated_samples] evaluated_rewards = [s.score for s in self.evaluated_samples.values()] # Warm start FLAML optimizer @@ -174,8 +183,8 @@ def _get_next_config(self) -> dict: config=self.flaml_parameter_space, mode='min', metric='score', - points_to_evaluate=list(points_to_evaluate), - evaluated_rewards=list(evaluated_rewards), + points_to_evaluate=points_to_evaluate, + evaluated_rewards=evaluated_rewards, num_samples=len(points_to_evaluate) + 1, low_cost_partial_config=self.low_cost_partial_config, verbose=0, @@ -184,3 +193,22 @@ def _get_next_config(self) -> dict: raise RuntimeError('FLAML did not produce a suggestion') return self._suggested_config # type: ignore[unreachable] + + @staticmethod + def _config_to_dict(config: ConfigSpace.Configuration) -> dict: + """Converts a ConfigSpace.Configuration to a dictionary. + + Parameters + ---------- + config: ConfigSpace.Configuration + Configuration to be converted. + + Returns + ------- + result: dict + Dictionary representation of the configuration. + """ + return { + k: v for (k, v) in config.items() + if config.config_space[k].is_legal(v) + } diff --git a/mlos_core/mlos_core/optimizers/optimizer.py b/mlos_core/mlos_core/optimizers/optimizer.py index 8437aacac8c..9c1d59dc588 100644 --- a/mlos_core/mlos_core/optimizers/optimizer.py +++ b/mlos_core/mlos_core/optimizers/optimizer.py @@ -132,7 +132,7 @@ def suggest(self, context: Optional[pd.DataFrame] = None, defaults: bool = False configuration = self._suggest(context) assert len(configuration) == 1, \ "Suggest must return a single configuration." - assert len(configuration.columns) == len(self.optimizer_parameter_space.values()), \ + assert set(configuration).issubset(set(self.parameter_space)), \ "Suggest returned a configuration with the wrong number of parameters." 
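
The pruning above in miniature: FLAML proposes a value for every hyperparameter, including ones that ConfigSpace conditionals have switched off, so the raw suggestion is first accepted with `allow_inactive_with_values=True` and then reduced to its active subset. A runnable sketch with a made-up two-parameter space:

    import ConfigSpace

    space = ConfigSpace.ConfigurationSpace(space={"kind": ["special", "range"], "value": (0, 10)})
    space.add_condition(ConfigSpace.EqualsCondition(space["value"], space["kind"], "range"))

    raw = {"kind": "special", "value": 5}  # "value" is inactive when kind == "special"
    config = ConfigSpace.Configuration(space, values=raw, allow_inactive_with_values=True)
    active = {key: config[key] for key in space.get_active_hyperparameters(config)}
    assert active == {"kind": "special"}
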
if self._space_adapter: configuration = self._space_adapter.transform(configuration) From 3f2d3f44f18b495db310b80019dfffa887198545 Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Tue, 9 Jan 2024 12:15:23 -0800 Subject: [PATCH 23/44] make launcher_run_test work again after FLAML updates --- mlos_bench/mlos_bench/tests/launcher_run_test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mlos_bench/mlos_bench/tests/launcher_run_test.py b/mlos_bench/mlos_bench/tests/launcher_run_test.py index cdea175bdef..8dafd0725d2 100644 --- a/mlos_bench/mlos_bench/tests/launcher_run_test.py +++ b/mlos_bench/mlos_bench/tests/launcher_run_test.py @@ -97,7 +97,7 @@ def test_launch_main_app_opt(root_path: str, local_exec_service: LocalExecServic [ # Iteration 1: Expect first value to be the baseline f"^{_RE_DATE} mlos_core_optimizer\\.py:\\d+ " + - r"register DEBUG Score: 65\.67\d+ Dataframe:\s*$", + r"register DEBUG Score: 64\.88\d+ Dataframe:\s*$", # Iteration 2: The result may not always be deterministic f"^{_RE_DATE} mlos_core_optimizer\\.py:\\d+ " + r"register DEBUG Score: \d+\.\d+ Dataframe:\s*$", @@ -106,6 +106,6 @@ def test_launch_main_app_opt(root_path: str, local_exec_service: LocalExecServic r"register DEBUG Score: \d+\.\d+ Dataframe:\s*$", # Final result: baseline is the optimum for the mock environment f"^{_RE_DATE} run\\.py:\\d+ " + - r"_optimize INFO Env: Mock environment best score: 65\.67\d+\s*$", + r"_optimize INFO Env: Mock environment best score: 64\.88\d+\s*$", ] ) From 7f45fa23091fa080bdb3825aa9d0274014ffa20f Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Tue, 9 Jan 2024 14:28:32 -0800 Subject: [PATCH 24/44] remove irrelevant comment from smac opt --- .../mlos_core/optimizers/bayesian_optimizers/smac_optimizer.py | 1 - 1 file changed, 1 deletion(-) diff --git a/mlos_core/mlos_core/optimizers/bayesian_optimizers/smac_optimizer.py b/mlos_core/mlos_core/optimizers/bayesian_optimizers/smac_optimizer.py index 3b9c5bbac8c..78d661be730 100644 --- a/mlos_core/mlos_core/optimizers/bayesian_optimizers/smac_optimizer.py +++ b/mlos_core/mlos_core/optimizers/bayesian_optimizers/smac_optimizer.py @@ -334,7 +334,6 @@ def _to_configspace_configs(self, configurations: pd.DataFrame) -> List[ConfigSp configurations : list List of ConfigSpace configurations. """ - # FIXME: BUG!!! Need to generate proper dict keys for ConfigSpace.Configuration objects. return [ ConfigSpace.Configuration(self.optimizer_parameter_space, values=config.to_dict()) for (_, config) in configurations.iterrows() From fbf40015bccc98e65dd0e5f0e0e3493c8a935328 Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Tue, 9 Jan 2024 15:23:26 -0800 Subject: [PATCH 25/44] move config normalization to the base class --- .../mlos_core/optimizers/flaml_optimizer.py | 34 ++----------------- mlos_core/mlos_core/optimizers/optimizer.py | 16 +++++++++ .../mlos_core/spaces/adapters/llamatune.py | 4 ++- 3 files changed, 22 insertions(+), 32 deletions(-) diff --git a/mlos_core/mlos_core/optimizers/flaml_optimizer.py b/mlos_core/mlos_core/optimizers/flaml_optimizer.py index d583a8717c8..e06cf1f9f0c 100644 --- a/mlos_core/mlos_core/optimizers/flaml_optimizer.py +++ b/mlos_core/mlos_core/optimizers/flaml_optimizer.py @@ -134,20 +134,11 @@ def _target_function(self, config: dict) -> Union[dict, None]: result: Union[dict, None] Dictionary with a single key, `score`, if config already evaluated; `None` otherwise. 
""" - cs_config: ConfigSpace.Configuration = ConfigSpace.Configuration( - self.optimizer_parameter_space, values=config, allow_inactive_with_values=True) - # FLAML ignores ConfigSpace conditionals when proposing new configurations. - # We have to manually remove inactive hyperparameters from FLAML suggestion here. - cs_config = ConfigSpace.Configuration( - self.optimizer_parameter_space, values={ - key: cs_config[key] - for key in self.optimizer_parameter_space.get_active_hyperparameters(cs_config) - } - ) + cs_config = self._dict_to_config(config) if cs_config in self.evaluated_samples: return {'score': self.evaluated_samples[cs_config].score} - self._suggested_config = dict(cs_config) # Cleaned-up version of config + self._suggested_config = dict(cs_config) # Cleaned-up version of the config return None # Returning None stops the process def _get_next_config(self) -> dict: @@ -173,7 +164,7 @@ def _get_next_config(self) -> dict: points_to_evaluate: list = [] evaluated_rewards: list = [] if len(self.evaluated_samples) > 0: - points_to_evaluate = [self._config_to_dict(conf) for conf in self.evaluated_samples] + points_to_evaluate = [dict(self._dict_to_config(conf)) for conf in self.evaluated_samples] evaluated_rewards = [s.score for s in self.evaluated_samples.values()] # Warm start FLAML optimizer @@ -193,22 +184,3 @@ def _get_next_config(self) -> dict: raise RuntimeError('FLAML did not produce a suggestion') return self._suggested_config # type: ignore[unreachable] - - @staticmethod - def _config_to_dict(config: ConfigSpace.Configuration) -> dict: - """Converts a ConfigSpace.Configuration to a dictionary. - - Parameters - ---------- - config: ConfigSpace.Configuration - Configuration to be converted. - - Returns - ------- - result: dict - Dictionary representation of the configuration. - """ - return { - k: v for (k, v) in config.items() - if config.config_space[k].is_legal(v) - } diff --git a/mlos_core/mlos_core/optimizers/optimizer.py b/mlos_core/mlos_core/optimizers/optimizer.py index 9c1d59dc588..db5bc58157e 100644 --- a/mlos_core/mlos_core/optimizers/optimizer.py +++ b/mlos_core/mlos_core/optimizers/optimizer.py @@ -214,6 +214,22 @@ def cleanup(self) -> None: Redefine this method in optimizers that require cleanup. """ + def _dict_to_config(self, config: dict) -> ConfigSpace.Configuration: + """ + Convert a dictionary to a valid ConfigSpace configuration. + + Some optimizers (e.g., FLAML) ignore ConfigSpace conditionals when proposing new + configurations. We have to manually remove inactive hyperparameters such suggestions. 
+ """ + cs_config: ConfigSpace.Configuration = ConfigSpace.Configuration( + self.optimizer_parameter_space, values=config, allow_inactive_with_values=True) + return ConfigSpace.Configuration( + self.optimizer_parameter_space, values={ + key: cs_config[key] + for key in self.optimizer_parameter_space.get_active_hyperparameters(cs_config) + } + ) + def _from_1hot(self, config: npt.NDArray) -> pd.DataFrame: """ Convert numpy array from one-hot encoding to a DataFrame diff --git a/mlos_core/mlos_core/spaces/adapters/llamatune.py b/mlos_core/mlos_core/spaces/adapters/llamatune.py index 7eff790d29b..cef0fe21922 100644 --- a/mlos_core/mlos_core/spaces/adapters/llamatune.py +++ b/mlos_core/mlos_core/spaces/adapters/llamatune.py @@ -108,9 +108,11 @@ def inverse_transform(self, configurations: pd.DataFrame) -> pd.DataFrame: if getattr(self, '_pinv_matrix', None) is None: self._try_generate_approx_inverse_mapping() + # Replace NaNs with zeros for inactive hyperparameters + config_vector = np.nan_to_num(configuration.get_array(), nan=0.0) # Perform approximate reverse mapping # NOTE: applying special value biasing is not possible - vector = self._config_scaler.inverse_transform([configuration.get_array()])[0] + vector = self._config_scaler.inverse_transform([config_vector])[0] target_config_vector = self._pinv_matrix.dot(vector) target_config = ConfigSpace.Configuration(self.target_parameter_space, vector=target_config_vector) From 13ff42c34fe784715a9ae724b01563e9b057fcd8 Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Wed, 10 Jan 2024 12:31:49 -0800 Subject: [PATCH 26/44] move normalize_config to mlos_core.util; use it in LlamaTune. All unit tests pass! --- .../optimizers/toy_optimization_loop_test.py | 2 +- mlos_core/mlos_core/__init__.py | 19 ------- .../mlos_core/optimizers/flaml_optimizer.py | 12 +++- mlos_core/mlos_core/optimizers/optimizer.py | 22 +------- .../mlos_core/spaces/adapters/llamatune.py | 6 +- mlos_core/mlos_core/util.py | 56 +++++++++++++++++++ 6 files changed, 73 insertions(+), 44 deletions(-) create mode 100644 mlos_core/mlos_core/util.py diff --git a/mlos_bench/mlos_bench/tests/optimizers/toy_optimization_loop_test.py b/mlos_bench/mlos_bench/tests/optimizers/toy_optimization_loop_test.py index 86aa7a400be..54e860a8553 100644 --- a/mlos_bench/mlos_bench/tests/optimizers/toy_optimization_loop_test.py +++ b/mlos_bench/mlos_bench/tests/optimizers/toy_optimization_loop_test.py @@ -12,7 +12,7 @@ import pytest -from mlos_core import config_to_dataframe +from mlos_core.util import config_to_dataframe from mlos_core.optimizers.bayesian_optimizers.smac_optimizer import SmacOptimizer from mlos_bench.optimizers.convert_configspace import tunable_values_to_configuration diff --git a/mlos_core/mlos_core/__init__.py b/mlos_core/mlos_core/__init__.py index ba10a11d85f..3d816eb9169 100644 --- a/mlos_core/mlos_core/__init__.py +++ b/mlos_core/mlos_core/__init__.py @@ -5,22 +5,3 @@ """ Basic initializer module for the mlos_core package. """ - -import ConfigSpace -import pandas as pd - - -def config_to_dataframe(config: ConfigSpace.Configuration) -> pd.DataFrame: - """Converts a ConfigSpace config to a DataFrame - - Parameters - ---------- - config : ConfigSpace.Configuration - The config to convert. - - Returns - ------- - pd.DataFrame - A DataFrame with a single row, containing the config's parameters. 
- """ - return pd.DataFrame([dict(config)]) diff --git a/mlos_core/mlos_core/optimizers/flaml_optimizer.py b/mlos_core/mlos_core/optimizers/flaml_optimizer.py index e06cf1f9f0c..cbb7713d467 100644 --- a/mlos_core/mlos_core/optimizers/flaml_optimizer.py +++ b/mlos_core/mlos_core/optimizers/flaml_optimizer.py @@ -13,6 +13,7 @@ import numpy as np import pandas as pd +from mlos_core.util import normalize_config from mlos_core.optimizers.optimizer import BaseOptimizer from mlos_core.spaces.adapters.adapter import BaseSpaceAdapter @@ -134,7 +135,7 @@ def _target_function(self, config: dict) -> Union[dict, None]: result: Union[dict, None] Dictionary with a single key, `score`, if config already evaluated; `None` otherwise. """ - cs_config = self._dict_to_config(config) + cs_config = normalize_config(self.parameter_space, config) if cs_config in self.evaluated_samples: return {'score': self.evaluated_samples[cs_config].score} @@ -164,8 +165,13 @@ def _get_next_config(self) -> dict: points_to_evaluate: list = [] evaluated_rewards: list = [] if len(self.evaluated_samples) > 0: - points_to_evaluate = [dict(self._dict_to_config(conf)) for conf in self.evaluated_samples] - evaluated_rewards = [s.score for s in self.evaluated_samples.values()] + points_to_evaluate = [ + dict(normalize_config(self.parameter_space, conf)) + for conf in self.evaluated_samples + ] + evaluated_rewards = [ + s.score for s in self.evaluated_samples.values() + ] # Warm start FLAML optimizer self._suggested_config = None diff --git a/mlos_core/mlos_core/optimizers/optimizer.py b/mlos_core/mlos_core/optimizers/optimizer.py index db5bc58157e..b2424403059 100644 --- a/mlos_core/mlos_core/optimizers/optimizer.py +++ b/mlos_core/mlos_core/optimizers/optimizer.py @@ -15,7 +15,7 @@ import numpy.typing as npt import pandas as pd -from mlos_core import config_to_dataframe +from mlos_core.util import config_to_dataframe from mlos_core.spaces.adapters.adapter import BaseSpaceAdapter @@ -132,11 +132,11 @@ def suggest(self, context: Optional[pd.DataFrame] = None, defaults: bool = False configuration = self._suggest(context) assert len(configuration) == 1, \ "Suggest must return a single configuration." - assert set(configuration).issubset(set(self.parameter_space)), \ + assert set(configuration.columns).issubset(set(self.optimizer_parameter_space)), \ "Suggest returned a configuration with the wrong number of parameters." if self._space_adapter: configuration = self._space_adapter.transform(configuration) - assert set(configuration).issubset(set(self.parameter_space)), \ + assert set(configuration.columns).issubset(set(self.parameter_space)), \ "Space adapter transformed configuration with the wrong number of parameters." return configuration @@ -214,22 +214,6 @@ def cleanup(self) -> None: Redefine this method in optimizers that require cleanup. """ - def _dict_to_config(self, config: dict) -> ConfigSpace.Configuration: - """ - Convert a dictionary to a valid ConfigSpace configuration. - - Some optimizers (e.g., FLAML) ignore ConfigSpace conditionals when proposing new - configurations. We have to manually remove inactive hyperparameters such suggestions. 
-        """
-        cs_config: ConfigSpace.Configuration = ConfigSpace.Configuration(
-            self.optimizer_parameter_space, values=config, allow_inactive_with_values=True)
-        return ConfigSpace.Configuration(
-            self.optimizer_parameter_space, values={
-                key: cs_config[key]
-                for key in self.optimizer_parameter_space.get_active_hyperparameters(cs_config)
-            }
-        )
-
     def _from_1hot(self, config: npt.NDArray) -> pd.DataFrame:
         """
         Convert numpy array from one-hot encoding to a DataFrame
diff --git a/mlos_core/mlos_core/spaces/adapters/llamatune.py b/mlos_core/mlos_core/spaces/adapters/llamatune.py
index cef0fe21922..a49e000a878 100644
--- a/mlos_core/mlos_core/spaces/adapters/llamatune.py
+++ b/mlos_core/mlos_core/spaces/adapters/llamatune.py
@@ -13,6 +13,8 @@
 import numpy.typing as npt
 import pandas as pd
 from sklearn.preprocessing import MinMaxScaler
+
+from mlos_core.util import normalize_config
 from mlos_core.spaces.adapters.adapter import BaseSpaceAdapter

@@ -129,12 +131,12 @@ def transform(self, configuration: pd.DataFrame) -> pd.DataFrame:
         target_configuration = ConfigSpace.Configuration(self.target_parameter_space, values=target_values_dict)

         orig_values_dict = self._transform(target_values_dict)
-        orig_configuration = ConfigSpace.Configuration(self.orig_parameter_space, values=orig_values_dict)
+        orig_configuration = normalize_config(self.orig_parameter_space, orig_values_dict)

         # Add to inverse dictionary -- needed for registering the performance later
         self._suggested_configs[orig_configuration] = target_configuration

-        return pd.DataFrame([orig_values_dict.values()], columns=list(self.orig_parameter_space.keys()))
+        return pd.DataFrame([list(orig_configuration.values())], columns=list(orig_configuration.keys()))

     def _construct_low_dim_space(self, num_low_dims: int, max_unique_values_per_param: Optional[int]) -> None:
         """Constructs the low-dimensional parameter (potentially discretized) search space.
diff --git a/mlos_core/mlos_core/util.py b/mlos_core/mlos_core/util.py
new file mode 100644
index 00000000000..8acb654adf4
--- /dev/null
+++ b/mlos_core/mlos_core/util.py
@@ -0,0 +1,56 @@
+#
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+#
+"""
+Internal helper functions for mlos_core package.
+"""
+
+from typing import Union
+
+from ConfigSpace import Configuration, ConfigurationSpace
+import pandas as pd
+
+
+def config_to_dataframe(config: Configuration) -> pd.DataFrame:
+    """Converts a ConfigSpace config to a DataFrame
+
+    Parameters
+    ----------
+    config : ConfigSpace.Configuration
+        The config to convert.
+
+    Returns
+    -------
+    pd.DataFrame
+        A DataFrame with a single row, containing the config's parameters.
+    """
+    return pd.DataFrame([dict(config)])
+
+
+def normalize_config(config_space: ConfigurationSpace, config: Union[Configuration, dict]) -> Configuration:
+    """
+    Convert a dictionary to a valid ConfigSpace configuration.
+
+    Some optimizers and adapters ignore ConfigSpace conditionals when proposing new
+    configurations. We have to manually remove inactive hyperparameters from such suggestions.
+
+    Parameters
+    ----------
+    config_space : ConfigurationSpace
+        The parameter space to use.
+    config : Union[Configuration, dict]
+        The configuration to convert.
+
+    Returns
+    -------
+    cs_config: Configuration
+        A valid ConfigSpace configuration with inactive parameters removed.
+ """ + cs_config = Configuration(config_space, values=config, allow_inactive_with_values=True) + return Configuration( + config_space, values={ + key: cs_config[key] + for key in config_space.get_active_hyperparameters(cs_config) + } + ) From 50bcc513e386aa3c9465700ee083641477e7f5c4 Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Wed, 10 Jan 2024 13:44:18 -0800 Subject: [PATCH 27/44] bugfix: use the right config space in FLAML --- mlos_core/mlos_core/optimizers/flaml_optimizer.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mlos_core/mlos_core/optimizers/flaml_optimizer.py b/mlos_core/mlos_core/optimizers/flaml_optimizer.py index cbb7713d467..423c0558e2b 100644 --- a/mlos_core/mlos_core/optimizers/flaml_optimizer.py +++ b/mlos_core/mlos_core/optimizers/flaml_optimizer.py @@ -135,7 +135,7 @@ def _target_function(self, config: dict) -> Union[dict, None]: result: Union[dict, None] Dictionary with a single key, `score`, if config already evaluated; `None` otherwise. """ - cs_config = normalize_config(self.parameter_space, config) + cs_config = normalize_config(self.optimizer_parameter_space, config) if cs_config in self.evaluated_samples: return {'score': self.evaluated_samples[cs_config].score} @@ -166,7 +166,7 @@ def _get_next_config(self) -> dict: evaluated_rewards: list = [] if len(self.evaluated_samples) > 0: points_to_evaluate = [ - dict(normalize_config(self.parameter_space, conf)) + dict(normalize_config(self.optimizer_parameter_space, conf)) for conf in self.evaluated_samples ] evaluated_rewards = [ From 5fed0558fc8720743ea6bfee417a8b72a0d5e0c3 Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Wed, 10 Jan 2024 13:49:00 -0800 Subject: [PATCH 28/44] be a bit more verbose in parameter naming in unit tests --- .../tunables/tunable_to_configspace_test.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py index b4b2d39c70d..d2be29a4add 100644 --- a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py +++ b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py @@ -38,29 +38,32 @@ def configuration_space() -> ConfigurationSpace: configuration_space : ConfigurationSpace A new ConfigurationSpace object for testing. 
""" - (ksm_special, ksm_type) = special_param_names("kernel_sched_migration_cost_ns") + (kernel_sched_migration_cost_ns_special, + kernel_sched_migration_cost_ns_type) = special_param_names("kernel_sched_migration_cost_ns") spaces = ConfigurationSpace(space={ "vmSize": ["Standard_B2s", "Standard_B2ms", "Standard_B4ms"], "idle": ["halt", "mwait", "noidle"], "kernel_sched_migration_cost_ns": (0, 500000), - ksm_special: [-1, 0], - ksm_type: ["special", "range"], + kernel_sched_migration_cost_ns_special: [-1, 0], + kernel_sched_migration_cost_ns_type: ["special", "range"], "kernel_sched_latency_ns": (0, 1000000000), }) spaces["vmSize"].default_value = "Standard_B4ms" spaces["idle"].default_value = "halt" spaces["kernel_sched_migration_cost_ns"].default_value = 250000 - spaces[ksm_special].default_value = -1 - spaces[ksm_type].default_value = "special" - spaces[ksm_type].probabilities = (0.5, 0.5) # FLAML requires distribution to be uniform + spaces[kernel_sched_migration_cost_ns_special].default_value = -1 + spaces[kernel_sched_migration_cost_ns_type].default_value = "special" + spaces[kernel_sched_migration_cost_ns_type].probabilities = (0.5, 0.5) # FLAML requires distribution to be uniform spaces["kernel_sched_latency_ns"].default_value = 2000000 spaces.add_condition(EqualsCondition( - spaces[ksm_special], spaces[ksm_type], "special")) + spaces[kernel_sched_migration_cost_ns_special], + spaces[kernel_sched_migration_cost_ns_type], "special")) spaces.add_condition(EqualsCondition( - spaces["kernel_sched_migration_cost_ns"], spaces[ksm_type], "range")) + spaces["kernel_sched_migration_cost_ns"], + spaces[kernel_sched_migration_cost_ns_type], "range")) return spaces From 7a68ce1cf2c5142b8e2a58a1be3947599aad8d9c Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Wed, 10 Jan 2024 13:52:37 -0800 Subject: [PATCH 29/44] a stricter check for llamatune configuration parameters --- mlos_core/mlos_core/optimizers/optimizer.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mlos_core/mlos_core/optimizers/optimizer.py b/mlos_core/mlos_core/optimizers/optimizer.py index b2424403059..0a5637e15b8 100644 --- a/mlos_core/mlos_core/optimizers/optimizer.py +++ b/mlos_core/mlos_core/optimizers/optimizer.py @@ -136,8 +136,8 @@ def suggest(self, context: Optional[pd.DataFrame] = None, defaults: bool = False "Suggest returned a configuration with the wrong number of parameters." if self._space_adapter: configuration = self._space_adapter.transform(configuration) - assert set(configuration.columns).issubset(set(self.parameter_space)), \ - "Space adapter transformed configuration with the wrong number of parameters." + assert set(configuration.columns) == set(self.parameter_space), \ + "Space adapter transformed configuration has incorrect parameters." return configuration @abstractmethod From 0cc11e874f51199d16837c1ef35557121f379b00 Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Wed, 10 Jan 2024 14:02:21 -0800 Subject: [PATCH 30/44] roll back the config check --- mlos_core/mlos_core/optimizers/optimizer.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mlos_core/mlos_core/optimizers/optimizer.py b/mlos_core/mlos_core/optimizers/optimizer.py index 0a5637e15b8..b2424403059 100644 --- a/mlos_core/mlos_core/optimizers/optimizer.py +++ b/mlos_core/mlos_core/optimizers/optimizer.py @@ -136,8 +136,8 @@ def suggest(self, context: Optional[pd.DataFrame] = None, defaults: bool = False "Suggest returned a configuration with the wrong number of parameters." 
if self._space_adapter: configuration = self._space_adapter.transform(configuration) - assert set(configuration.columns) == set(self.parameter_space), \ - "Space adapter transformed configuration has incorrect parameters." + assert set(configuration.columns).issubset(set(self.parameter_space)), \ + "Space adapter transformed configuration with the wrong number of parameters." return configuration @abstractmethod From 75af02f1116af1086a33e06ecee4e61ef8316640 Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Tue, 16 Jan 2024 21:57:21 +0000 Subject: [PATCH 31/44] =?UTF-8?q?Bump=20version:=200.3.0=20=E2=86=92=200.3?= =?UTF-8?q?.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- doc/source/conf.py | 2 +- mlos_bench/_version.py | 2 +- mlos_core/_version.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index db9cf115211..fb05266cb64 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.3.0 +current_version = 0.3.1 commit = True tag = True diff --git a/doc/source/conf.py b/doc/source/conf.py index 3191c485722..c3b88d74cee 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -36,7 +36,7 @@ author = 'GSL' # The full version, including alpha/beta/rc tags -release = '0.3.0' +release = '0.3.1' try: from setuptools_scm import get_version diff --git a/mlos_bench/_version.py b/mlos_bench/_version.py index a768871f1d1..a8b115f7e33 100644 --- a/mlos_bench/_version.py +++ b/mlos_bench/_version.py @@ -7,4 +7,4 @@ """ # NOTE: This should be managed by bumpversion. -_VERSION = '0.3.0' +_VERSION = '0.3.1' diff --git a/mlos_core/_version.py b/mlos_core/_version.py index 069b397e53d..08d17a0e685 100644 --- a/mlos_core/_version.py +++ b/mlos_core/_version.py @@ -7,4 +7,4 @@ """ # NOTE: This should be managed by bumpversion. -_VERSION = '0.3.0' +_VERSION = '0.3.1' From d0304c406ec1f8b6e1cd0e3f4502f7763d0839f8 Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Tue, 16 Jan 2024 14:00:22 -0800 Subject: [PATCH 32/44] Update mlos_bench/mlos_bench/optimizers/convert_configspace.py Co-authored-by: Brian Kroth --- mlos_bench/mlos_bench/optimizers/convert_configspace.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mlos_bench/mlos_bench/optimizers/convert_configspace.py b/mlos_bench/mlos_bench/optimizers/convert_configspace.py index bc4f1e46351..232ccbd9955 100644 --- a/mlos_bench/mlos_bench/optimizers/convert_configspace.py +++ b/mlos_bench/mlos_bench/optimizers/convert_configspace.py @@ -27,7 +27,7 @@ def _tunable_to_configspace( tunable: Tunable, group_name: Optional[str] = None, cost: int = 0) -> ConfigurationSpace: """ - Convert a single Tunable to an equivalent ConfigSpace Hyperparameter objects, + Convert a single Tunable to an equivalent set of ConfigSpace Hyperparameter objects, wrapped in a ConfigurationSpace for composability. 
     Parameters

From 288c528a5f347db339dfab12367aa97954db89da Mon Sep 17 00:00:00 2001
From: Sergiy Matusevych
Date: Tue, 16 Jan 2024 14:00:39 -0800
Subject: [PATCH 33/44] Update mlos_bench/mlos_bench/optimizers/convert_configspace.py

Co-authored-by: Brian Kroth
---
 mlos_bench/mlos_bench/optimizers/convert_configspace.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/mlos_bench/mlos_bench/optimizers/convert_configspace.py b/mlos_bench/mlos_bench/optimizers/convert_configspace.py
index 232ccbd9955..099d11d25d7 100644
--- a/mlos_bench/mlos_bench/optimizers/convert_configspace.py
+++ b/mlos_bench/mlos_bench/optimizers/convert_configspace.py
@@ -29,6 +29,7 @@ def _tunable_to_configspace(
     """
     Convert a single Tunable to an equivalent set of ConfigSpace Hyperparameter objects,
     wrapped in a ConfigurationSpace for composability.
+    Note: this may more than one Hyperparameter in the case of special value handling.

     Parameters
     ----------

From cecc738cb9b3a53ea8ee6f01a3a62d23004d3c4a Mon Sep 17 00:00:00 2001
From: Sergiy Matusevych
Date: Tue, 16 Jan 2024 14:06:23 -0800
Subject: [PATCH 34/44] Update mlos_bench/mlos_bench/optimizers/convert_configspace.py

Co-authored-by: Brian Kroth
---
 mlos_bench/mlos_bench/optimizers/convert_configspace.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mlos_bench/mlos_bench/optimizers/convert_configspace.py b/mlos_bench/mlos_bench/optimizers/convert_configspace.py
index 099d11d25d7..96b9e13de4b 100644
--- a/mlos_bench/mlos_bench/optimizers/convert_configspace.py
+++ b/mlos_bench/mlos_bench/optimizers/convert_configspace.py
@@ -150,7 +150,7 @@ def tunable_values_to_configuration(tunables: TunableGroups) -> Configuration:
 def configspace_data_to_tunable_values(data: dict) -> dict:
     """
     Remove the fields that correspond to special values in ConfigSpace.
-    In particular, remove `!type__` keys and trim `!special` suffixes.
+    In particular, remove the keys and suffixes added by `special_param_names`.
""" data = data.copy() specials = [ From 981141c2b7dc20c8d5497b834adc551b2d9a25a9 Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Tue, 16 Jan 2024 14:11:14 -0800 Subject: [PATCH 35/44] sort the imports --- mlos_bench/mlos_bench/optimizers/convert_configspace.py | 6 +++--- .../tests/tunables/tunable_to_configspace_test.py | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/mlos_bench/mlos_bench/optimizers/convert_configspace.py b/mlos_bench/mlos_bench/optimizers/convert_configspace.py index bc4f1e46351..1085e57caca 100644 --- a/mlos_bench/mlos_bench/optimizers/convert_configspace.py +++ b/mlos_bench/mlos_bench/optimizers/convert_configspace.py @@ -11,12 +11,12 @@ from typing import Dict, Optional, Tuple from ConfigSpace import ( + CategoricalHyperparameter, Configuration, ConfigurationSpace, - CategoricalHyperparameter, - UniformIntegerHyperparameter, - UniformFloatHyperparameter, EqualsCondition, + UniformFloatHyperparameter, + UniformIntegerHyperparameter, ) from mlos_bench.tunables.tunable import Tunable, TunableValue from mlos_bench.tunables.tunable_groups import TunableGroups diff --git a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py index d2be29a4add..896b3526d9e 100644 --- a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py +++ b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py @@ -9,19 +9,19 @@ import pytest from ConfigSpace import ( - ConfigurationSpace, CategoricalHyperparameter, - UniformIntegerHyperparameter, - UniformFloatHyperparameter, + ConfigurationSpace, EqualsCondition, + UniformFloatHyperparameter, + UniformIntegerHyperparameter, ) from mlos_bench.tunables.tunable import Tunable from mlos_bench.tunables.tunable_groups import TunableGroups from mlos_bench.optimizers.convert_configspace import ( _tunable_to_configspace, - tunable_groups_to_configspace, special_param_names, + tunable_groups_to_configspace, ) # pylint: disable=redefined-outer-name From 6bf88ac703af25cb4eae85788232a85d9c8adea4 Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Tue, 16 Jan 2024 14:27:36 -0800 Subject: [PATCH 36/44] fix the wording in some assert messages --- mlos_core/mlos_core/optimizers/optimizer.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mlos_core/mlos_core/optimizers/optimizer.py b/mlos_core/mlos_core/optimizers/optimizer.py index b2424403059..1d4e5762af5 100644 --- a/mlos_core/mlos_core/optimizers/optimizer.py +++ b/mlos_core/mlos_core/optimizers/optimizer.py @@ -133,11 +133,11 @@ def suggest(self, context: Optional[pd.DataFrame] = None, defaults: bool = False assert len(configuration) == 1, \ "Suggest must return a single configuration." assert set(configuration.columns).issubset(set(self.optimizer_parameter_space)), \ - "Suggest returned a configuration with the wrong number of parameters." + "Optimizer suggested a configuration that does not match the expected parameter space." if self._space_adapter: configuration = self._space_adapter.transform(configuration) assert set(configuration.columns).issubset(set(self.parameter_space)), \ - "Space adapter transformed configuration with the wrong number of parameters." + "Space adapter produced a configuration that does not match the expected parameter space." 
         return configuration

     @abstractmethod

From 574db2db76c39989532456915e0c7d2b154926d5 Mon Sep 17 00:00:00 2001
From: Sergiy Matusevych
Date: Tue, 16 Jan 2024 14:43:03 -0800
Subject: [PATCH 37/44] explicitly check for `*!type` parameters instead of relying on the __ suffix

---
 mlos_bench/mlos_bench/optimizers/convert_configspace.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mlos_bench/mlos_bench/optimizers/convert_configspace.py b/mlos_bench/mlos_bench/optimizers/convert_configspace.py
index 5c843685e2c..3438a7feeab 100644
--- a/mlos_bench/mlos_bench/optimizers/convert_configspace.py
+++ b/mlos_bench/mlos_bench/optimizers/convert_configspace.py
@@ -184,7 +184,7 @@ def special_param_names(name: str) -> Tuple[str, str]:
     type_name : str
         The name of the hyperparameter that chooses between the regular and the special values.
     """
-    return (name + "!special", name + "!type__")
+    return (name + "!special", name + "!type")


 def special_param_name_is_temp(name: str) -> bool:
@@ -201,7 +201,7 @@ def special_param_name_is_temp(name: str) -> bool:
     is_special : bool
         True if the name corresponds to a temporary ConfigSpace hyperparameter.
     """
-    return name.endswith("__")
+    return name.endswith("!type")


 def special_param_name_strip(name: str) -> str:

From e74b0bc077224688ef5a7b4160c35f4d499946e4 Mon Sep 17 00:00:00 2001
From: Sergiy Matusevych
Date: Tue, 16 Jan 2024 15:15:43 -0800
Subject: [PATCH 38/44] make sure that tunables cannot have a `!` in the name

---
 .../mlos_bench/tests/tunables/tunable_definition_test.py | 8 ++++++++
 mlos_bench/mlos_bench/tunables/tunable.py                 | 2 ++
 2 files changed, 10 insertions(+)

diff --git a/mlos_bench/mlos_bench/tests/tunables/tunable_definition_test.py b/mlos_bench/mlos_bench/tests/tunables/tunable_definition_test.py
index 4b971a1db10..30da252b286 100644
--- a/mlos_bench/mlos_bench/tests/tunables/tunable_definition_test.py
+++ b/mlos_bench/mlos_bench/tests/tunables/tunable_definition_test.py
@@ -12,6 +12,14 @@
 from mlos_bench.tunables.tunable import Tunable


+def test_tunable_name() -> None:
+    """
+    Check that tunable name is valid.
+    """
+    with pytest.raises(ValueError):
+        Tunable(name='test!tunable', config={"type": "float", "range": [0, 1], "default": 0})
+
+
 def test_categorical_required_params() -> None:
     """
     Check that required parameters are present for categorical tunables.
diff --git a/mlos_bench/mlos_bench/tunables/tunable.py b/mlos_bench/mlos_bench/tunables/tunable.py
index 06d2abeadeb..17b9d1177e8 100644
--- a/mlos_bench/mlos_bench/tunables/tunable.py
+++ b/mlos_bench/mlos_bench/tunables/tunable.py
@@ -59,6 +59,8 @@ def __init__(self, name: str, config: TunableDict):
         config : dict
             Python dict that represents a Tunable (e.g., deserialized from JSON)
         """
+        if '!' in name:  # TODO: Use a regex here and in JSON schema
+            raise ValueError(f"Invalid name of the tunable: {name}")
         self._name = name
         self._type = config["type"]  # required
         if self._type not in self._DTYPE:

From dd04b21a89067064ac7d34b59af513d545b3c6c9 Mon Sep 17 00:00:00 2001
From: Sergiy Matusevych
Date: Tue, 16 Jan 2024 15:47:35 -0800
Subject: [PATCH 39/44] use enum for special and range values of the config space conditionals

---
 .../optimizers/convert_configspace.py       | 24 ++++++++++++++-----
 .../optimizers/mlos_core_optimizer.py       |  9 ++++---
 .../tunables/tunable_to_configspace_test.py |  9 +++----
 3 files changed, 29 insertions(+), 13 deletions(-)

diff --git a/mlos_bench/mlos_bench/optimizers/convert_configspace.py b/mlos_bench/mlos_bench/optimizers/convert_configspace.py
index 3438a7feeab..62e486ded40 100644
--- a/mlos_bench/mlos_bench/optimizers/convert_configspace.py
+++ b/mlos_bench/mlos_bench/optimizers/convert_configspace.py
@@ -24,6 +24,16 @@
 _LOG = logging.getLogger(__name__)


+class TunableValueKind:
+    """
+    Enum for the kind of the tunable value (special or not).
+    It is not a true enum because ConfigSpace wants string values.
+    """
+
+    SPECIAL = "special"
+    RANGE = "range"
+
+
 def _tunable_to_configspace(
         tunable: Tunable, group_name: Optional[str] = None, cost: int = 0) -> ConfigurationSpace:
     """
@@ -82,11 +92,13 @@ def _tunable_to_configspace(
             default_value=tunable.default if tunable.default in tunable.special else None,
             meta=meta),
         type_name: CategoricalHyperparameter(
-            name=type_name, choices=["special", "range"], default_value="special",
+            name=type_name,
+            choices=[TunableValueKind.SPECIAL, TunableValueKind.RANGE],
+            default_value=TunableValueKind.SPECIAL,
             weights=[0.5, 0.5]),  # TODO: Make weights configurable; FLAML requires uniform weights.
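
Putting this hunk together, the complete encoding that `_tunable_to_configspace()` emits for an int tunable with range [0, 500000] and special values [-1, 0] has the shape below; this is a sketch that mirrors the test fixture in tunable_to_configspace_test.py, not the generated code itself:

    from ConfigSpace import ConfigurationSpace, EqualsCondition

    cs = ConfigurationSpace(space={
        "cost": (0, 500000),                 # the regular value range
        "cost!special": [-1, 0],             # special values as a categorical
        "cost!type": ["special", "range"],   # selects which branch is active
    })
    cs.add_condition(EqualsCondition(cs["cost!special"], cs["cost!type"], "special"))
    cs.add_condition(EqualsCondition(cs["cost"], cs["cost!type"], "range"))
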
}) - cs.add_condition(EqualsCondition(cs[special_name], cs[type_name], "special")) - cs.add_condition(EqualsCondition(cs[tunable.name], cs[type_name], "range")) + cs.add_condition(EqualsCondition(cs[special_name], cs[type_name], TunableValueKind.SPECIAL)) + cs.add_condition(EqualsCondition(cs[tunable.name], cs[type_name], TunableValueKind.RANGE)) return cs @@ -136,10 +148,10 @@ def tunable_values_to_configuration(tunables: TunableGroups) -> Configuration: if tunable.special: (special_name, type_name) = special_param_names(tunable.name) if tunable.value in tunable.special: - values[type_name] = "special" + values[type_name] = TunableValueKind.SPECIAL values[special_name] = tunable.value else: - values[type_name] = "range" + values[type_name] = TunableValueKind.RANGE values[tunable.name] = tunable.value else: values[tunable.name] = tunable.value @@ -159,7 +171,7 @@ def configspace_data_to_tunable_values(data: dict) -> dict: ] for k in specials: (special_name, type_name) = special_param_names(k) - if data[type_name] == "special": + if data[type_name] == TunableValueKind.SPECIAL: data[k] = data[special_name] if special_name in data: del data[special_name] diff --git a/mlos_bench/mlos_bench/optimizers/mlos_core_optimizer.py b/mlos_bench/mlos_bench/optimizers/mlos_core_optimizer.py index 509dbd6f794..91b18781887 100644 --- a/mlos_bench/mlos_bench/optimizers/mlos_core_optimizer.py +++ b/mlos_bench/mlos_bench/optimizers/mlos_core_optimizer.py @@ -26,7 +26,10 @@ from mlos_bench.optimizers.base_optimizer import Optimizer from mlos_bench.optimizers.convert_configspace import ( - tunable_groups_to_configspace, configspace_data_to_tunable_values, special_param_names + TunableValueKind, + configspace_data_to_tunable_values, + special_param_names, + tunable_groups_to_configspace, ) _LOG = logging.getLogger(__name__) @@ -143,8 +146,8 @@ def _to_df(self, configs: Sequence[Dict[str, TunableValue]]) -> pd.DataFrame: (special_name, type_name) = special_param_names(tunable.name) tunables_names += [special_name, type_name] is_special = df_configs[tunable.name].apply(tunable.special.__contains__) - df_configs[type_name] = "range" - df_configs.loc[is_special, type_name] = "special" + df_configs[type_name] = TunableValueKind.RANGE + df_configs.loc[is_special, type_name] = TunableValueKind.SPECIAL if tunable.type == "int": # Make int column NULLABLE: df_configs[tunable.name] = df_configs[tunable.name].astype("Int64") diff --git a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py index 896b3526d9e..0cc7bd0f994 100644 --- a/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py +++ b/mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py @@ -19,6 +19,7 @@ from mlos_bench.tunables.tunable import Tunable from mlos_bench.tunables.tunable_groups import TunableGroups from mlos_bench.optimizers.convert_configspace import ( + TunableValueKind, _tunable_to_configspace, special_param_names, tunable_groups_to_configspace, @@ -46,7 +47,7 @@ def configuration_space() -> ConfigurationSpace: "idle": ["halt", "mwait", "noidle"], "kernel_sched_migration_cost_ns": (0, 500000), kernel_sched_migration_cost_ns_special: [-1, 0], - kernel_sched_migration_cost_ns_type: ["special", "range"], + kernel_sched_migration_cost_ns_type: [TunableValueKind.SPECIAL, TunableValueKind.RANGE], "kernel_sched_latency_ns": (0, 1000000000), }) @@ -54,16 +55,16 @@ def configuration_space() -> ConfigurationSpace: spaces["idle"].default_value = 
"halt" spaces["kernel_sched_migration_cost_ns"].default_value = 250000 spaces[kernel_sched_migration_cost_ns_special].default_value = -1 - spaces[kernel_sched_migration_cost_ns_type].default_value = "special" + spaces[kernel_sched_migration_cost_ns_type].default_value = TunableValueKind.SPECIAL spaces[kernel_sched_migration_cost_ns_type].probabilities = (0.5, 0.5) # FLAML requires distribution to be uniform spaces["kernel_sched_latency_ns"].default_value = 2000000 spaces.add_condition(EqualsCondition( spaces[kernel_sched_migration_cost_ns_special], - spaces[kernel_sched_migration_cost_ns_type], "special")) + spaces[kernel_sched_migration_cost_ns_type], TunableValueKind.SPECIAL)) spaces.add_condition(EqualsCondition( spaces["kernel_sched_migration_cost_ns"], - spaces[kernel_sched_migration_cost_ns_type], "range")) + spaces[kernel_sched_migration_cost_ns_type], TunableValueKind.RANGE)) return spaces From 1ee4bd016fd5c1df4c6ae9354fd243d59e3213d2 Mon Sep 17 00:00:00 2001 From: Brian Kroth Date: Wed, 17 Jan 2024 14:39:20 -0600 Subject: [PATCH 40/44] Update mlos_bench/mlos_bench/optimizers/convert_configspace.py --- mlos_bench/mlos_bench/optimizers/convert_configspace.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mlos_bench/mlos_bench/optimizers/convert_configspace.py b/mlos_bench/mlos_bench/optimizers/convert_configspace.py index 62e486ded40..7d5b7f891c0 100644 --- a/mlos_bench/mlos_bench/optimizers/convert_configspace.py +++ b/mlos_bench/mlos_bench/optimizers/convert_configspace.py @@ -39,7 +39,7 @@ def _tunable_to_configspace( """ Convert a single Tunable to an equivalent set of ConfigSpace Hyperparameter objects, wrapped in a ConfigurationSpace for composability. - Note: this may more than one Hyperparameter in the case of special value handling. + Note: this may be more than one Hyperparameter in the case of special value handling. Parameters ---------- From 86e323ca32eb8a46176448c9c42170e8f4df5489 Mon Sep 17 00:00:00 2001 From: Brian Kroth Date: Wed, 17 Jan 2024 14:43:27 -0600 Subject: [PATCH 41/44] Update mlos_bench/mlos_bench/tests/tunables/tunable_definition_test.py --- mlos_bench/mlos_bench/tests/tunables/tunable_definition_test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/mlos_bench/mlos_bench/tests/tunables/tunable_definition_test.py b/mlos_bench/mlos_bench/tests/tunables/tunable_definition_test.py index 30da252b286..5a100b59d1a 100644 --- a/mlos_bench/mlos_bench/tests/tunables/tunable_definition_test.py +++ b/mlos_bench/mlos_bench/tests/tunables/tunable_definition_test.py @@ -17,6 +17,7 @@ def test_tunable_name() -> None: Check that tunable name is valid. """ with pytest.raises(ValueError): + # ! 
characters are currently disallowed in tunable names
         Tunable(name='test!tunable', config={"type": "float", "range": [0, 1], "default": 0})


From c39f0975a2c17cffe9659a2f7a169e60ec90a879 Mon Sep 17 00:00:00 2001
From: Sergiy Matusevych
Date: Wed, 17 Jan 2024 13:05:37 -0800
Subject: [PATCH 42/44] Update mlos_bench/mlos_bench/optimizers/convert_configspace.py

Co-authored-by: Brian Kroth
---
 mlos_bench/mlos_bench/optimizers/convert_configspace.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/mlos_bench/mlos_bench/optimizers/convert_configspace.py b/mlos_bench/mlos_bench/optimizers/convert_configspace.py
index 7d5b7f891c0..f76de66e772 100644
--- a/mlos_bench/mlos_bench/optimizers/convert_configspace.py
+++ b/mlos_bench/mlos_bench/optimizers/convert_configspace.py
@@ -178,6 +178,7 @@ def configspace_data_to_tunable_values(data: dict) -> dict:
         del data[type_name]
     return data

+# Note: `!` characters are currently disallowed in Tunable names in order to handle this logic.

 def special_param_names(name: str) -> Tuple[str, str]:

From b56bca611aa0107612a3596c664aa63f64ee2652 Mon Sep 17 00:00:00 2001
From: Sergiy Matusevych
Date: Wed, 17 Jan 2024 13:08:16 -0800
Subject: [PATCH 43/44] add a note to docstring regarding `!` in tunable names

---
 mlos_bench/mlos_bench/optimizers/convert_configspace.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/mlos_bench/mlos_bench/optimizers/convert_configspace.py b/mlos_bench/mlos_bench/optimizers/convert_configspace.py
index f76de66e772..2b310cfc004 100644
--- a/mlos_bench/mlos_bench/optimizers/convert_configspace.py
+++ b/mlos_bench/mlos_bench/optimizers/convert_configspace.py
@@ -178,13 +178,14 @@ def configspace_data_to_tunable_values(data: dict) -> dict:
         del data[type_name]
     return data

-# Note: `!` characters are currently disallowed in Tunable names in order to handle this logic.

 def special_param_names(name: str) -> Tuple[str, str]:
     """
     Generate the names of the auxiliary hyperparameters that correspond
     to a tunable that can have special values.

+    NOTE: `!` characters are currently disallowed in Tunable names in order to handle this logic.
+
     Parameters
     ----------
     name : str
@@ -204,6 +205,8 @@ def special_param_name_is_temp(name: str) -> bool:
     """
     Check if name corresponds to a temporary ConfigSpace parameter.

+    NOTE: `!` characters are currently disallowed in Tunable names in order to handle this logic.
+
     Parameters
     ----------
     name : str
@@ -221,6 +224,8 @@ def special_param_name_strip(name: str) -> str:
     """
     Remove the temporary suffix from a special parameter name.

+    NOTE: `!` characters are currently disallowed in Tunable names in order to handle this logic.
+ Parameters ---------- name : str From b2e5ec3b0fc6ff130e81870b303d1c2c74b4df27 Mon Sep 17 00:00:00 2001 From: Sergiy Matusevych Date: Wed, 17 Jan 2024 13:17:13 -0800 Subject: [PATCH 44/44] implement `Tunable.is_special` and use it in unit tests --- .../tests/tunables/tunables_assign_test.py | 17 +++++++++++++++++ mlos_bench/mlos_bench/tunables/tunable.py | 12 ++++++++++++ 2 files changed, 29 insertions(+) diff --git a/mlos_bench/mlos_bench/tests/tunables/tunables_assign_test.py b/mlos_bench/mlos_bench/tests/tunables/tunables_assign_test.py index ec3e40b8f35..bdc90ba8ac3 100644 --- a/mlos_bench/mlos_bench/tests/tunables/tunables_assign_test.py +++ b/mlos_bench/mlos_bench/tests/tunables/tunables_assign_test.py @@ -27,6 +27,15 @@ def test_tunables_assign_unknown_param(tunable_groups: TunableGroups) -> None: }) +def test_tunables_assign_categorical(tunable_categorical: Tunable) -> None: + """ + Regular assignment for categorical tunable. + """ + # Must be one of: {"Standard_B2s", "Standard_B2ms", "Standard_B4ms"} + tunable_categorical.value = "Standard_B4ms" + assert not tunable_categorical.is_special + + def test_tunables_assign_invalid_categorical(tunable_groups: TunableGroups) -> None: """ Check parameter validation for categorical tunables. @@ -80,6 +89,7 @@ def test_tunable_assign_int_to_numerical_value(tunable_int: Tunable) -> None: """ tunable_int.numerical_value = 10.0 assert tunable_int.numerical_value == 10 + assert not tunable_int.is_special def test_tunable_assign_float_to_numerical_value(tunable_float: Tunable) -> None: @@ -88,6 +98,7 @@ def test_tunable_assign_float_to_numerical_value(tunable_float: Tunable) -> None """ tunable_float.numerical_value = 0.1 assert tunable_float.numerical_value == 0.1 + assert not tunable_float.is_special def test_tunable_assign_str_to_int(tunable_int: Tunable) -> None: @@ -96,6 +107,7 @@ def test_tunable_assign_str_to_int(tunable_int: Tunable) -> None: """ tunable_int.value = "10" assert tunable_int.value == 10 # type: ignore[comparison-overlap] + assert not tunable_int.is_special def test_tunable_assign_str_to_float(tunable_float: Tunable) -> None: @@ -104,6 +116,7 @@ def test_tunable_assign_str_to_float(tunable_float: Tunable) -> None: """ tunable_float.value = "0.5" assert tunable_float.value == 0.5 # type: ignore[comparison-overlap] + assert not tunable_float.is_special def test_tunable_assign_float_to_int(tunable_int: Tunable) -> None: @@ -112,6 +125,7 @@ def test_tunable_assign_float_to_int(tunable_int: Tunable) -> None: """ tunable_int.value = 10.0 assert tunable_int.value == 10 + assert not tunable_int.is_special def test_tunable_assign_float_to_int_fail(tunable_int: Tunable) -> None: @@ -170,6 +184,7 @@ def test_tunable_assign_special(tunable_int: Tunable) -> None: """ tunable_int.numerical_value = -1 assert tunable_int.numerical_value == -1 + assert tunable_int.is_special def test_tunable_assign_special_fail(tunable_int: Tunable) -> None: @@ -187,6 +202,7 @@ def test_tunable_assign_special_with_coercion(tunable_int: Tunable) -> None: """ tunable_int.numerical_value = -1.0 assert tunable_int.numerical_value == -1 + assert tunable_int.is_special def test_tunable_assign_special_with_coercion_str(tunable_int: Tunable) -> None: @@ -196,3 +212,4 @@ def test_tunable_assign_special_with_coercion_str(tunable_int: Tunable) -> None: """ tunable_int.value = "-1" assert tunable_int.numerical_value == -1 + assert tunable_int.is_special diff --git a/mlos_bench/mlos_bench/tunables/tunable.py b/mlos_bench/mlos_bench/tunables/tunable.py index 
17b9d1177e8..8992e9d96b3 100644 --- a/mlos_bench/mlos_bench/tunables/tunable.py +++ b/mlos_bench/mlos_bench/tunables/tunable.py @@ -357,6 +357,18 @@ def special(self) -> Union[List[int], List[float]]: """ return self._special + @property + def is_special(self) -> bool: + """ + Check if the current value of the tunable is special. + + Returns + ------- + is_special : bool + True if the current value of the tunable is special, False otherwise. + """ + return self.value in self._special + @property def type(self) -> str: """
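
For reference, the conditional space that the convert_configspace changes above build for one
tunable with a special value looks roughly like the following sketch. This is not part of the
patches: the "!type"/"!special" parameter name suffixes are an assumption inferred from the
helper and variable names in the diffs, and the raw "special"/"range" strings stand in for the
TunableValueKind.SPECIAL/RANGE members used in the test expectations.

    from ConfigSpace import ConfigurationSpace
    from ConfigSpace.conditions import EqualsCondition
    from ConfigSpace.hyperparameters import (
        CategoricalHyperparameter,
        UniformIntegerHyperparameter,
    )

    cs = ConfigurationSpace()
    cs.add_hyperparameters([
        # Switch that decides whether the special or the regular value is used:
        CategoricalHyperparameter(
            "kernel_sched_migration_cost_ns!type",
            choices=["special", "range"], default_value="special",
            weights=[0.5, 0.5]),  # FLAML requires the distribution to be uniform
        # Special values live in their own (categorical) dimension:
        CategoricalHyperparameter(
            "kernel_sched_migration_cost_ns!special",
            choices=[-1], default_value=-1),
        # Regular values use the tunable's range, which no longer includes -1:
        UniformIntegerHyperparameter(
            "kernel_sched_migration_cost_ns",
            lower=0, upper=500000, default_value=250000),
    ])
    # Exactly one of the two value dimensions is active, depending on the switch:
    cs.add_condition(EqualsCondition(
        cs.get_hyperparameter("kernel_sched_migration_cost_ns!special"),
        cs.get_hyperparameter("kernel_sched_migration_cost_ns!type"), "special"))
    cs.add_condition(EqualsCondition(
        cs.get_hyperparameter("kernel_sched_migration_cost_ns"),
        cs.get_hyperparameter("kernel_sched_migration_cost_ns!type"), "range"))

This is why the tunable's range no longer needs to contain the special value: the optimizer
samples either a special value or an in-range one, never a single interval covering both.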
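
And a minimal usage sketch of the `Tunable.is_special` property introduced in the last patch,
reusing the integer-tunable config from the test fixtures above (illustrative only; the
variable names are not from the patches):

    from mlos_bench.tunables.tunable import Tunable

    # Same shape as the integer fixture: the special value -1 lies
    # outside of the [0, 500000] range.
    tunable = Tunable(name="kernel_sched_migration_cost_ns", config={
        "type": "int",
        "default": -1,
        "range": [0, 500000],
        "special": [-1],
    })

    assert tunable.is_special        # the default -1 is the special value
    tunable.value = 250000           # a regular in-range assignment
    assert not tunable.is_special
    tunable.value = "-1"             # strings are coerced, as in the tests above
    assert tunable.numerical_value == -1
    assert tunable.is_special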