From 3410d348a8230cf6e398625bf749396966ca7093 Mon Sep 17 00:00:00 2001
From: Lyubov Yamshchikova
Date: Tue, 28 Nov 2023 17:03:35 +0300
Subject: [PATCH 01/17] Add initial search for sequential

---
 golem/core/tuning/hyperopt_tuner.py | 61 ++++++++++++++++++++---
 golem/core/tuning/sequential.py     | 75 +++++++++++++++++++----------
 golem/core/tuning/simultaneous.py   | 64 ++++--------------------
 3 files changed, 114 insertions(+), 86 deletions(-)

diff --git a/golem/core/tuning/hyperopt_tuner.py b/golem/core/tuning/hyperopt_tuner.py
index cf6471a6..f90865fc 100644
--- a/golem/core/tuning/hyperopt_tuner.py
+++ b/golem/core/tuning/hyperopt_tuner.py
@@ -1,15 +1,17 @@
 from abc import ABC
 from datetime import timedelta
-from typing import Callable, Dict, Optional
+from typing import Callable, Dict, Optional, Tuple, Any
 
 import numpy as np
-from hyperopt import hp, tpe
+from hyperopt import hp, tpe, fmin, Trials
 from hyperopt.early_stop import no_progress_loss
 from hyperopt.pyll import Apply, scope
 from hyperopt.pyll_utils import validate_label
 
 from golem.core.adapter import BaseOptimizationAdapter
+from golem.core.dag.linked_graph_node import LinkedGraphNode
 from golem.core.log import default_log
+from golem.core.optimisers.graph import OptGraph
 from golem.core.optimisers.objective import ObjectiveFunction
 from golem.core.tuning.search_space import SearchSpace, get_node_operation_parameter_label
 from golem.core.tuning.tuner_interface import BaseTuner
@@ -64,6 +66,49 @@ def __init__(self, objective_evaluate: ObjectiveFunction,
         self.algo = algo
         self.log = default_log(self)
 
+    def _search_near_initial_parameters(self,
+                                        objective,
+                                        search_space: dict,
+                                        initial_parameters: dict,
+                                        trials: Trials,
+                                        remaining_time: float,
+                                        show_progress: bool = True) -> Tuple[Trials, int]:
+        """ Method to search using the search space where parameters initially set for the graph are fixed.
+        This allows not losing the results obtained during the composition process.
+
+        Args:
+            objective: objective function to be minimized
+            search_space: dict with parameters to be optimized and their search spaces
+            initial_parameters: dict with initial parameters of the graph
+            trials: Trials object to store all the search iterations
+            remaining_time: time (in seconds) left for tuning
+            show_progress: shows progress of tuning if True
+
+        Returns:
+            trials: Trials object storing all the search trials
+            init_trials_num: number of iterations made using the search space with fixed initial parameters
+        """
+        try_initial_parameters = initial_parameters and self.iterations > 1
+        if not try_initial_parameters:
+            init_trials_num = 0
+            return trials, init_trials_num
+
+        is_init_params_full = len(initial_parameters) == len(search_space)
+        if self.iterations < 10 or is_init_params_full:
+            init_trials_num = 1
+        else:
+            init_trials_num = min(int(self.iterations * 0.1), 10)
+
+        # fmin updates trials with evaluation points tried out during the call
+        fmin(objective,
+             search_space,
+             trials=trials,
+             algo=self.algo,
+             max_evals=init_trials_num,
+             show_progressbar=show_progress,
+             early_stop_fn=self.early_stop_fn,
+             timeout=remaining_time)
+        return trials, init_trials_num
+
 
 def get_parameter_hyperopt_space(search_space: SearchSpace,
                                  operation_name: str,
@@ -96,8 +141,8 @@ def get_parameter_hyperopt_space(search_space: SearchSpace,
     return None
 
 
-def get_node_parameters_for_hyperopt(search_space: SearchSpace, node_id: int, operation_name: str) \
-        -> Dict[str, Apply]:
+def get_node_parameters_for_hyperopt(search_space: SearchSpace, node_id: int, node: LinkedGraphNode) \
+        -> Tuple[Dict[str, Apply], Dict[str, Any]]:
     """
     Function for forming dictionary with hyperparameters of the node operation for the ``HyperoptTuner``
 
@@ -112,15 +157,19 @@ def get_node_parameters_for_hyperopt(search_space: SearchSpace, node_id: int, op
     """
 
     # Get available parameters for current operation
+    operation_name = node.name
     parameters_list = search_space.get_parameters_for_operation(operation_name)
 
     parameters_dict = {}
+    initial_parameters = {}
     for parameter_name in parameters_list:
         node_op_parameter_name = get_node_operation_parameter_label(node_id, operation_name, parameter_name)
 
         # For operation get range where search can be done
         space = get_parameter_hyperopt_space(search_space, operation_name, parameter_name, node_op_parameter_name)
         parameters_dict.update({node_op_parameter_name: space})
-    return parameters_dict
+        if parameter_name in node.parameters:
+            initial_parameters.update({node_op_parameter_name: node.parameters[parameter_name]})
+
+    return parameters_dict, initial_parameters
diff --git a/golem/core/tuning/sequential.py b/golem/core/tuning/sequential.py
index 003bb170..ac5cf23e 100644
--- a/golem/core/tuning/sequential.py
+++ b/golem/core/tuning/sequential.py
@@ -1,10 +1,11 @@
 from datetime import timedelta
 from functools import partial
-from typing import Callable, Optional
+from typing import Callable, Optional, Tuple
 
-from hyperopt import tpe, fmin, space_eval
+from hyperopt import tpe, fmin, space_eval, Trials
 
 from golem.core.adapter import BaseOptimizationAdapter
+from golem.core.constants import MIN_TIME_FOR_TUNING_IN_SEC
 from golem.core.optimisers.graph import OptGraph
 from golem.core.optimisers.objective import ObjectiveFunction
 from golem.core.tuning.hyperopt_tuner import HyperoptTuner, get_node_parameters_for_hyperopt
@@ -64,18 +65,18 @@ def _tune(self, graph: DomainGraphForTune, **kwargs) -> DomainGraphForTune:
         nodes_ids = self.get_nodes_order(nodes_number=nodes_amount)
 
         for node_id in nodes_ids:
             node = graph.nodes[node_id]
-            operation_name = node.name
 
             # Get node's parameters to optimize
-            node_params = get_node_parameters_for_hyperopt(self.search_space, node_id, operation_name)
+            node_params, init_params = get_node_parameters_for_hyperopt(self.search_space, node_id, node)
 
             if not node_params:
-                self.log.info(f'"{operation_name}" operation has no parameters to optimize')
+                self.log.info(f'"{node.name}" operation has no parameters to optimize')
             else:
                 # Apply tuning for current node
                 self._optimize_node(node_id=node_id,
                                     graph=graph,
                                     node_params=node_params,
+                                    init_params=init_params,
                                     iterations_per_node=iterations_per_node,
                                     seconds_per_node=seconds_per_node)
 
@@ -114,22 +115,22 @@ def tune_node(self, graph: DomainGraphForTune, node_index: int) -> DomainGraphFo
         self.init_check(graph)
 
         node = graph.nodes[node_index]
-        operation_name = node.name
 
         # Get node's parameters to optimize
-        node_params = get_node_parameters_for_hyperopt(self.search_space,
-                                                       node_id=node_index,
-                                                       operation_name=operation_name)
+        node_params, init_params = get_node_parameters_for_hyperopt(self.search_space,
+                                                                    node_id=node_index,
+                                                                    node=node)
 
         remaining_time = self._get_remaining_time()
         if self._check_if_tuning_possible(graph, len(node_params) > 1, remaining_time):
             # Apply tuning for current node
-            self._optimize_node(graph=graph,
-                                node_id=node_index,
-                                node_params=node_params,
-                                iterations_per_node=self.iterations,
-                                seconds_per_node=remaining_time
-                                )
+            graph = self._optimize_node(graph=graph,
+                                        node_id=node_index,
+                                        node_params=node_params,
+                                        init_params=init_params,
+                                        iterations_per_node=self.iterations,
+                                        seconds_per_node=remaining_time
+                                        )
             self.was_tuned = True
 
         # Validate whether the optimization did well
@@ -143,6 +144,7 @@ def tune_node(self, graph: DomainGraphForTune, node_index: int) -> DomainGraphFo
     def _optimize_node(self, graph: OptGraph,
                        node_id: int,
                        node_params: dict,
+                       init_params: dict,
                        iterations_per_node: int,
                        seconds_per_node: float) -> OptGraph:
         """
@@ -158,20 +160,40 @@ def _optimize_node(self, graph: OptGraph,
         Returns:
             updated graph with tuned parameters in particular node
         """
-        best_parameters = fmin(partial(self._objective, graph=graph, node_id=node_id),
-                               node_params,
-                               algo=self.algo,
-                               max_evals=iterations_per_node,
-                               early_stop_fn=self.early_stop_fn,
-                               timeout=seconds_per_node)
-
-        best_parameters = space_eval(space=node_params, hp_assignment=best_parameters)
+        remaining_time = self._get_remaining_time()
+        trials = Trials()
+        trials, init_trials_num = self._search_near_initial_parameters(partial(self._objective,
+                                                                               graph=graph,
+                                                                               node_id=node_id,
+                                                                               unchangeable_parameters=init_params),
+                                                                       node_params,
+                                                                       init_params,
+                                                                       trials,
+                                                                       remaining_time)
+        remaining_time = self._get_remaining_time()
+        if remaining_time > MIN_TIME_FOR_TUNING_IN_SEC:
+            fmin(partial(self._objective, graph=graph, node_id=node_id),
+                 node_params,
+                 trials=trials,
+                 algo=self.algo,
+                 max_evals=iterations_per_node,
+                 early_stop_fn=self.early_stop_fn,
+                 timeout=seconds_per_node)
+
+        best = space_eval(space=node_params, hp_assignment=trials.argmin)
+        is_best_trial_with_init_params = trials.best_trial.get('tid') in range(init_trials_num)
+        if is_best_trial_with_init_params:
+            best = {**best, **init_params}
 
         # Set best params for this node in the graph
-        graph = self.set_arg_node(graph=graph, node_id=node_id, node_params=best_parameters)
+        graph = self.set_arg_node(graph=graph, node_id=node_id, node_params=best)
         return graph
 
-    def _objective(self, node_params: dict, graph: OptGraph, node_id: int) -> float:
+    def _objective(self,
+                   node_params: dict,
+                   graph: OptGraph,
+                   node_id: int,
+                   unchangeable_parameters: Optional[dict] = None) -> float:
         """ Objective function for minimization problem
 
         Args:
@@ -182,6 +204,9 @@ def _objective(self, node_params: dict, graph: OptGraph, node_id: int) -> float:
         Returns:
             value of objective function
         """
+        # Replace the sampled parameters with the fixed initial ones where given
+        if unchangeable_parameters:
+            parameters_dict = {**node_params, **unchangeable_parameters}
 
         # Set hyperparameters for node
         graph = self.set_arg_node(graph=graph, node_id=node_id, node_params=node_params)
diff --git a/golem/core/tuning/simultaneous.py b/golem/core/tuning/simultaneous.py
index f540114e..3005c243 100644
--- a/golem/core/tuning/simultaneous.py
+++ b/golem/core/tuning/simultaneous.py
@@ -6,7 +6,6 @@
 from golem.core.constants import MIN_TIME_FOR_TUNING_IN_SEC
 from golem.core.optimisers.graph import OptGraph
 from golem.core.tuning.hyperopt_tuner import HyperoptTuner, get_node_parameters_for_hyperopt
-from golem.core.tuning.search_space import get_node_operation_parameter_label
 from golem.core.tuning.tuner_interface import DomainGraphForTune
 
 
@@ -28,13 +27,15 @@ def _tune(self, graph: DomainGraphForTune, show_progress: bool = True) -> Domain
         parameters_dict, init_parameters = self._get_parameters_for_tune(graph)
 
         remaining_time = self._get_remaining_time()
-        if self._check_if_tuning_possible(graph, parameters_dict, remaining_time):
+        if self._check_if_tuning_possible(graph, len(parameters_dict) > 0, remaining_time):
             trials = Trials()
 
             try:
                 # try searching using initial parameters
                 # (uses original search space with fixed initial parameters)
-                trials, init_trials_num = self._search_near_initial_parameters(graph,
+                trials, init_trials_num = self._search_near_initial_parameters(partial(self._objective,
+                                                                                       graph=graph,
+                                                                                       unchangeable_parameters=init_parameters),
                                                                                parameters_dict,
                                                                                init_parameters,
                                                                                trials,
@@ -70,48 +71,6 @@ def _tune(self, graph: DomainGraphForTune, show_progress: bool = True) -> Domain
             final_graph = graph
 
         return final_graph
 
-    def _search_near_initial_parameters(self, graph: OptGraph,
-                                        search_space: dict,
-                                        initial_parameters: dict,
-                                        trials: Trials,
-                                        remaining_time: float,
-                                        show_progress: bool = True) -> Tuple[Trials, int]:
-        """ Method to search using the search space where parameters initially set for the graph are fixed.
-        This allows not to lose results obtained while composition process
-
-        Args:
-            graph: graph to be tuned
-            search_space: dict with parameters to be optimized and their search spaces
-            initial_parameters: dict with initial parameters of the graph
-            trials: Trials object to store all the search iterations
-            show_progress: shows progress of tuning if True
-
-        Returns:
-            trials: Trials object storing all the search trials
-            init_trials_num: number of iterations made using the search space with fixed initial parameters
-        """
-        try_initial_parameters = initial_parameters and self.iterations > 1
-        if not try_initial_parameters:
-            init_trials_num = 0
-            return trials, init_trials_num
-
-        is_init_params_full = len(initial_parameters) == len(search_space)
-        if self.iterations < 10 or is_init_params_full:
-            init_trials_num = 1
-        else:
-            init_trials_num = min(int(self.iterations * 0.1), 10)
-
-        # fmin updates trials with evaluation points tried out during the call
-        fmin(partial(self._objective, graph=graph, unchangeable_parameters=initial_parameters),
-             search_space,
-             trials=trials,
-             algo=self.algo,
-             max_evals=init_trials_num,
-             show_progressbar=show_progress,
-             early_stop_fn=self.early_stop_fn,
-             timeout=remaining_time)
-        return trials, init_trials_num
-
     def _get_parameters_for_tune(self, graph: OptGraph) -> Tuple[dict, dict]:
         """ Method for defining the search space
 
@@ -130,16 +89,11 @@ def _get_parameters_for_tune(self, graph: OptGraph) -> Tuple[dict, dict]:
 
             # Assign unique prefix for each model hyperparameter
             # label - number of node in the graph
-            node_params = get_node_parameters_for_hyperopt(self.search_space, node_id=node_id,
-                                                           operation_name=operation_name)
-            parameters_dict.update(node_params)
-
-            tunable_node_params = self.search_space.get_parameters_for_operation(operation_name)
-            if tunable_node_params:
-                tunable_initial_params = {get_node_operation_parameter_label(node_id, operation_name, p):
-                                          node.parameters[p] for p in node.parameters if p in tunable_node_params}
-                if tunable_initial_params:
-                    initial_parameters.update(tunable_initial_params)
+            tunable_node_params, initial_node_params = get_node_parameters_for_hyperopt(self.search_space,
+                                                                                        node_id=node_id,
+                                                                                        node=node)
+            parameters_dict.update(tunable_node_params)
+            initial_parameters.update(initial_node_params)
 
         return parameters_dict, initial_parameters
 

From 873412b9bcbc533481bf01f6b069a23ae3dec3d2 Mon Sep 17 00:00:00 2001
From: Lyubov Yamshchikova
Date: Tue, 28 Nov 2023 18:48:40 +0300
Subject: [PATCH 02/17] Save best params for sequential

---
 golem/core/tuning/sequential.py | 40 ++++++++++++++++++++------------
 1 file changed, 24 insertions(+), 16 deletions(-)

diff --git a/golem/core/tuning/sequential.py b/golem/core/tuning/sequential.py
index ac5cf23e..5ffa7e4a 100644
--- a/golem/core/tuning/sequential.py
+++ b/golem/core/tuning/sequential.py
@@ -1,3 +1,4 @@
+from copy import deepcopy
 from datetime import timedelta
 from functools import partial
 from typing import Callable, Optional, Tuple
@@ -48,6 +49,7 @@ def _tune(self, graph: DomainGraphForTune, **kwargs) -> DomainGraphForTune:
         remaining_time = self._get_remaining_time()
         if self._check_if_tuning_possible(graph, parameters_to_optimize=True, remaining_time=remaining_time):
             # Calculate amount of iterations we can apply per node
+            initial_graph = deepcopy(graph)
             nodes_amount = graph.length
             iterations_per_node = round(self.iterations / nodes_amount)
             iterations_per_node = int(iterations_per_node)
@@ -64,18 +66,23 @@ def _tune(self, graph: DomainGraphForTune, **kwargs) -> DomainGraphForTune:
             # Get node's parameters to optimize
             node_params, init_params = get_node_parameters_for_hyperopt(self.search_space, node_id, node)
-
+            best_metric = self.init_metric
+            best_parameters = {}
             if not node_params:
                 self.log.info(f'"{node.name}" operation has no parameters to optimize')
             else:
                 # Apply tuning for current node
-                self._optimize_node(node_id=node_id,
-                                    graph=graph,
-                                    node_params=node_params,
-                                    init_params=init_params,
-                                    iterations_per_node=iterations_per_node,
-                                    seconds_per_node=seconds_per_node)
-
+                graph, metric, parameters = self._optimize_node(node_id=node_id,
+                                                                graph=graph,
+                                                                node_params=node_params,
+                                                                init_params=init_params,
+                                                                iterations_per_node=iterations_per_node,
+                                                                seconds_per_node=seconds_per_node)
+                if metric < best_metric:
+                    best_metric = metric
+                    best_parameters.update(parameters)
+            if best_parameters:
+                self.set_arg_graph(initial_graph, best_parameters)
         self.was_tuned = True
         return graph
@@ -124,13 +131,14 @@ def tune_node(self, graph: DomainGraphForTune, node_index: int) -> DomainGraphFo
         remaining_time = self._get_remaining_time()
         if self._check_if_tuning_possible(graph, len(node_params) > 1, remaining_time):
             # Apply tuning for current node
-            graph = self._optimize_node(graph=graph,
-                                        node_id=node_index,
-                                        node_params=node_params,
-                                        init_params=init_params,
-                                        iterations_per_node=self.iterations,
-                                        seconds_per_node=remaining_time
-                                        )
+            graph, _, _ = self._optimize_node(graph=graph,
+                                              node_id=node_index,
+                                              node_params=node_params,
+                                              init_params=init_params,
+                                              iterations_per_node=self.iterations,
+                                              seconds_per_node=remaining_time
+                                              )
+
             self.was_tuned = True
 
         # Validate whether the optimization did well
@@ -187,7 +195,7 @@ def _optimize_node(self, graph: OptGraph,
             best = {**best, **init_params}
         # Set best params for this node in the graph
         graph = self.set_arg_node(graph=graph, node_id=node_id, node_params=best)
-        return graph
+        return graph, trials.best_trial['result']['loss'], best
 
     def _objective(self,
                    node_params: dict,

From d2186e997523baf9a51312bd652a1097b7fa2162 Mon Sep 17 00:00:00 2001
From: Lyubov Yamshchikova
Date: Tue, 28 Nov 2023 18:54:02 +0300
Subject: [PATCH 03/17] Fix best graph sequential

---
 golem/core/tuning/sequential.py | 38 +++++++++++++++------------------
 1 file changed, 17 insertions(+), 21 deletions(-)

diff --git a/golem/core/tuning/sequential.py b/golem/core/tuning/sequential.py
index 5ffa7e4a..47925dbd 100644
--- a/golem/core/tuning/sequential.py
+++ b/golem/core/tuning/sequential.py
@@ -49,7 +49,6 @@ def _tune(self, graph: DomainGraphForTune, **kwargs) -> DomainGraphForTune:
         remaining_time = self._get_remaining_time()
         if self._check_if_tuning_possible(graph, parameters_to_optimize=True, remaining_time=remaining_time):
             # Calculate amount of iterations we can apply per node
-            initial_graph = deepcopy(graph)
             nodes_amount = graph.length
             iterations_per_node = round(self.iterations / nodes_amount)
             iterations_per_node = int(iterations_per_node)
@@ -65,29 +64,25 @@ def _tune(self, graph: DomainGraphForTune, **kwargs) -> DomainGraphForTune:
 
         # Tuning performed sequentially for every node - so get ids of nodes
         nodes_ids = self.get_nodes_order(nodes_number=nodes_amount)
-
         for node_id in nodes_ids:
             node = graph.nodes[node_id]
 
             # Get node's parameters to optimize
             node_params, init_params = get_node_parameters_for_hyperopt(self.search_space, node_id, node)
             best_metric = self.init_metric
-            best_parameters = {}
+            final_graph = deepcopy(self.init_graph)
             if not node_params:
                 self.log.info(f'"{node.name}" operation has no parameters to optimize')
             else:
                 # Apply tuning for current node
-                graph, metric, parameters = self._optimize_node(node_id=node_id,
-                                                                graph=graph,
-                                                                node_params=node_params,
-                                                                init_params=init_params,
-                                                                iterations_per_node=iterations_per_node,
-                                                                seconds_per_node=seconds_per_node)
+                graph, metric = self._optimize_node(node_id=node_id,
+                                                    graph=graph,
+                                                    node_params=node_params,
+                                                    init_params=init_params,
+                                                    iterations_per_node=iterations_per_node,
+                                                    seconds_per_node=seconds_per_node)
                 if metric < best_metric:
-                    best_metric = metric
-                    best_parameters.update(parameters)
-            if best_parameters:
-                self.set_arg_graph(initial_graph, best_parameters)
+                    final_graph = deepcopy(graph)
         self.was_tuned = True
-        return graph
+        return final_graph
@@ -132,13 +127,13 @@ def tune_node(self, graph: DomainGraphForTune, node_index: int) -> DomainGraphFo
         remaining_time = self._get_remaining_time()
         if self._check_if_tuning_possible(graph, len(node_params) > 1, remaining_time):
             # Apply tuning for current node
-            graph, _, _ = self._optimize_node(graph=graph,
-                                              node_id=node_index,
-                                              node_params=node_params,
-                                              init_params=init_params,
-                                              iterations_per_node=self.iterations,
-                                              seconds_per_node=remaining_time
-                                              )
+            graph, _ = self._optimize_node(graph=graph,
+                                           node_id=node_index,
+                                           node_params=node_params,
+                                           init_params=init_params,
+                                           iterations_per_node=self.iterations,
+                                           seconds_per_node=remaining_time
+                                           )
 
             self.was_tuned = True
@@ -195,7 +191,7 @@ def _optimize_node(self, graph: OptGraph,
             best = {**best, **init_params}
         # Set best params for this node in the graph
         graph = self.set_arg_node(graph=graph, node_id=node_id, node_params=best)
-        return graph, trials.best_trial['result']['loss'], best
+        return graph, trials.best_trial['result']['loss']

From 1b4509edced1c4e28851f9f9c57bf76b632c1d2c Mon Sep 17 00:00:00 2001
From: Lyubov Yamshchikova
Date: Mon, 4 Dec 2023 16:21:54 +0300
Subject: [PATCH 04/17] Minor

---
 golem/core/tuning/sequential.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/golem/core/tuning/sequential.py b/golem/core/tuning/sequential.py
index 47925dbd..003eabdb 100644
--- a/golem/core/tuning/sequential.py
+++ b/golem/core/tuning/sequential.py
@@ -64,13 +64,13 @@ def _tune(self, graph: DomainGraphForTune, **kwargs) -> DomainGraphForTune:
 
         # Tuning performed sequentially for every node - so get ids of nodes
         nodes_ids = self.get_nodes_order(nodes_number=nodes_amount)
+        final_graph = deepcopy(self.init_graph)
 
         for node_id in nodes_ids:
             node = graph.nodes[node_id]
 
             # Get node's parameters to optimize
             node_params, init_params = get_node_parameters_for_hyperopt(self.search_space, node_id, node)
             best_metric = self.init_metric
-            final_graph = deepcopy(self.init_graph)
             if not node_params:

From 8e0711e31c67a28ec11e05af8ef31e667d6cc4af Mon Sep 17 00:00:00 2001
From: Lyubov Yamshchikova
Date: Fri, 8 Dec 2023 17:18:39 +0300
Subject: [PATCH 05/17] Fix pep8

---
 golem/core/tuning/hyperopt_tuner.py |  3 +--
 golem/core/tuning/sequential.py     |  4 ++--
 golem/core/tuning/simultaneous.py   | 19 +++++++++----------
 3 files changed, 12 insertions(+), 14 deletions(-)

diff --git a/golem/core/tuning/hyperopt_tuner.py b/golem/core/tuning/hyperopt_tuner.py
index f90865fc..b8a7c5ab 100644
--- a/golem/core/tuning/hyperopt_tuner.py
+++ b/golem/core/tuning/hyperopt_tuner.py
@@ -11,7 +11,6 @@
 from golem.core.adapter import BaseOptimizationAdapter
 from golem.core.dag.linked_graph_node import LinkedGraphNode
 from golem.core.log import default_log
-from golem.core.optimisers.graph import OptGraph
 from golem.core.optimisers.objective import ObjectiveFunction
 from golem.core.tuning.search_space import SearchSpace, get_node_operation_parameter_label
 from golem.core.tuning.tuner_interface import BaseTuner
@@ -149,7 +148,7 @@ def get_node_parameters_for_hyperopt(search_space: SearchSpace, node_id: int, no
     Args:
         search_space: SearchSpace with parameters per operation
         node_id: number of node in graph.nodes list
-        operation_name: name of operation in the node
+        node: node from the graph
 
     Returns:
         parameters_dict: dictionary-like structure with labeled hyperparameters
diff --git a/golem/core/tuning/sequential.py b/golem/core/tuning/sequential.py
index 003eabdb..44fbf059 100644
--- a/golem/core/tuning/sequential.py
+++ b/golem/core/tuning/sequential.py
@@ -1,7 +1,7 @@
 from copy import deepcopy
 from datetime import timedelta
 from functools import partial
-from typing import Callable, Optional, Tuple
+from typing import Callable, Optional
 
 from hyperopt import tpe, fmin, space_eval, Trials
 
@@ -213,7 +213,7 @@ def _objective(self,
             node_params = {**node_params, **unchangeable_parameters}
 
         # Set hyperparameters for node
-        graph = self.set_arg_node(graph=graph, node_id=node_id, node_params=node_params)
+        graph = self.set_arg_node(graph=graph, node_id=node_id, node_params=parameters_dict)
 
         metric_value = self.get_metric_value(graph=graph)
         return metric_value
diff --git a/golem/core/tuning/simultaneous.py b/golem/core/tuning/simultaneous.py
index 3005c243..797036d6 100644
--- a/golem/core/tuning/simultaneous.py
+++ b/golem/core/tuning/simultaneous.py
@@ -33,14 +33,15 @@ def _tune(self, graph: DomainGraphForTune, show_progress: bool = True) -> Domain
             try:
                 # try searching using initial parameters
                 # (uses original search space with fixed initial parameters)
-                trials, init_trials_num = self._search_near_initial_parameters(partial(self._objective,
-                                                                                       graph=graph,
-                                                                                       unchangeable_parameters=init_parameters),
-                                                                               parameters_dict,
-                                                                               init_parameters,
-                                                                               trials,
-                                                                               remaining_time,
-                                                                               show_progress)
+                trials, init_trials_num = self._search_near_initial_parameters(
+                    partial(self._objective,
+                            graph=graph,
+                            unchangeable_parameters=init_parameters),
+                    parameters_dict,
+                    init_parameters,
+                    trials,
+                    remaining_time,
+                    show_progress)
                 remaining_time = self._get_remaining_time()
                 if remaining_time > MIN_TIME_FOR_TUNING_IN_SEC:
                     fmin(partial(self._objective, graph=graph),
@@ -85,8 +86,6 @@ def _get_parameters_for_tune(self, graph: OptGraph) -> Tuple[dict, dict]:
         parameters_dict = {}
         initial_parameters = {}
         for node_id, node in enumerate(graph.nodes):
-            operation_name = node.name
-
             # Assign unique prefix for each model hyperparameter
             # label - number of node in the graph

From 9d0767b4dbac70a5b7fefb91fba8b35276222916 Mon Sep 17 00:00:00 2001
From: Lyubov Yamshchikova
Date: Fri, 8 Dec 2023 17:27:41 +0300
Subject: [PATCH 06/17] Fix test

---
 golem/core/tuning/sequential.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/golem/core/tuning/sequential.py b/golem/core/tuning/sequential.py
index 44fbf059..f7159128 100644
--- a/golem/core/tuning/sequential.py
+++ b/golem/core/tuning/sequential.py
@@ -210,10 +210,10 @@ def _objective(self,
-            parameters_dict = {**node_params, **unchangeable_parameters}
+            node_params = {**node_params, **unchangeable_parameters}
 
         # Set hyperparameters for node
-        graph = self.set_arg_node(graph=graph, node_id=node_id, node_params=parameters_dict)
+        graph = self.set_arg_node(graph=graph, node_id=node_id, node_params=node_params)

From 4a32a5f9d9a27ca7a61a19726a2a08cd4ec6afde Mon Sep 17 00:00:00 2001
From: Lyubov Yamshchikova
Date: Tue, 13 Feb 2024 17:58:01 +0300
Subject: [PATCH 07/17] Review fixes

---
 golem/core/tuning/hyperopt_tuner.py | 2 +-
 golem/core/tuning/sequential.py     | 4 ++--
 golem/core/tuning/simultaneous.py   | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/golem/core/tuning/hyperopt_tuner.py b/golem/core/tuning/hyperopt_tuner.py
index b8a7c5ab..0f31e7e5 100644
--- a/golem/core/tuning/hyperopt_tuner.py
+++ b/golem/core/tuning/hyperopt_tuner.py
@@ -68,9 +68,9 @@ def __init__(self, objective_evaluate: ObjectiveFunction,
     def _search_near_initial_parameters(self,
                                         objective,
                                         search_space: dict,
-                                        initial_parameters: dict,
                                         trials: Trials,
                                         remaining_time: float,
+                                        initial_parameters: Optional[dict] = None,
                                         show_progress: bool = True) -> Tuple[Trials, int]:
         """ Method to search using the search space where parameters initially set for the graph are fixed.
         This allows not losing the results obtained during the composition process.
diff --git a/golem/core/tuning/sequential.py b/golem/core/tuning/sequential.py
index f7159128..5f468819 100644
--- a/golem/core/tuning/sequential.py
+++ b/golem/core/tuning/sequential.py
@@ -171,9 +171,9 @@ def _optimize_node(self, graph: OptGraph,
                                                                                node_id=node_id,
                                                                                unchangeable_parameters=init_params),
                                                                        node_params,
-                                                                       init_params,
                                                                        trials,
-                                                                       remaining_time)
+                                                                       remaining_time,
+                                                                       init_params)
diff --git a/golem/core/tuning/simultaneous.py b/golem/core/tuning/simultaneous.py
index 797036d6..fd2f7c2e 100644
--- a/golem/core/tuning/simultaneous.py
+++ b/golem/core/tuning/simultaneous.py
@@ -38,9 +38,9 @@ def _tune(self, graph: DomainGraphForTune, show_progress: bool = True) -> Domain
                     graph=graph,
                     unchangeable_parameters=init_parameters),
                 parameters_dict,
-                init_parameters,
                 trials,
                 remaining_time,
+                init_parameters,
                 show_progress)

From 1df6300b698337a9fd767cd6b85cb8c94cacda4a Mon Sep 17 00:00:00 2001
From: Lyubov Yamshchikova
Date: Tue, 28 Nov 2023 17:03:35 +0300
Subject: [PATCH 08/17] Add initial search for sequential

---
 golem/core/tuning/hyperopt_tuner.py |  5 ++++-
 golem/core/tuning/sequential.py     | 29 ++++++++++++++++++++++++++++-
 2 files changed, 32 insertions(+), 2 deletions(-)

diff --git a/golem/core/tuning/hyperopt_tuner.py b/golem/core/tuning/hyperopt_tuner.py
index 0f31e7e5..1a9095c5 100644
--- a/golem/core/tuning/hyperopt_tuner.py
+++ b/golem/core/tuning/hyperopt_tuner.py
@@ -1,5 +1,7 @@
 from abc import ABC
 from datetime import timedelta
+from functools import partial
+from typing import Optional, Callable, Dict, Tuple, Any
 from typing import Callable, Dict, Optional, Tuple, Any
 
 import numpy as np
@@ -11,6 +13,7 @@
 from golem.core.adapter import BaseOptimizationAdapter
 from golem.core.dag.linked_graph_node import LinkedGraphNode
 from golem.core.log import default_log
+from golem.core.optimisers.graph import OptGraph
 from golem.core.optimisers.objective import ObjectiveFunction
 from golem.core.tuning.search_space import SearchSpace, get_node_operation_parameter_label
 from golem.core.tuning.tuner_interface import BaseTuner
@@ -68,9 +71,9 @@ def __init__(self, objective_evaluate: ObjectiveFunction,
     def _search_near_initial_parameters(self,
                                         objective,
                                         search_space: dict,
+                                        initial_parameters: dict,
                                         trials: Trials,
                                         remaining_time: float,
-                                        initial_parameters: Optional[dict] = None,
                                         show_progress: bool = True) -> Tuple[Trials, int]:
         """ Method to search using the search space where parameters initially set for the graph are fixed.
         This allows not losing the results obtained during the composition process.
diff --git a/golem/core/tuning/sequential.py b/golem/core/tuning/sequential.py
index 5f468819..5de085bb 100644
--- a/golem/core/tuning/sequential.py
+++ b/golem/core/tuning/sequential.py
@@ -1,7 +1,7 @@
 from copy import deepcopy
 from datetime import timedelta
 from functools import partial
-from typing import Callable, Optional
+from typing import Callable, Optional, Tuple
 
 from hyperopt import tpe, fmin, space_eval, Trials
 
@@ -75,6 +75,13 @@ def _tune(self, graph: DomainGraphForTune, **kwargs) -> DomainGraphForTune:
                 self.log.info(f'"{node.name}" operation has no parameters to optimize')
             else:
                 # Apply tuning for current node
+                self._optimize_node(node_id=node_id,
+                                    graph=graph,
+                                    node_params=node_params,
+                                    init_params=init_params,
+                                    iterations_per_node=iterations_per_node,
+                                    seconds_per_node=seconds_per_node)
+
                 graph, metric = self._optimize_node(node_id=node_id,
@@ -175,6 +182,26 @@ def _optimize_node(self, graph: OptGraph,
                                                                        remaining_time,
                                                                        init_params)
 
+        remaining_time = self._get_remaining_time()
+        if remaining_time > MIN_TIME_FOR_TUNING_IN_SEC:
+            fmin(partial(self._objective, graph=graph, node_id=node_id),
+                 node_params,
+                 trials=trials,
+                 algo=self.algo,
+                 max_evals=iterations_per_node,
+                 early_stop_fn=self.early_stop_fn,
+                 timeout=seconds_per_node)
+        remaining_time = self._get_remaining_time()
+        trials = Trials()
+        trials, init_trials_num = self._search_near_initial_parameters(partial(self._objective,
+                                                                               graph=graph,
+                                                                               node_id=node_id,
+                                                                               unchangeable_parameters=init_params),
+                                                                       node_params,
+                                                                       init_params,
+                                                                       trials,
+                                                                       remaining_time)
+
         remaining_time = self._get_remaining_time()
         if remaining_time > MIN_TIME_FOR_TUNING_IN_SEC:
             fmin(partial(self._objective, graph=graph, node_id=node_id),

From d695d474601bb40c2f8f209b5e80bf2ee40fd550 Mon Sep 17 00:00:00 2001
From: Lyubov Yamshchikova
Date: Tue, 28 Nov 2023 18:48:40 +0300
Subject: [PATCH 09/17] Save best params for sequential

---
 golem/core/tuning/sequential.py | 62 +++++++++++----------------------
 1 file changed, 20 insertions(+), 42 deletions(-)

diff --git a/golem/core/tuning/sequential.py b/golem/core/tuning/sequential.py
index 5de085bb..371ef29e 100644
--- a/golem/core/tuning/sequential.py
+++ b/golem/core/tuning/sequential.py
@@ -49,6 +49,7 @@ def _tune(self, graph: DomainGraphForTune, **kwargs) -> DomainGraphForTune:
         remaining_time = self._get_remaining_time()
         if self._check_if_tuning_possible(graph, parameters_to_optimize=True, remaining_time=remaining_time):
             # Calculate amount of iterations we can apply per node
+            initial_graph = deepcopy(graph)
             nodes_amount = graph.length
             iterations_per_node = round(self.iterations / nodes_amount)
             iterations_per_node = int(iterations_per_node)
@@ -71,25 +72,22 @@ def _tune(self, graph: DomainGraphForTune, **kwargs) -> DomainGraphForTune:
             # Get node's parameters to optimize
             node_params, init_params = get_node_parameters_for_hyperopt(self.search_space, node_id, node)
             best_metric = self.init_metric
+            best_parameters = {}
             if not node_params:
                 self.log.info(f'"{node.name}" operation has no parameters to optimize')
             else:
                 # Apply tuning for current node
-                self._optimize_node(node_id=node_id,
-                                    graph=graph,
-                                    node_params=node_params,
-                                    init_params=init_params,
-                                    iterations_per_node=iterations_per_node,
-                                    seconds_per_node=seconds_per_node)
-
+                graph, metric, parameters = self._optimize_node(node_id=node_id,
+                                                                graph=graph,
+                                                                node_params=node_params,
+                                                                init_params=init_params,
+                                                                iterations_per_node=iterations_per_node,
+                                                                seconds_per_node=seconds_per_node)
                 if metric < best_metric:
+                    best_metric = metric
+                    best_parameters.update(parameters)
+            if best_parameters:
+                self.set_arg_graph(initial_graph, best_parameters)
         self.was_tuned = True
         return final_graph

From 84daad4412ff695ce3e25979d91957bf14e37bc6 Mon Sep 17 00:00:00 2001
From: Lyubov Yamshchikova
Date: Tue, 28 Nov 2023 18:54:02 +0300
Subject: [PATCH 10/17] Fix best graph sequential

---
 golem/core/tuning/sequential.py | 37 ++++++++++++++------------------
 1 file changed, 16 insertions(+), 21 deletions(-)

diff --git a/golem/core/tuning/sequential.py b/golem/core/tuning/sequential.py
index 371ef29e..1e95d356 100644
--- a/golem/core/tuning/sequential.py
+++ b/golem/core/tuning/sequential.py
@@ -49,7 +49,6 @@ def _tune(self, graph: DomainGraphForTune, **kwargs) -> DomainGraphForTune:
         remaining_time = self._get_remaining_time()
         if self._check_if_tuning_possible(graph, parameters_to_optimize=True, remaining_time=remaining_time):
             # Calculate amount of iterations we can apply per node
-            initial_graph = deepcopy(graph)
             nodes_amount = graph.length
             iterations_per_node = round(self.iterations / nodes_amount)
             iterations_per_node = int(iterations_per_node)
@@ -65,29 +64,25 @@ def _tune(self, graph: DomainGraphForTune, **kwargs) -> DomainGraphForTune:
 
         # Tuning performed sequentially for every node - so get ids of nodes
         nodes_ids = self.get_nodes_order(nodes_number=nodes_amount)
-
         for node_id in nodes_ids:
             node = graph.nodes[node_id]
 
             # Get node's parameters to optimize
             node_params, init_params = get_node_parameters_for_hyperopt(self.search_space, node_id, node)
             best_metric = self.init_metric
-            best_parameters = {}
+            final_graph = deepcopy(self.init_graph)
             if not node_params:
                 self.log.info(f'"{node.name}" operation has no parameters to optimize')
             else:
                 # Apply tuning for current node
-                graph, metric, parameters = self._optimize_node(node_id=node_id,
-                                                                graph=graph,
-                                                                node_params=node_params,
-                                                                init_params=init_params,
-                                                                iterations_per_node=iterations_per_node,
-                                                                seconds_per_node=seconds_per_node)
+                graph, metric = self._optimize_node(node_id=node_id,
+                                                    graph=graph,
+                                                    node_params=node_params,
+                                                    init_params=init_params,
+                                                    iterations_per_node=iterations_per_node,
+                                                    seconds_per_node=seconds_per_node)
                 if metric < best_metric:
-                    best_metric = metric
-                    best_parameters.update(parameters)
-            if best_parameters:
-                self.set_arg_graph(initial_graph, best_parameters)
+                    final_graph = deepcopy(graph)
         self.was_tuned = True
         return final_graph
@@ -127,13 +127,13 @@ def tune_node(self, graph: DomainGraphForTune, node_index: int) -> DomainGraphFo
         remaining_time = self._get_remaining_time()
         if self._check_if_tuning_possible(graph, len(node_params) > 1, remaining_time):
             # Apply tuning for current node
-            graph, _, _ = self._optimize_node(graph=graph,
-                                              node_id=node_index,
-                                              node_params=node_params,
-                                              init_params=init_params,
-                                              iterations_per_node=self.iterations,
-                                              seconds_per_node=remaining_time
-                                              )
+            graph, _ = self._optimize_node(graph=graph,
+                                           node_id=node_index,
+                                           node_params=node_params,
+                                           init_params=init_params,
+                                           iterations_per_node=self.iterations,
+                                           seconds_per_node=remaining_time
+                                           )
 
             self.was_tuned = True
@@ -191,7 +191,7 @@ def _optimize_node(self, graph: OptGraph,
             best = {**best, **init_params}
         # Set best params for this node in the graph
         graph = self.set_arg_node(graph=graph, node_id=node_id, node_params=best)
-        return graph, trials.best_trial['result']['loss'], best
+        return graph, trials.best_trial['result']['loss']

From 1693d3577c0599a617c9f6c942e1e3d61d8d07b2 Mon Sep 17 00:00:00 2001
From: Lyubov Yamshchikova
Date: Mon, 4 Dec 2023 16:21:54 +0300
Subject: [PATCH 11/17] Minor

---
 golem/core/tuning/sequential.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/golem/core/tuning/sequential.py b/golem/core/tuning/sequential.py
index 1e95d356..008e7661 100644
--- a/golem/core/tuning/sequential.py
+++ b/golem/core/tuning/sequential.py
@@ -64,13 +64,13 @@ def _tune(self, graph: DomainGraphForTune, **kwargs) -> DomainGraphForTune:
 
         # Tuning performed sequentially for every node - so get ids of nodes
         nodes_ids = self.get_nodes_order(nodes_number=nodes_amount)
+        final_graph = deepcopy(self.init_graph)
 
         for node_id in nodes_ids:
             node = graph.nodes[node_id]
 
             # Get node's parameters to optimize
             node_params, init_params = get_node_parameters_for_hyperopt(self.search_space, node_id, node)
             best_metric = self.init_metric
-            final_graph = deepcopy(self.init_graph)
             if not node_params:
                 self.log.info(f'"{node.name}" operation has no parameters to optimize')
             else:

From 2253463c6c602f618f513cadc4f8bd6f4e4b0b8f Mon Sep 17 00:00:00 2001
From: Lyubov Yamshchikova
Date: Fri, 8 Dec 2023 17:18:39 +0300
Subject: [PATCH 12/17] Fix pep8

---
 golem/core/tuning/hyperopt_tuner.py | 2 --
 golem/core/tuning/sequential.py     | 4 ++--
 golem/core/tuning/simultaneous.py   | 2 +-
 3 files changed, 3 insertions(+), 5 deletions(-)

diff --git a/golem/core/tuning/hyperopt_tuner.py b/golem/core/tuning/hyperopt_tuner.py
index 1a9095c5..018e85f8 100644
--- a/golem/core/tuning/hyperopt_tuner.py
+++ b/golem/core/tuning/hyperopt_tuner.py
@@ -1,6 +1,5 @@
 from abc import ABC
 from datetime import timedelta
-from functools import partial
 from typing import Optional, Callable, Dict, Tuple, Any
 from typing import Callable, Dict, Optional, Tuple, Any
@@ -13,7 +12,6 @@
 from golem.core.adapter import BaseOptimizationAdapter
 from golem.core.dag.linked_graph_node import LinkedGraphNode
 from golem.core.log import default_log
-from golem.core.optimisers.graph import OptGraph
 from golem.core.optimisers.objective import ObjectiveFunction
 from golem.core.tuning.search_space import SearchSpace, get_node_operation_parameter_label
 from golem.core.tuning.tuner_interface import BaseTuner
diff --git a/golem/core/tuning/sequential.py b/golem/core/tuning/sequential.py
index 008e7661..85df7282 100644
--- a/golem/core/tuning/sequential.py
+++ b/golem/core/tuning/sequential.py
@@ -1,7 +1,7 @@
 from copy import deepcopy
 from datetime import timedelta
 from functools import partial
-from typing import Callable, Optional, Tuple
+from typing import Callable, Optional
 
 from hyperopt import tpe, fmin, space_eval, Trials
 
@@ -213,7 +213,7 @@ def _objective(self,
             node_params = {**node_params, **unchangeable_parameters}
 
         # Set hyperparameters for node
-        graph = self.set_arg_node(graph=graph, node_id=node_id, node_params=node_params)
+        graph = self.set_arg_node(graph=graph, node_id=node_id, node_params=parameters_dict)
 
         metric_value = self.get_metric_value(graph=graph)
         return metric_value
diff --git a/golem/core/tuning/simultaneous.py b/golem/core/tuning/simultaneous.py
index fd2f7c2e..797036d6 100644
--- a/golem/core/tuning/simultaneous.py
+++ b/golem/core/tuning/simultaneous.py
@@ -38,9 +38,9 @@ def _tune(self, graph: DomainGraphForTune, show_progress: bool = True) -> Domain
                     graph=graph,
                     unchangeable_parameters=init_parameters),
                 parameters_dict,
+                init_parameters,
                 trials,
                 remaining_time,
-                init_parameters,
                 show_progress)

From dca5b49e3a874c34609b41bef3cea7608c55012d Mon Sep 17 00:00:00 2001
From: Lyubov Yamshchikova
Date: Fri, 8 Dec 2023 17:27:41 +0300
Subject: [PATCH 13/17] Fix test

---
 golem/core/tuning/sequential.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/golem/core/tuning/sequential.py b/golem/core/tuning/sequential.py
index 85df7282..f7159128 100644
--- a/golem/core/tuning/sequential.py
+++ b/golem/core/tuning/sequential.py
@@ -213,7 +213,7 @@ def _objective(self,
             node_params = {**node_params, **unchangeable_parameters}
 
         # Set hyperparameters for node
-        graph = self.set_arg_node(graph=graph, node_id=node_id, node_params=parameters_dict)
+        graph = self.set_arg_node(graph=graph, node_id=node_id, node_params=node_params)

From 97c171f841d2c9f61605b4e4da93ea74933daf90 Mon Sep 17 00:00:00 2001
From: Lyubov Yamshchikova
Date: Tue, 4 Jun 2024 15:50:49 +0300
Subject: [PATCH 14/17] Rebase

---
 golem/core/tuning/sequential.py | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/golem/core/tuning/sequential.py b/golem/core/tuning/sequential.py
index f7159128..2ac22292 100644
--- a/golem/core/tuning/sequential.py
+++ b/golem/core/tuning/sequential.py
@@ -65,12 +65,12 @@ def _tune(self, graph: DomainGraphForTune, **kwargs) -> DomainGraphForTune:
         # Tuning performed sequentially for every node - so get ids of nodes
         nodes_ids = self.get_nodes_order(nodes_number=nodes_amount)
         final_graph = deepcopy(self.init_graph)
+        best_metric = self.init_metric
 
         for node_id in nodes_ids:
             node = graph.nodes[node_id]
 
             # Get node's parameters to optimize
             node_params, init_params = get_node_parameters_for_hyperopt(self.search_space, node_id, node)
-            best_metric = self.init_metric
             if not node_params:
                 self.log.info(f'"{node.name}" operation has no parameters to optimize')
             else:
@@ -81,8 +81,9 @@ def _tune(self, graph: DomainGraphForTune, **kwargs) -> DomainGraphForTune:
                                                     init_params=init_params,
                                                     iterations_per_node=iterations_per_node,
                                                     seconds_per_node=seconds_per_node)
-                if metric < best_metric:
+                if metric <= best_metric:
                     final_graph = deepcopy(graph)
+                    best_metric = metric
         self.was_tuned = True
         return final_graph
@@ -185,12 +186,12 @@ def _optimize_node(self, graph: OptGraph,
                  early_stop_fn=self.early_stop_fn,
                  timeout=seconds_per_node)
 
-        best = space_eval(space=node_params, hp_assignment=trials.argmin)
+        best_params = space_eval(space=node_params, hp_assignment=trials.argmin)
         is_best_trial_with_init_params = trials.best_trial.get('tid') in range(init_trials_num)
         if is_best_trial_with_init_params:
-            best = {**best, **init_params}
+            best_params = {**best_params, **init_params}
         # Set best params for this node in the graph
-        graph = self.set_arg_node(graph=graph, node_id=node_id, node_params=best)
+        graph = self.set_arg_node(graph=graph, node_id=node_id, node_params=best_params)
         return graph, trials.best_trial['result']['loss']

From 884ef62090efa4f64d12b47b57092d6dcdbbac16 Mon Sep 17 00:00:00 2001
From: Lyubov Yamshchikova
Date: Tue, 4 Jun 2024 16:22:59 +0300
Subject: [PATCH 15/17] Minor

---
 golem/core/tuning/tuner_interface.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/golem/core/tuning/tuner_interface.py b/golem/core/tuning/tuner_interface.py
index 098c3734..10d924c0 100644
--- a/golem/core/tuning/tuner_interface.py
+++ b/golem/core/tuning/tuner_interface.py
@@ -82,7 +82,7 @@ def tune(self, graph: DomainGraphForTune, **kwargs) -> Union[DomainGraphForTune,
 
         with self.timer:
             # Check source metrics for data
-            self.init_check(graph)
+            self.init_check(graph, self.objectives_number > 1)
             final_graph = self._tune(graph, **kwargs)
             # Validate if optimisation did well
             final_graph = self.final_check(final_graph, self.objectives_number > 1)
@@ -100,6 +100,7 @@ def init_check(self, graph: OptGraph) -> None:
 
         Args:
             graph: graph to calculate objective
+            multi_obj: whether the optimization was multi-objective.
         """
         self.log.info('Hyperparameters optimization start: estimation of metric for initial graph')
@@ -179,8 +180,8 @@ def _multi_obj_final_check(self, tuned_graphs: Sequence[OptGraph]) -> Sequence[O
                              f'{metrics_formatted}')
         else:
             self.log.message('Initial metric dominates all found solutions. Return initial graph.')
-            final_graphs = self.init_graph
-            self.obtained_metric = self.init_metric
+            final_graphs = [self.init_graph]
+            self.obtained_metric = [self.init_metric]
         return final_graphs

From 01808d5d48db1e4b35040d4d3c32f32bfc109754 Mon Sep 17 00:00:00 2001
From: Lyubov Yamshchikova
Date: Tue, 4 Jun 2024 16:43:14 +0300
Subject: [PATCH 16/17] Minor

---
 golem/core/tuning/tuner_interface.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/golem/core/tuning/tuner_interface.py b/golem/core/tuning/tuner_interface.py
index 10d924c0..9563e438 100644
--- a/golem/core/tuning/tuner_interface.py
+++ b/golem/core/tuning/tuner_interface.py
@@ -82,7 +82,7 @@ def tune(self, graph: DomainGraphForTune, **kwargs) -> Union[DomainGraphForTune,
 
         with self.timer:
             # Check source metrics for data
-            self.init_check(graph, self.objectives_number > 1)
+            self.init_check(graph)
             final_graph = self._tune(graph, **kwargs)
             # Validate if optimisation did well
             final_graph = self.final_check(final_graph, self.objectives_number > 1)

From baac3e1ded2d5ac6a7271559461c3a87ccaf2426 Mon Sep 17 00:00:00 2001
From: Lyubov Yamshchikova
Date: Tue, 4 Jun 2024 16:58:52 +0300
Subject: [PATCH 17/17] pep8

---
 golem/core/tuning/hyperopt_tuner.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/golem/core/tuning/hyperopt_tuner.py b/golem/core/tuning/hyperopt_tuner.py
index 018e85f8..b8a7c5ab 100644
--- a/golem/core/tuning/hyperopt_tuner.py
+++ b/golem/core/tuning/hyperopt_tuner.py
@@ -1,6 +1,5 @@
 from abc import ABC
 from datetime import timedelta
-from typing import Optional, Callable, Dict, Tuple, Any
 from typing import Callable, Dict, Optional, Tuple, Any
 
 import numpy as np
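
For context, the two-stage flow this series introduces can be reproduced with plain hyperopt: spend a small, capped share of the iteration budget with the graph's initial parameter values pinned inside the objective, then continue on the unrestricted search space while reusing the same Trials object so the early evaluations are not lost. The snippet below is a minimal illustrative sketch, not GOLEM code: the toy quadratic objective, the parameter names and the search space are invented, while the 10%-capped initial budget, the objective-side override of sampled values, and the best-trial check mirror the logic of _search_near_initial_parameters and _objective in the patches above.

from functools import partial

from hyperopt import Trials, fmin, hp, tpe


def objective(params: dict, fixed: dict = None) -> float:
    # As in _objective: fixed (initial) parameters override the sampled ones
    params = {**params, **(fixed or {})}
    return (params['alpha'] - 0.3) ** 2 + (params['beta'] - 5.0) ** 2


search_space = {
    'alpha': hp.uniform('alpha', 0.0, 1.0),
    'beta': hp.uniform('beta', 0.0, 10.0),
}
initial_parameters = {'alpha': 0.5}  # values the graph was composed with
iterations = 50

# Stage 1: a small share of the budget near the initial parameters
init_trials_num = min(int(iterations * 0.1), 10)
trials = Trials()
fmin(partial(objective, fixed=initial_parameters),
     search_space,
     algo=tpe.suggest,
     trials=trials,
     max_evals=init_trials_num,
     show_progressbar=False)

# Stage 2: the rest of the budget on the unrestricted space; passing the same
# Trials object keeps the stage-1 evaluations, so no results are lost
best = fmin(objective,
            search_space,
            algo=tpe.suggest,
            trials=trials,
            max_evals=iterations,
            show_progressbar=False)

# If the best trial came from stage 1, the pinned initial values are the winners
if trials.best_trial.get('tid') in range(init_trials_num):
    best = {**best, **initial_parameters}
print(best)

Reusing one Trials object across both fmin calls is the design choice that makes the warm start cheap: hyperopt's max_evals is a total, so the second call simply continues the search from iteration init_trials_num onward.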