Skip to content

Commit

Permalink
Fix and add adaptive parameters to common_optimizer
Browse files Browse the repository at this point in the history
  • Loading branch information
kasyanovse committed Dec 11, 2023
1 parent 34f83e7 commit e69fc2a
Show file tree
Hide file tree
Showing 3 changed files with 28 additions and 16 deletions.
1 change: 1 addition & 0 deletions golem/core/optimisers/common_optimizer/common_optimizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,7 @@ def _initial_population(self, evaluator: EvaluationOperator):
def _evolve_population(self, evaluator: EvaluationOperator) -> PopulationT:
""" Method realizing full evolution cycle """
# TODO rebuild population
# TODO add iterations limit

self.evaluator = evaluator

Expand Down
21 changes: 19 additions & 2 deletions golem/core/optimisers/common_optimizer/old_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,14 +12,24 @@
from golem.core.optimisers.common_optimizer.nodes.old_regularization import Regularization, RegularizationTask
from golem.core.optimisers.common_optimizer.nodes.old_selection import Selection, SelectionTask
from golem.core.optimisers.common_optimizer.runner import ParallelRunner, OneThreadRunner
from golem.core.optimisers.common_optimizer.scheme import Scheme
from golem.core.optimisers.common_optimizer.scheme import Scheme, SequentialScheme
from golem.core.optimisers.common_optimizer.stage import Stage
from golem.core.optimisers.common_optimizer.task import Task, TaskStatusEnum
from golem.core.optimisers.common_optimizer.temp.adaptive import AdaptivePopulationSize, AdaptiveParametersTask

default_stages = list()


# adaptive parameters
# Stage that adjusts optimizer parameters between generations (here: population
# size via AdaptivePopulationSize); nodes run one after another in a sequential scheme.
nodes = [AdaptivePopulationSize()]
scheme = SequentialScheme(nodes=nodes)
def adaptive_parameter_updater(finished_tasks, parameters):
    """Apply the parameter changes computed by the first finished adaptive task.

    :param finished_tasks: tasks that completed the adaptive stage; only the
        first one is consulted (the stage stops as soon as one finishes)
    :param parameters: current ``CommonOptimizerParameters`` to update
    :return: the updated parameters object
    """
    return finished_tasks[0].update_parameters(parameters)

# Register the adaptive-parameters stage: single-threaded, stops as soon as
# any task finishes (stop_fun is truthiness of the finished-task list).
default_stages.append(Stage(runner=OneThreadRunner(), nodes=nodes, task_builder=AdaptiveParametersTask,
                            scheme=scheme, stop_fun=lambda f, a: bool(f),
                            parameter_updater=adaptive_parameter_updater))

# main evolution process
class EvolvePopulationTask(ElitismTask, MutationTask,
Expand All @@ -41,7 +51,14 @@ def update_parameters(self, parameters: 'CommonOptimizerParameters'):
# Nodes of the main evolution pipeline, applied to tasks in this order.
nodes = [Elitism(), Mutation(), Crossover(), Regularization(),
         Selection(), Inheritance(), Evaluator()]

stop_fun = lambda f, a: a and len(f) >= a[0].graph_optimizer_params.pop_size
def stop_fun(finished_tasks, all_tasks):
    """Stop the evolution stage once a full population has been produced.

    :param finished_tasks: tasks that have completed the pipeline
    :param all_tasks: every task in flight; the first one carries the
        target population size in ``graph_generation_params.pop_size``
    :return: True when enough tasks have finished, False otherwise
    """
    if not all_tasks:
        return False
    required = all_tasks[0].graph_generation_params.pop_size
    return len(finished_tasks) >= required



def parameter_updater(finished_tasks, parameters):
parameters.new_population = list(chain(*[task.generation for task in finished_tasks]))
Expand Down
22 changes: 8 additions & 14 deletions golem/core/optimisers/common_optimizer/temp/adaptive.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,9 @@ class AdaptiveParametersTask(Task):

def __init__(self, parameters: 'CommonOptimizerParameters'):
    """Snapshot the optimizer state needed by adaptive-parameter nodes."""
    super().__init__()
    # NOTE(review): two representations of the same state coexist below —
    # a generic dict snapshot AND direct attribute references. This looks
    # like a diff artifact (old vs. new implementation merged); confirm
    # which one downstream nodes actually read and remove the other.
    self.parameters = {}
    # Snapshot every parameter-group object's attributes into a plain dict,
    # keyed by the attribute name on CommonOptimizerParameters.
    for attribute, values in parameters.__dict__.items():
        if isinstance(values, (OptimizationParameters, GraphGenerationParams, AlgorithmParameters)):
            self.parameters[attribute] = dict(values.__dict__.items())
    self.parameters['population'] = parameters.population
    # Direct references used by AdaptivePopulationSize and update_parameters.
    self.graph_optimizer_params = parameters.graph_optimizer_params
    self.graph_generation_params = parameters.graph_generation_params
    self.population = parameters.population

def update_parameters(self, parameters: 'CommonOptimizerParameters') -> 'CommonOptimizerParameters':
"""
Expand All @@ -29,11 +27,7 @@ def update_parameters(self, parameters: 'CommonOptimizerParameters') -> 'CommonO
:param parameters: instance of CommonOptimizerParameters to update
:return: updated parameters object
"""
for attribute, values in self.parameters.items():
parameters_obj = getattr(parameters, attribute, None)
if parameters_obj:
for subattribute, subvalues in values.items():
setattr(parameters_obj, subattribute, subvalues)
parameters.graph_generation_params = self.graph_generation_params
return parameters


Expand Down Expand Up @@ -75,10 +69,10 @@ def __call__(self, task: AdaptiveParametersTask) -> List[AdaptiveParametersTask]
raise TypeError(f"task should be `AdaptiveParametersTask`, got {type(task)} instead")
if self._pop_size is None:
self._pop_size: OldAdaptivePopulationSize = init_adaptive_pop_size(
task.parameters['graph_optimizer_params'],
task.parameters['population']
task.graph_optimizer_params,
task.population
)
pop_size = self._pop_size.next(task.parameters['population'])
pop_size = self._pop_size.next(task.population)

task.parameters['graph_generation_params']['pop_size'] = pop_size
task.graph_generation_params.pop_size = pop_size
return [task]

0 comments on commit e69fc2a

Please sign in to comment.