add some operators
kasyanovse committed Dec 8, 2023
1 parent 2373f85 commit 4af20ee
Showing 11 changed files with 298 additions and 13 deletions.
43 changes: 32 additions & 11 deletions golem/core/optimisers/common_optimizer/common_optimizer.py
@@ -5,22 +5,27 @@

from golem.core.dag.graph import Graph
from golem.core.optimisers.common_optimizer.node import Node
from golem.core.optimisers.common_optimizer.old_config import default_stages
from golem.core.optimisers.common_optimizer.scheme import Scheme
from golem.core.optimisers.common_optimizer.stage import Stage
from golem.core.optimisers.common_optimizer.task import Task, TaskStatusEnum
-from golem.core.optimisers.genetic.operators.operator import PopulationT
+from golem.core.optimisers.genetic.operators.operator import PopulationT, EvaluationOperator
from golem.core.optimisers.graph import OptGraph
from golem.core.optimisers.objective import Objective, ObjectiveFunction
from golem.core.optimisers.opt_history_objects.individual import Individual
from golem.core.optimisers.opt_history_objects.opt_history import OptHistory
from golem.core.optimisers.optimization_parameters import OptimizationParameters
from golem.core.optimisers.optimizer import GraphOptimizer, GraphGenerationParams, AlgorithmParameters
from golem.core.optimisers.populational_optimizer import PopulationalOptimizer
from golem.core.optimisers.timer import OptimisationTimer


@dataclass
class CommonOptimizerParameters:
_run: bool
generations: List[PopulationT]
population: PopulationT
evaluator: Any

objective: Objective
initial_graphs: Sequence[Union[Graph, Any]]
@@ -31,11 +36,12 @@ class CommonOptimizerParameters:
history: OptHistory


-class CommonOptimizer(GraphOptimizer):
+class CommonOptimizer(PopulationalOptimizer):
__parameters_attrs = ('objective', 'initial_graphs', 'requirements', 'graph_generation_params',
-                          'graph_optimizer_params', 'history', 'stages', '_run', 'generations')
+                          'graph_optimizer_params', 'history', 'stages', '_run',
+                          'generations', 'population', 'evaluator')
__parameters_allowed_to_change = ('requirements', 'graph_generation_params',
-                                     'graph_optimizer_params', 'stages', '_run', 'generations')
+                                     'graph_optimizer_params', 'stages', '_run')

def __init__(self,
objective: Objective,
@@ -51,11 +57,14 @@ def __init__(self,
graph_generation_params=graph_generation_params,
graph_optimizer_params=graph_optimizer_params)

self.timer = OptimisationTimer(timeout=self.requirements.timeout)
self.generations = list()
-        self.stages = stages
+        self.stages = default_stages
self._run = True

self.requirements.max_depth = 100 # TODO fix
self.graph_optimizer_params.pop_size = graph_optimizer_params.pop_size
self.initial_individuals = [Individual(graph, metadata=requirements.static_individual_metadata)
for graph in self.initial_graphs]

@property
def parameters(self):
return CommonOptimizerParameters(**{attr: getattr(self, attr) for attr in self.__parameters_attrs})
@@ -67,7 +76,19 @@ def parameters(self, parameters: CommonOptimizerParameters):
for attr in self.__parameters_allowed_to_change:
setattr(self, attr, getattr(parameters, attr))

-    def optimise(self, objective: ObjectiveFunction):
-        while self._run:
-            for i_stage in range(len(self.stages)):
-                self.parameters = self.stages[i_stage].run(self.parameters)
+    # def optimise(self, objective: ObjectiveFunction):
+    #     while self._run:
+    #         for i_stage in range(len(self.stages)):
+    #             self.parameters = self.stages[i_stage].run(self.parameters)

def _initial_population(self, evaluator: EvaluationOperator):
self._update_population(evaluator(self.initial_individuals), 'initial_assumptions')

def _evolve_population(self, evaluator: EvaluationOperator) -> PopulationT:
""" Method realizing full evolution cycle """

self.evaluator = evaluator

for i_stage in range(len(self.stages)):
self.parameters = self.stages[i_stage].run(self.parameters)
        # TODO: return the evolved population; the PopulationT promised by the signature is not produced yet
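
For orientation, a minimal standalone sketch of the round-trip that _evolve_population relies on: the optimizer exports its state as a parameters object, each stage's run() transforms that object, and the parameters setter copies the allowed attributes back. EchoStage and ToyOptimizer are hypothetical stand-ins, not golem classes.

from dataclasses import dataclass
from typing import List


@dataclass
class ToyParameters:
    generations: List[list]
    population: list


class EchoStage:
    def run(self, params: ToyParameters) -> ToyParameters:
        # a real Stage builds tasks from params, runs them through its
        # scheme of nodes, and writes the results back before returning
        params.generations.append(list(params.population))
        return params


class ToyOptimizer:
    def __init__(self):
        self.generations = []
        self.population = ['graph_a', 'graph_b']
        self.stages = [EchoStage()]

    @property
    def parameters(self) -> ToyParameters:
        return ToyParameters(self.generations, self.population)

    @parameters.setter
    def parameters(self, params: ToyParameters):
        # only whitelisted attributes flow back, as in CommonOptimizer
        self.generations = params.generations

    def evolve(self):
        for stage in self.stages:
            self.parameters = stage.run(self.parameters)


opt = ToyOptimizer()
opt.evolve()
assert opt.generations == [['graph_a', 'graph_b']]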
5 changes: 3 additions & 2 deletions golem/core/optimisers/common_optimizer/node.py
@@ -8,8 +8,9 @@
class Node:
""" Node with operation """

-    name: str
-    operation: Callable[[Task], Union[Task, List[Task]]]
+    def __init__(self, name: str, operation: Callable[[Task], Union[Task, List[Task]]]):
+        self.name = name
+        self.operation = operation

def __call__(self, *args, **kwargs):
results = self.operation(*args, **kwargs)
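The hunk above is truncated, but the visible signature suggests Node.__call__ normalizes the operation's output to a list of tasks. A hypothetical usage sketch under that assumption (Task here is a stand-in, not golem's class):

from typing import Callable, List, Union


class Task:
    # stand-in for golem's Task, carrying an arbitrary payload
    def __init__(self, payload):
        self.payload = payload


class Node:
    def __init__(self, name: str, operation: Callable[[Task], Union[Task, List[Task]]]):
        self.name = name
        self.operation = operation

    def __call__(self, task: Task) -> List[Task]:
        results = self.operation(task)
        # assumed normalization: single tasks are wrapped into a list
        return results if isinstance(results, list) else [results]


double = Node('double', lambda task: Task(task.payload * 2))
print([t.payload for t in double(Task(21))])  # [42]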
26 changes: 26 additions & 0 deletions golem/core/optimisers/common_optimizer/nodes/evaluator.py
@@ -0,0 +1,26 @@
from golem.core.optimisers.common_optimizer.node import Node
from golem.core.optimisers.common_optimizer.task import TaskStatusEnum, TaskMixin
from golem.core.optimisers.genetic.operators.mutation import Mutation as OldMutation


class EvaluatorTask(TaskMixin):
def __init__(self, parameters: 'CommonOptimizerParameters'):
super().__init__(parameters)
self.evaluator = parameters.evaluator

def update_parameters(self, parameters: 'CommonOptimizerParameters'):
return super().update_parameters(parameters)


class Evaluator(Node):
def __init__(self, name: str = 'evaluator'):
self.name = name

def __call__(self, task: EvaluatorTask):
evaluated_inds = task.evaluator(task.generation)
if evaluated_inds:
task.generation = evaluated_inds
task.status = TaskStatusEnum.SUCCESS
else:
task.status = TaskStatusEnum.FAIL
return [task]
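
Note the contract here: a node reports its outcome through task.status rather than raising exceptions, and the scheme (see old_config.py below) branches on that status. A compact standalone illustration of both paths, with stand-in classes:

from enum import Enum, auto


class TaskStatusEnum(Enum):  # mirrors the statuses used above
    SUCCESS = auto()
    FAIL = auto()


class StubTask:
    def __init__(self, generation, evaluator):
        self.generation = generation
        self.evaluator = evaluator
        self.status = None


def run_evaluator(task: StubTask):
    # same shape as Evaluator.__call__: mark status, return a task list
    evaluated = task.evaluator(task.generation)
    if evaluated:
        task.generation = evaluated
        task.status = TaskStatusEnum.SUCCESS
    else:
        task.status = TaskStatusEnum.FAIL
    return [task]


ok, = run_evaluator(StubTask(['ind'], lambda gen: gen))
bad, = run_evaluator(StubTask(['ind'], lambda gen: []))
print(ok.status, bad.status)  # TaskStatusEnum.SUCCESS TaskStatusEnum.FAIL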
2 changes: 2 additions & 0 deletions golem/core/optimisers/common_optimizer/nodes/mutation.py
@@ -2,9 +2,11 @@
from golem.core.optimisers.common_optimizer.node import Node
from golem.core.optimisers.common_optimizer.task import Task, TaskStatusEnum, TaskMixin
from golem.core.optimisers.genetic.operators.base_mutations import base_mutations_repo
from golem.core.optimisers.genetic.operators.mutation import Mutation as OldMutation
from golem.core.optimisers.opt_history_objects.individual import Individual
from golem.core.optimisers.opt_history_objects.parent_operator import ParentOperator


class MutationTask(TaskMixin):
def __init__(self, parameters: 'CommonOptimizerParameters'):
super().__init__(parameters)
36 changes: 36 additions & 0 deletions golem/core/optimisers/common_optimizer/nodes/old_crossover.py
@@ -0,0 +1,36 @@
from golem.core.optimisers.common_optimizer.node import Node
from golem.core.optimisers.genetic.operators.crossover import Crossover as OldCrossover
from golem.core.optimisers.common_optimizer.task import TaskStatusEnum, TaskMixin


class CrossoverTask(TaskMixin):
def __init__(self, parameters: 'CommonOptimizerParameters'):
super().__init__(parameters)
self.graph_optimizer_params = parameters.graph_optimizer_params
self.requirements = parameters.requirements
self.graph_generation_params = parameters.graph_generation_params
self.generation = parameters.population

def update_parameters(self, parameters: 'CommonOptimizerParameters'):
return super().update_parameters(parameters)


class Crossover(Node):
def __init__(self, name: str = 'crossover'):
self.name = name
self._crossover = None

def __call__(self, task: CrossoverTask):
if self._crossover is None:
self._crossover = OldCrossover(task.graph_optimizer_params,
task.requirements,
task.graph_generation_params)
task.generation = self._crossover(task.generation)
task.status = TaskStatusEnum.SUCCESS

new_tasks = list()
for new_ind in task.generation:
new_task = task.copy()
new_task.generation = [new_ind]
new_tasks.append(new_task)
return new_tasks
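The fan-out at the end of Crossover.__call__ is what lets downstream nodes process offspring independently: one incoming task carrying N children becomes N tasks carrying one child each. The pattern in isolation (FanOutTask is a hypothetical stand-in for CrossoverTask):

import copy


class FanOutTask:
    def __init__(self, generation):
        self.generation = generation

    def copy(self):
        return copy.deepcopy(self)


def fan_out(task: FanOutTask):
    new_tasks = []
    for individual in task.generation:
        new_task = task.copy()
        new_task.generation = [individual]  # each task now carries one child
        new_tasks.append(new_task)
    return new_tasks


tasks = fan_out(FanOutTask(['child_1', 'child_2', 'child_3']))
print(len(tasks), tasks[0].generation)  # 3 ['child_1']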
28 changes: 28 additions & 0 deletions golem/core/optimisers/common_optimizer/nodes/old_elitism.py
@@ -0,0 +1,28 @@
from golem.core.optimisers.common_optimizer.node import Node
from golem.core.optimisers.common_optimizer.task import TaskStatusEnum, TaskMixin
from golem.core.optimisers.genetic.operators.elitism import Elitism as OldElitism



class ElitismTask(TaskMixin):
def __init__(self, parameters: 'CommonOptimizerParameters'):
super().__init__(parameters)
self.graph_optimizer_params = parameters.graph_optimizer_params
self.generation = parameters.population
self.best_individuals = None # TODO

def update_parameters(self, parameters: 'CommonOptimizerParameters'):
return super().update_parameters(parameters)


class Elitism(Node):
def __init__(self, name: str = 'elitism'):
self.name = name
        self._elitism = None

def __call__(self, task: ElitismTask):
        if self._elitism is None:
            self._elitism = OldElitism(task.graph_optimizer_params)
        task.generation = self._elitism(task.best_individuals, task.generation)
task.status = TaskStatusEnum.SUCCESS
return [task]
28 changes: 28 additions & 0 deletions golem/core/optimisers/common_optimizer/nodes/old_inheritance.py
@@ -0,0 +1,28 @@
from golem.core.optimisers.common_optimizer.node import Node
from golem.core.optimisers.common_optimizer.task import TaskStatusEnum, TaskMixin
from golem.core.optimisers.genetic.operators.inheritance import Inheritance as OldInheritance
from golem.core.optimisers.genetic.operators.selection import Selection as OldSelection

class InheritanceTask(TaskMixin):
def __init__(self, parameters: 'CommonOptimizerParameters'):
super().__init__(parameters)
self.graph_optimizer_params = parameters.graph_optimizer_params
self.generation = parameters.population
self.origin_generation = parameters.population

def update_parameters(self, parameters: 'CommonOptimizerParameters'):
return super().update_parameters(parameters)


class Inheritance(Node):
def __init__(self, name: str = 'inheritance'):
self.name = name
self._inheritance = None

def __call__(self, task: InheritanceTask):
if self._inheritance is None:
selection = OldSelection(task.graph_optimizer_params)
self._inheritance = OldInheritance(task.graph_optimizer_params, selection)
task.generation = self._inheritance(task.origin_generation, task.generation)
task.status = TaskStatusEnum.SUCCESS
return [task]
34 changes: 34 additions & 0 deletions golem/core/optimisers/common_optimizer/nodes/old_mutation.py
@@ -0,0 +1,34 @@
from golem.core.optimisers.common_optimizer.node import Node
from golem.core.optimisers.common_optimizer.task import TaskStatusEnum, TaskMixin
from golem.core.optimisers.genetic.operators.mutation import Mutation as OldMutation


class MutationTask(TaskMixin):
def __init__(self, parameters: 'CommonOptimizerParameters'):
super().__init__(parameters)
self.graph_optimizer_params = parameters.graph_optimizer_params
self.requirements = parameters.requirements
self.graph_generation_params = parameters.graph_generation_params
self.generation = parameters.population

def update_parameters(self, parameters: 'CommonOptimizerParameters'):
return super().update_parameters(parameters)


class Mutation(Node):
def __init__(self, name: str = 'mutation'):
self.name = name
self._mutation = None

def __call__(self, task: MutationTask):
if self._mutation is None:
self._mutation = OldMutation(task.graph_optimizer_params,
task.requirements,
task.graph_generation_params)
ind = self._mutation(task.generation)
if not ind:
task.status = TaskStatusEnum.FAIL
else:
task.generation = [ind]
task.status = TaskStatusEnum.SUCCESS
return [task]
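All of these wrapper nodes share one construction trick: the legacy operator is built lazily on the first call, because its dependencies arrive on the task rather than at node construction time. The pattern in isolation, with a hypothetical LegacyOperator standing in for OldMutation, OldSelection, OldElitism and friends:

class LegacyOperator:
    # stand-in for the wrapped golem operator
    def __init__(self, params):
        self.params = params

    def __call__(self, generation):
        return generation


class WrapperNode:
    def __init__(self, name: str = 'wrapper'):
        self.name = name
        self._operator = None  # deferred: parameters travel on the task

    def __call__(self, task):
        if self._operator is None:  # built once, on the first task
            self._operator = LegacyOperator(task.graph_optimizer_params)
        task.generation = self._operator(task.generation)
        return [task]


class StubTask:
    graph_optimizer_params = {'pop_size': 5}
    generation = ['ind']


node = WrapperNode('demo')
node(StubTask())
assert node._operator is not None  # reused on every later call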
30 changes: 30 additions & 0 deletions golem/core/optimisers/common_optimizer/nodes/old_regularization.py
@@ -0,0 +1,30 @@
from golem.core.optimisers.common_optimizer.node import Node
from golem.core.optimisers.common_optimizer.task import TaskStatusEnum, TaskMixin
from golem.core.optimisers.genetic.operators.regularization import Regularization as OldRegularization



class RegularizationTask(TaskMixin):
def __init__(self, parameters: 'CommonOptimizerParameters'):
super().__init__(parameters)
self.graph_optimizer_params = parameters.graph_optimizer_params
self.graph_generation_params = parameters.graph_generation_params
self.generation = parameters.population
self.evaluator = parameters.evaluator

def update_parameters(self, parameters: 'CommonOptimizerParameters'):
return super().update_parameters(parameters)


class Regularization(Node):
def __init__(self, name: str = 'regularization'):
self.name = name
self._regularization = None

def __call__(self, task: RegularizationTask):
if self._regularization is None:
self._regularization = OldRegularization(task.graph_optimizer_params,
task.graph_generation_params)
task.generation = self._regularization(task.generation, task.evaluator)
task.status = TaskStatusEnum.SUCCESS
return [task]
27 changes: 27 additions & 0 deletions golem/core/optimisers/common_optimizer/nodes/old_selection.py
@@ -0,0 +1,27 @@
from golem.core.optimisers.common_optimizer.node import Node
from golem.core.optimisers.common_optimizer.task import TaskStatusEnum, TaskMixin
from golem.core.optimisers.genetic.operators.selection import Selection as OldSelection



class SelectionTask(TaskMixin):
def __init__(self, parameters: 'CommonOptimizerParameters'):
super().__init__(parameters)
self.graph_optimizer_params = parameters.graph_optimizer_params
self.generation = parameters.population

def update_parameters(self, parameters: 'CommonOptimizerParameters'):
return super().update_parameters(parameters)


class Selection(Node):
def __init__(self, name: str = 'selection'):
self.name = name
self._selection = None

def __call__(self, task: SelectionTask):
if self._selection is None:
self._selection = OldSelection(task.graph_optimizer_params)
task.generation = self._selection(task.generation)
task.status = TaskStatusEnum.SUCCESS
return [task]
52 changes: 52 additions & 0 deletions golem/core/optimisers/common_optimizer/old_config.py
@@ -0,0 +1,52 @@
""" Module with genetic optimization settings
that reproduces behavior of default GOLEM
genetic optimization """
from collections import defaultdict

from golem.core.optimisers.common_optimizer.nodes.evaluator import Evaluator
from golem.core.optimisers.common_optimizer.nodes.old_crossover import Crossover, CrossoverTask
from golem.core.optimisers.common_optimizer.nodes.old_elitism import Elitism, ElitismTask
from golem.core.optimisers.common_optimizer.nodes.old_inheritance import Inheritance, InheritanceTask
from golem.core.optimisers.common_optimizer.nodes.old_mutation import Mutation, MutationTask
from golem.core.optimisers.common_optimizer.nodes.old_regularization import Regularization, RegularizationTask
from golem.core.optimisers.common_optimizer.nodes.old_selection import Selection, SelectionTask
from golem.core.optimisers.common_optimizer.runner import ParallelRunner, OneThreadRunner
from golem.core.optimisers.common_optimizer.scheme import Scheme
from golem.core.optimisers.common_optimizer.stage import Stage
from golem.core.optimisers.common_optimizer.task import Task, TaskStatusEnum

default_stages = list()


# adaptive parameters

# main evolution process
class EvolvePopulationTask(ElitismTask, MutationTask,
CrossoverTask, RegularizationTask,
SelectionTask, InheritanceTask, Task):
def update_parameters(self, parameters: 'CommonOptimizerParameters'):
parameters = super().update_parameters(parameters)
return parameters

scheme_map = dict()
scheme_map[None] = defaultdict(lambda: 'regularization')
scheme_map['regularization'] = defaultdict(lambda: 'selection')
scheme_map['selection'] = defaultdict(lambda: 'crossover')
scheme_map['crossover'] = defaultdict(lambda: 'mutation')
scheme_map['mutation'] = {TaskStatusEnum.SUCCESS: 'evaluator', TaskStatusEnum.FAIL: None}
scheme_map['evaluator'] = defaultdict(lambda: None)
scheme = Scheme(scheme_map=scheme_map)

nodes = [Elitism(), Mutation(), Crossover(), Regularization(),
Selection(), Inheritance(), Evaluator()]

stop_fun = lambda f, a: a and len(f) >= a[0].graph_optimizer_params.pop_size

def parameter_updater(finished_tasks, parameters):
parameters.new_population = [task.generation for task in finished_tasks]
return parameters

runner = OneThreadRunner()
# runner = ParallelRunner()
default_stages.append(Stage(runner=runner, nodes=nodes, task_builder=EvolvePopulationTask,
scheme=scheme, stop_fun=stop_fun, parameter_updater=parameter_updater))
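
The scheme_map above is a transition table: given the current node name and a task status, it yields the next node, with None marking entry and exit. A standalone walk of the same table on the SUCCESS path (plain strings stand in for TaskStatusEnum) shows the resulting pipeline order:

from collections import defaultdict

transitions = {
    None: defaultdict(lambda: 'regularization'),
    'regularization': defaultdict(lambda: 'selection'),
    'selection': defaultdict(lambda: 'crossover'),
    'crossover': defaultdict(lambda: 'mutation'),
    'mutation': {'SUCCESS': 'evaluator', 'FAIL': None},
    'evaluator': defaultdict(lambda: None),
}

node, order = None, []
while (node := transitions[node]['SUCCESS']) is not None:
    order.append(node)
print(' -> '.join(order))
# regularization -> selection -> crossover -> mutation -> evaluator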
