diff --git a/golem/core/optimisers/common_optimizer/common_optimizer.py b/golem/core/optimisers/common_optimizer/common_optimizer.py
index d0878394..a7ca328f 100644
--- a/golem/core/optimisers/common_optimizer/common_optimizer.py
+++ b/golem/core/optimisers/common_optimizer/common_optimizer.py
@@ -5,15 +5,18 @@
 from golem.core.dag.graph import Graph
 
 from golem.core.optimisers.common_optimizer.node import Node
+from golem.core.optimisers.common_optimizer.old_config import default_stages
 from golem.core.optimisers.common_optimizer.scheme import Scheme
 from golem.core.optimisers.common_optimizer.stage import Stage
 from golem.core.optimisers.common_optimizer.task import Task, TaskStatusEnum
-from golem.core.optimisers.genetic.operators.operator import PopulationT
+from golem.core.optimisers.genetic.operators.operator import PopulationT, EvaluationOperator
 from golem.core.optimisers.graph import OptGraph
 from golem.core.optimisers.objective import Objective, ObjectiveFunction
+from golem.core.optimisers.opt_history_objects.individual import Individual
 from golem.core.optimisers.opt_history_objects.opt_history import OptHistory
 from golem.core.optimisers.optimization_parameters import OptimizationParameters
 from golem.core.optimisers.optimizer import GraphOptimizer, GraphGenerationParams, AlgorithmParameters
+from golem.core.optimisers.populational_optimizer import PopulationalOptimizer
 from golem.core.optimisers.timer import OptimisationTimer
 
 
@@ -21,6 +24,8 @@
 class CommonOptimizerParameters:
     _run: bool
     generations: List[PopulationT]
+    population: PopulationT
+    evaluator: Any
 
     objective: Objective
     initial_graphs: Sequence[Union[Graph, Any]]
@@ -31,11 +36,12 @@ class CommonOptimizerParameters:
     history: OptHistory
 
 
-class CommonOptimizer(GraphOptimizer):
+class CommonOptimizer(PopulationalOptimizer):
     __parameters_attrs = ('objective', 'initial_graphs', 'requirements', 'graph_generation_params',
-                          'graph_optimizer_params', 'history', 'stages', '_run', 'generations')
+                          'graph_optimizer_params', 'history', 'stages', '_run',
+                          'generations', 'population', 'evaluator')
     __parameters_allowed_to_change = ('requirements', 'graph_generation_params',
-                                      'graph_optimizer_params', 'stages', '_run', 'generations')
+                                      'graph_optimizer_params', 'stages', '_run', 'population')
 
     def __init__(self,
                  objective: Objective,
@@ -51,11 +57,13 @@ def __init__(self,
                          graph_generation_params=graph_generation_params,
                          graph_optimizer_params=graph_optimizer_params)
 
-        self.timer = OptimisationTimer(timeout=self.requirements.timeout)
-        self.generations = list()
-        self.stages = stages
+        self.stages = default_stages
         self._run = True
 
+        self.requirements.max_depth = 100  # TODO fix
+        self.initial_individuals = [Individual(graph, metadata=requirements.static_individual_metadata)
+                                    for graph in self.initial_graphs]
+
     @property
     def parameters(self):
         return CommonOptimizerParameters(**{attr: getattr(self, attr) for attr in self.__parameters_attrs})
@@ -67,7 +76,14 @@ def parameters(self, parameters: CommonOptimizerParameters):
         for attr in self.__parameters_allowed_to_change:
             setattr(self, attr, getattr(parameters, attr))
 
-    def optimise(self, objective: ObjectiveFunction):
-        while self._run:
-            for i_stage in range(len(self.stages)):
-                self.parameters = self.stages[i_stage].run(self.parameters)
+    def _initial_population(self, evaluator: EvaluationOperator):
+        self.population = evaluator(self.initial_individuals)
+        self._update_population(self.population, 'initial_assumptions')
+
+    def _evolve_population(self, evaluator: EvaluationOperator) -> PopulationT:
+        """ Run all stages of a single evolution cycle over the current population """
+        self.evaluator = evaluator
+
+        for i_stage in range(len(self.stages)):
+            self.parameters = self.stages[i_stage].run(self.parameters)
+        return self.population
diff --git a/golem/core/optimisers/common_optimizer/node.py b/golem/core/optimisers/common_optimizer/node.py
index 16415899..d635407d 100644
--- a/golem/core/optimisers/common_optimizer/node.py
+++ b/golem/core/optimisers/common_optimizer/node.py
@@ -8,8 +8,9 @@
 class Node:
     """ Node with operation """
 
-    name: str
-    operation: Callable[[Task], Union[Task, List[Task]]]
+    def __init__(self, name: str, operation: Callable[[Task], Union[Task, List[Task]]]):
+        self.name = name
+        self.operation = operation
 
     def __call__(self, *args, **kwargs):
         results = self.operation(*args, **kwargs)
diff --git a/golem/core/optimisers/common_optimizer/nodes/evaluator.py b/golem/core/optimisers/common_optimizer/nodes/evaluator.py
new file mode 100644
index 00000000..895f3f71
--- /dev/null
+++ b/golem/core/optimisers/common_optimizer/nodes/evaluator.py
@@ -0,0 +1,26 @@
+from golem.core.optimisers.common_optimizer.node import Node
+from golem.core.optimisers.common_optimizer.task import TaskStatusEnum, TaskMixin
+
+
+class EvaluatorTask(TaskMixin):
+    def __init__(self, parameters: 'CommonOptimizerParameters'):
+        super().__init__(parameters)
+        self.evaluator = parameters.evaluator
+
+    def update_parameters(self, parameters: 'CommonOptimizerParameters'):
+        return super().update_parameters(parameters)
+
+
+class Evaluator(Node):
+    def __init__(self, name: str = 'evaluator'):
+        self.name = name
+
+    def __call__(self, task: EvaluatorTask):
+        # `generation` is provided by the other task mixins combined in the stage's task_builder
+        evaluated_inds = task.evaluator(task.generation)
+        if evaluated_inds:
+            task.generation = evaluated_inds
+            task.status = TaskStatusEnum.SUCCESS
+        else:
+            task.status = TaskStatusEnum.FAIL
+        return [task]
diff --git a/golem/core/optimisers/common_optimizer/nodes/mutation.py b/golem/core/optimisers/common_optimizer/nodes/mutation.py
index 8108dffe..8ef4d7fd 100644
--- a/golem/core/optimisers/common_optimizer/nodes/mutation.py
+++ b/golem/core/optimisers/common_optimizer/nodes/mutation.py
@@ -2,9 +2,10 @@
 from golem.core.optimisers.common_optimizer.node import Node
 from golem.core.optimisers.common_optimizer.task import Task, TaskStatusEnum, TaskMixin
 from golem.core.optimisers.genetic.operators.base_mutations import base_mutations_repo
 from golem.core.optimisers.opt_history_objects.individual import Individual
 from golem.core.optimisers.opt_history_objects.parent_operator import ParentOperator
 
+
 class MutationTask(TaskMixin):
     def __init__(self, parameters: 'CommonOptimizerParameters'):
         super().__init__(parameters)
diff --git a/golem/core/optimisers/common_optimizer/nodes/old_crossover.py b/golem/core/optimisers/common_optimizer/nodes/old_crossover.py
new file mode 100644
index 00000000..a3c95fa4
--- /dev/null
+++ b/golem/core/optimisers/common_optimizer/nodes/old_crossover.py
@@ -0,0 +1,36 @@
+from golem.core.optimisers.common_optimizer.node import Node
+from golem.core.optimisers.genetic.operators.crossover import Crossover as OldCrossover
+from golem.core.optimisers.common_optimizer.task import TaskStatusEnum, TaskMixin
+
+
+class CrossoverTask(TaskMixin):
+    def __init__(self, parameters: 'CommonOptimizerParameters'):
+        super().__init__(parameters)
+        self.graph_optimizer_params = parameters.graph_optimizer_params
+        self.requirements = parameters.requirements
+        self.graph_generation_params = parameters.graph_generation_params
+        self.generation = parameters.population
+
+    def update_parameters(self, parameters: 'CommonOptimizerParameters'):
+        return super().update_parameters(parameters)
+
+
+class Crossover(Node):
+    def __init__(self, name: str = 'crossover'):
+        self.name = name
+        self._crossover = None
+
+    def __call__(self, task: CrossoverTask):
+        if self._crossover is None:
+            self._crossover = OldCrossover(task.graph_optimizer_params,
+                                           task.requirements,
+                                           task.graph_generation_params)
+        task.generation = self._crossover(task.generation)
+        task.status = TaskStatusEnum.SUCCESS
+
+        new_tasks = list()
+        for new_ind in task.generation:
+            new_task = task.copy()
+            new_task.generation = [new_ind]
+            new_tasks.append(new_task)
+        return new_tasks
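Aside: the new operator nodes in this PR all follow the same adapter pattern: a TaskMixin subclass that snapshots what it needs from the shared CommonOptimizerParameters, plus a Node subclass that processes tasks and returns a list of them. A minimal pass-through sketch of that pattern, assuming only the Node/TaskMixin interfaces shown in this diff (the Identity/IdentityTask names are illustrative, not part of the PR):

    from golem.core.optimisers.common_optimizer.node import Node
    from golem.core.optimisers.common_optimizer.task import TaskStatusEnum, TaskMixin


    class IdentityTask(TaskMixin):
        """ Carries only the current generation, copied from the shared parameters """
        def __init__(self, parameters: 'CommonOptimizerParameters'):
            super().__init__(parameters)
            self.generation = parameters.population


    class Identity(Node):
        """ Pass-through node: marks the task successful without changing it """
        def __init__(self, name: str = 'identity'):
            self.name = name

        def __call__(self, task: IdentityTask):
            task.status = TaskStatusEnum.SUCCESS
            return [task]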
diff --git a/golem/core/optimisers/common_optimizer/nodes/old_elitism.py b/golem/core/optimisers/common_optimizer/nodes/old_elitism.py
new file mode 100644
index 00000000..02f846da
--- /dev/null
+++ b/golem/core/optimisers/common_optimizer/nodes/old_elitism.py
@@ -0,0 +1,27 @@
+from golem.core.optimisers.common_optimizer.node import Node
+from golem.core.optimisers.common_optimizer.task import TaskStatusEnum, TaskMixin
+from golem.core.optimisers.genetic.operators.elitism import Elitism as OldElitism
+
+
+class ElitismTask(TaskMixin):
+    def __init__(self, parameters: 'CommonOptimizerParameters'):
+        super().__init__(parameters)
+        self.graph_optimizer_params = parameters.graph_optimizer_params
+        self.generation = parameters.population
+        self.best_individuals = None  # TODO
+
+    def update_parameters(self, parameters: 'CommonOptimizerParameters'):
+        return super().update_parameters(parameters)
+
+
+class Elitism(Node):
+    def __init__(self, name: str = 'elitism'):
+        self.name = name
+        self._elitism = None
+
+    def __call__(self, task: ElitismTask):
+        if self._elitism is None:
+            self._elitism = OldElitism(task.graph_optimizer_params)
+        task.generation = self._elitism(task.best_individuals, task.generation)
+        task.status = TaskStatusEnum.SUCCESS
+        return [task]
diff --git a/golem/core/optimisers/common_optimizer/nodes/old_inheritance.py b/golem/core/optimisers/common_optimizer/nodes/old_inheritance.py
new file mode 100644
index 00000000..e44b13cb
--- /dev/null
+++ b/golem/core/optimisers/common_optimizer/nodes/old_inheritance.py
@@ -0,0 +1,29 @@
+from golem.core.optimisers.common_optimizer.node import Node
+from golem.core.optimisers.common_optimizer.task import TaskStatusEnum, TaskMixin
+from golem.core.optimisers.genetic.operators.inheritance import Inheritance as OldInheritance
+from golem.core.optimisers.genetic.operators.selection import Selection as OldSelection
+
+
+class InheritanceTask(TaskMixin):
+    def __init__(self, parameters: 'CommonOptimizerParameters'):
+        super().__init__(parameters)
+        self.graph_optimizer_params = parameters.graph_optimizer_params
+        self.generation = parameters.population
+        self.origin_generation = parameters.population
+
+    def update_parameters(self, parameters: 'CommonOptimizerParameters'):
+        return super().update_parameters(parameters)
+
+
+class Inheritance(Node):
+    def __init__(self, name: str = 'inheritance'):
+        self.name = name
+        self._inheritance = None
+
+    def __call__(self, task: InheritanceTask):
+        if self._inheritance is None:
+            selection = OldSelection(task.graph_optimizer_params)
+            self._inheritance = OldInheritance(task.graph_optimizer_params, selection)
+        task.generation = self._inheritance(task.origin_generation, task.generation)
+        task.status = TaskStatusEnum.SUCCESS
+        return [task]
diff --git a/golem/core/optimisers/common_optimizer/nodes/old_mutation.py b/golem/core/optimisers/common_optimizer/nodes/old_mutation.py
new file mode 100644
index 00000000..11a91502
--- /dev/null
+++ b/golem/core/optimisers/common_optimizer/nodes/old_mutation.py
@@ -0,0 +1,34 @@
+from golem.core.optimisers.common_optimizer.node import Node
+from golem.core.optimisers.common_optimizer.task import TaskStatusEnum, TaskMixin
+from golem.core.optimisers.genetic.operators.mutation import Mutation as OldMutation
+
+
+class MutationTask(TaskMixin):
+    def __init__(self, parameters: 'CommonOptimizerParameters'):
+        super().__init__(parameters)
+        self.graph_optimizer_params = parameters.graph_optimizer_params
+        self.requirements = parameters.requirements
+        self.graph_generation_params = parameters.graph_generation_params
+        self.generation = parameters.population
+
+    def update_parameters(self, parameters: 'CommonOptimizerParameters'):
+        return super().update_parameters(parameters)
+
+
+class Mutation(Node):
+    def __init__(self, name: str = 'mutation'):
+        self.name = name
+        self._mutation = None
+
+    def __call__(self, task: MutationTask):
+        if self._mutation is None:
+            self._mutation = OldMutation(task.graph_optimizer_params,
+                                         task.requirements,
+                                         task.graph_generation_params)
+        ind = self._mutation(task.generation)
+        if not ind:
+            task.status = TaskStatusEnum.FAIL
+        else:
+            task.generation = [ind]
+            task.status = TaskStatusEnum.SUCCESS
+        return [task]
diff --git a/golem/core/optimisers/common_optimizer/nodes/old_regularization.py b/golem/core/optimisers/common_optimizer/nodes/old_regularization.py
new file mode 100644
index 00000000..7853d4b0
--- /dev/null
+++ b/golem/core/optimisers/common_optimizer/nodes/old_regularization.py
@@ -0,0 +1,29 @@
+from golem.core.optimisers.common_optimizer.node import Node
+from golem.core.optimisers.common_optimizer.task import TaskStatusEnum, TaskMixin
+from golem.core.optimisers.genetic.operators.regularization import Regularization as OldRegularization
+
+
+class RegularizationTask(TaskMixin):
+    def __init__(self, parameters: 'CommonOptimizerParameters'):
+        super().__init__(parameters)
+        self.graph_optimizer_params = parameters.graph_optimizer_params
+        self.graph_generation_params = parameters.graph_generation_params
+        self.generation = parameters.population
+        self.evaluator = parameters.evaluator
+
+    def update_parameters(self, parameters: 'CommonOptimizerParameters'):
+        return super().update_parameters(parameters)
+
+
+class Regularization(Node):
+    def __init__(self, name: str = 'regularization'):
+        self.name = name
+        self._regularization = None
+
+    def __call__(self, task: RegularizationTask):
+        if self._regularization is None:
+            self._regularization = OldRegularization(task.graph_optimizer_params,
+                                                     task.graph_generation_params)
+        task.generation = self._regularization(task.generation, task.evaluator)
+        task.status = TaskStatusEnum.SUCCESS
+        return [task]
diff --git a/golem/core/optimisers/common_optimizer/nodes/old_selection.py b/golem/core/optimisers/common_optimizer/nodes/old_selection.py
new file mode 100644
index 00000000..c83ac341
--- /dev/null
+++ b/golem/core/optimisers/common_optimizer/nodes/old_selection.py
@@ -0,0 +1,26 @@
+from golem.core.optimisers.common_optimizer.node import Node
+from golem.core.optimisers.common_optimizer.task import TaskStatusEnum, TaskMixin
+from golem.core.optimisers.genetic.operators.selection import Selection as OldSelection
+
+
+class SelectionTask(TaskMixin):
+    def __init__(self, parameters: 'CommonOptimizerParameters'):
+        super().__init__(parameters)
+        self.graph_optimizer_params = parameters.graph_optimizer_params
+        self.generation = parameters.population
+
+    def update_parameters(self, parameters: 'CommonOptimizerParameters'):
+        return super().update_parameters(parameters)
+
+
+class Selection(Node):
+    def __init__(self, name: str = 'selection'):
+        self.name = name
+        self._selection = None
+
+    def __call__(self, task: SelectionTask):
+        if self._selection is None:
+            self._selection = OldSelection(task.graph_optimizer_params)
+        task.generation = self._selection(task.generation)
+        task.status = TaskStatusEnum.SUCCESS
+        return [task]
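Aside: each old_* wrapper above defers constructing its legacy GOLEM operator until the first task arrives, because the configuration (graph_optimizer_params and friends) travels with the task rather than with the node. A generic sketch of that shape, assuming a legacy operator class constructed from graph_optimizer_params alone (OperatorNode is illustrative, not part of the PR):

    from golem.core.optimisers.common_optimizer.node import Node
    from golem.core.optimisers.common_optimizer.task import TaskStatusEnum


    class OperatorNode(Node):
        """ Wraps a legacy operator class, building it lazily from the first task """
        def __init__(self, name: str, operator_cls):
            self.name = name
            self._operator_cls = operator_cls
            self._operator = None

        def __call__(self, task):
            if self._operator is None:
                # the task, not the node, carries the configured parameters
                self._operator = self._operator_cls(task.graph_optimizer_params)
            task.generation = self._operator(task.generation)
            task.status = TaskStatusEnum.SUCCESS
            return [task]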
diff --git a/golem/core/optimisers/common_optimizer/old_config.py b/golem/core/optimisers/common_optimizer/old_config.py
new file mode 100644
index 00000000..b38e600a
--- /dev/null
+++ b/golem/core/optimisers/common_optimizer/old_config.py
@@ -0,0 +1,60 @@
+""" Module with genetic optimization settings
+    that reproduce the behavior of GOLEM's default
+    genetic optimization """
+from collections import defaultdict
+
+from golem.core.optimisers.common_optimizer.nodes.evaluator import Evaluator
+from golem.core.optimisers.common_optimizer.nodes.old_crossover import Crossover, CrossoverTask
+from golem.core.optimisers.common_optimizer.nodes.old_elitism import Elitism, ElitismTask
+from golem.core.optimisers.common_optimizer.nodes.old_inheritance import Inheritance, InheritanceTask
+from golem.core.optimisers.common_optimizer.nodes.old_mutation import Mutation, MutationTask
+from golem.core.optimisers.common_optimizer.nodes.old_regularization import Regularization, RegularizationTask
+from golem.core.optimisers.common_optimizer.nodes.old_selection import Selection, SelectionTask
+from golem.core.optimisers.common_optimizer.runner import ParallelRunner, OneThreadRunner
+from golem.core.optimisers.common_optimizer.scheme import Scheme
+from golem.core.optimisers.common_optimizer.stage import Stage
+from golem.core.optimisers.common_optimizer.task import Task, TaskStatusEnum
+
+default_stages = list()
+
+
+# adaptive parameters
+
+# main evolution process
+class EvolvePopulationTask(ElitismTask, MutationTask,
+                           CrossoverTask, RegularizationTask,
+                           SelectionTask, InheritanceTask, Task):
+    def update_parameters(self, parameters: 'CommonOptimizerParameters'):
+        parameters = super().update_parameters(parameters)
+        return parameters
+
+
+# route each task to the next node depending on its current node and status
+scheme_map = dict()
+scheme_map[None] = defaultdict(lambda: 'regularization')
+scheme_map['regularization'] = defaultdict(lambda: 'selection')
+scheme_map['selection'] = defaultdict(lambda: 'crossover')
+scheme_map['crossover'] = defaultdict(lambda: 'mutation')
+scheme_map['mutation'] = {TaskStatusEnum.SUCCESS: 'evaluator', TaskStatusEnum.FAIL: None}
+scheme_map['evaluator'] = defaultdict(lambda: None)
+scheme = Scheme(scheme_map=scheme_map)
+
+nodes = [Elitism(), Mutation(), Crossover(), Regularization(),
+         Selection(), Inheritance(), Evaluator()]
+
+
+def stop_fun(finished_tasks, tasks):
+    """ Stop the stage once enough tasks have finished to fill a population """
+    return bool(tasks) and len(finished_tasks) >= tasks[0].graph_optimizer_params.pop_size
+
+
+def parameter_updater(finished_tasks, parameters):
+    parameters.population = [ind for task in finished_tasks for ind in task.generation]
+    return parameters
+
+
+runner = OneThreadRunner()
+# runner = ParallelRunner()
+
+default_stages.append(Stage(runner=runner, nodes=nodes, task_builder=EvolvePopulationTask,
+                            scheme=scheme, stop_fun=stop_fun, parameter_updater=parameter_updater))
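Aside: scheme_map above is a routing table: scheme_map[current_node][task_status] names the next node, with None as both entry point and exit. A failed mutation routes to None, so that task is dropped and the stage keeps producing tasks until stop_fun is satisfied. A standalone snippet that walks the table to show the default route (plain strings stand in for TaskStatusEnum, so it runs without GOLEM installed):

    from collections import defaultdict

    scheme_map = {
        None: defaultdict(lambda: 'regularization'),
        'regularization': defaultdict(lambda: 'selection'),
        'selection': defaultdict(lambda: 'crossover'),
        'crossover': defaultdict(lambda: 'mutation'),
        'mutation': {'SUCCESS': 'evaluator', 'FAIL': None},
        'evaluator': defaultdict(lambda: None),
    }

    node, route = None, []
    while True:
        node = scheme_map[node]['SUCCESS']  # assume every node succeeds
        if node is None:
            break
        route.append(node)

    print(' -> '.join(route))
    # regularization -> selection -> crossover -> mutation -> evaluator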