wip
kasyanovse committed Nov 24, 2023
1 parent f68a039 commit fe38500
Showing 2 changed files with 253 additions and 0 deletions.
116 changes: 116 additions & 0 deletions golem/core/optimisers/genetic/gp_optimizer_new.py
@@ -0,0 +1,116 @@
from typing import Sequence, Union, Any

from golem.core.dag.graph import Graph
from golem.core.optimisers.genetic.gp_params import GPAlgorithmParameters
from golem.core.optimisers.genetic.operators.crossover import Crossover, SinglePredefinedGraphCrossover
from golem.core.optimisers.genetic.operators.elitism import Elitism
from golem.core.optimisers.genetic.operators.inheritance import Inheritance
from golem.core.optimisers.genetic.operators.mutation import Mutation, SinglePredefinedGraphMutation
from golem.core.optimisers.genetic.operators.operator import PopulationT, EvaluationOperator
from golem.core.optimisers.genetic.operators.regularization import Regularization
from golem.core.optimisers.genetic.operators.reproduction import ReproductionController
from golem.core.optimisers.genetic.operators.selection import Selection
from golem.core.optimisers.genetic.parameters.graph_depth import AdaptiveGraphDepth
from golem.core.optimisers.genetic.parameters.operators_prob import init_adaptive_operators_prob
from golem.core.optimisers.genetic.parameters.population_size import init_adaptive_pop_size, PopulationSize
from golem.core.optimisers.objective.objective import Objective
from golem.core.optimisers.opt_history_objects.individual import Individual
from golem.core.optimisers.optimization_parameters import GraphRequirements
from golem.core.optimisers.optimizer import GraphGenerationParams
from golem.core.optimisers.populational_optimizer import PopulationalOptimizer


class EvoGraphOptimizer(PopulationalOptimizer):
    """
    Multi-objective evolutionary graph optimizer named GPComp
    """

    def __init__(self,
                 objective: Objective,
                 initial_graphs: Sequence[Union[Graph, Any]],
                 requirements: GraphRequirements,
                 graph_generation_params: GraphGenerationParams,
                 graph_optimizer_params: GPAlgorithmParameters):
        super().__init__(objective, initial_graphs, requirements, graph_generation_params, graph_optimizer_params)
        # Define genetic operators
        self.regularization = Regularization(graph_optimizer_params, graph_generation_params)
        self.selection = Selection(graph_optimizer_params)
        self.crossover = SinglePredefinedGraphCrossover(graph_optimizer_params, requirements, graph_generation_params)
        self.mutation = SinglePredefinedGraphMutation(graph_optimizer_params, requirements, graph_generation_params)
        self.inheritance = Inheritance(graph_optimizer_params, self.selection)
        self.elitism = Elitism(graph_optimizer_params)
        self.operators = [self.regularization, self.selection, self.crossover,
                          self.mutation, self.inheritance, self.elitism]

        self.reproducer = ReproductionController(parameters=graph_optimizer_params,
                                                 selection=self.selection,
                                                 mutation=self.mutation,
                                                 crossover=self.crossover,
                                                 verifier=self.graph_generation_params.verifier)

        # Define adaptive parameters
        self._pop_size: PopulationSize = init_adaptive_pop_size(graph_optimizer_params, self.generations)
        self._operators_prob = init_adaptive_operators_prob(graph_optimizer_params)
        self._graph_depth = AdaptiveGraphDepth(self.generations,
                                               start_depth=requirements.start_depth,
                                               max_depth=requirements.max_depth,
                                               max_stagnation_gens=graph_optimizer_params.adaptive_depth_max_stagnation,
                                               adaptive=graph_optimizer_params.adaptive_depth)

        # Define initial parameters
        self.requirements.max_depth = self._graph_depth.initial
        self.graph_optimizer_params.pop_size = self._pop_size.initial
        self.initial_individuals = [Individual(graph, metadata=requirements.static_individual_metadata)
                                    for graph in self.initial_graphs]

    def _initial_population(self, evaluator: EvaluationOperator):
        """ Initializes the initial population """
        # Add the initial assumptions to the history as the zero generation
        self._update_population(evaluator(self.initial_individuals), 'initial_assumptions')
        # pop_size = self.graph_optimizer_params.pop_size
        #
        # if len(self.initial_individuals) < pop_size:
        #     self.initial_individuals += self.reproducer._reproduce(population=self.initial_individuals,
        #                                                            evaluator=evaluator)
        #     # Add the extended population to the history
        #     self._update_population(self.initial_individuals, 'extended_initial_assumptions')

    def _evolve_population(self, evaluator: EvaluationOperator) -> PopulationT:
        """ Runs one full evolution cycle """

        # Apply adaptive changes to algorithm parameters
        # such as pop_size and operator probabilities
        self._update_requirements()

        # Regularize the previous population
        individuals_to_select = self.regularization(self.population, evaluator)
        # Reproduce from the previous population to get the next one
        new_population = self.reproducer.reproduce(individuals_to_select, evaluator)

        # Adaptive agent experience collection & learning
        # Must be called after reproduction (which collects the new experience)
        experience = self.mutation.agent_experience
        experience.collect_results(new_population)
        self.mutation.agent.partial_fit(experience)

        # Carry part of the previous population over into the next one
        new_population = self.inheritance(self.population, new_population)
        new_population = self.elitism(self.generations.best_individuals, new_population)
        return new_population

    def _update_requirements(self):
        if not self.generations.is_any_improved:
            self.graph_optimizer_params.mutation_prob, self.graph_optimizer_params.crossover_prob = \
                self._operators_prob.next(self.population)
            self.log.info(
                f'Next mutation proba: {self.graph_optimizer_params.mutation_prob}; '
                f'Next crossover proba: {self.graph_optimizer_params.crossover_prob}')
        self.graph_optimizer_params.pop_size = self._pop_size.next(self.population)
        self.requirements.max_depth = self._graph_depth.next()
        self.log.info(
            f'Next population size: {self.graph_optimizer_params.pop_size}; '
            f'max graph depth: {self.requirements.max_depth}')

        # update requirements in operators
        for operator in self.operators:
            operator.update_requirements(self.graph_optimizer_params, self.requirements)
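
For orientation, a hypothetical usage sketch of the optimizer above. The toy metric, the `initial_graph` placeholder, and the `ObjectiveEvaluate` wiring follow the usual GOLEM conventions and are assumptions, not part of this commit; exact constructor signatures may differ:

from golem.core.optimisers.objective import ObjectiveEvaluate

# Toy single-objective setup: prefer smaller graphs (assumed GOLEM-style wiring)
objective = Objective({'graph_size': lambda graph: len(graph.nodes)})
optimizer = EvoGraphOptimizer(objective=objective,
                              initial_graphs=[initial_graph],  # hypothetical starting Graph
                              requirements=GraphRequirements(),
                              graph_generation_params=GraphGenerationParams(),
                              graph_optimizer_params=GPAlgorithmParameters())
best_graphs = optimizer.optimise(ObjectiveEvaluate(objective))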
137 changes: 137 additions & 0 deletions golem/core/optimisers/genetic/pool.py
@@ -0,0 +1,137 @@
from dataclasses import dataclass
from enum import Enum, auto
from typing import Optional, List, Any, Callable


class ParametersTypesEnum(Enum):
    UNKNOWN = auto()
    OPTIMIZER = auto()
    POOL = auto()
    NODE = auto()

    def __ge__(self, other):
        if self.__class__ is other.__class__:
            return self.value >= other.value
        return NotImplemented

    def __gt__(self, other):
        if self.__class__ is other.__class__:
            return self.value > other.value
        return NotImplemented

    def __next__(self):
        return ParametersTypesEnum(self.value + 1)
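
# Illustration of the comparison helpers and `__next__` above (not part of the
# module; standard Enum semantics apply):
#   ParametersTypesEnum.POOL > ParametersTypesEnum.OPTIMIZER          # True
#   ParametersTypesEnum.NODE >= ParametersTypesEnum.POOL              # True
#   next(ParametersTypesEnum.OPTIMIZER) is ParametersTypesEnum.POOL   # True
#   next(ParametersTypesEnum.NODE)   # ValueError: 5 is not a valid ParametersTypesEnum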


# class Parameters:
#     def __init__(self, type_: ParametersTypesEnum, data: Optional[dict] = None):
#         data = data or dict()
#
#         for k in data:
#             if isinstance(data[k], dict):
#                 data[k] = Parameters(next(type_), data[k])
#         self.type = type_
#         self.__data = data
#
#     def __getitem__(self, keys):
#         data = self.__data
#         for key in keys:
#             data = data[key]
#         return data
#
#     def __setitem__(self, keys, value):
#         data = self.__data
#         for key in keys[:-1]:
#             if key not in data:
#                 data[key] = Parameters(next(self.type))
#             data = data[key]
#         data[keys[-1]] = value
#
#     def __repr__(self):
#         def pp(parameters, indent=0):
#             return '\n' + '\n'.join(f"{' ' * indent}'{key}': {value.type.name + pp(value, indent + 2) if isinstance(value, self.__class__) else value}"
#                                     for key, value in parameters.__data.items())
#         return self.type.name + pp(self)
#
#     def __iter__(self):
#         return (x for x in self.__data.keys())
#
#     def items(self):
#         return (x for x in self.__data.items())
#
#     def filter_by_type(self, type_: ParametersTypesEnum):
#         return [pars for name, pars in self.items()
#                 if isinstance(pars, Parameters) and pars.type is type_]


class Parameters:
    pass


@dataclass
class OptimizerParameters(Parameters):
    pool_parameters: List['PoolParameters']
    n_jobs: int = -1


@dataclass
class PoolParameters(Parameters):
    name: str
    constructor: Callable
    n_jobs: int
    nodes: List['Node']
    scheme: 'Scheme'
    task_constructor: Callable
    task_history: List[Any]


class Optimizer:
    def __init__(self, parameters: OptimizerParameters):
        self.parameters = parameters

    def _evolve_population(self):
        common_parameters = self.parameters
        for pool_params in common_parameters.pool_parameters:
            pool = pool_params.constructor(pool_params, common_parameters)
            # NOTE: `pool.run()` and `common_parameters.update()` are not defined yet in this WIP
            common_parameters.update(pool.run())


class Pool:
    """ Pool of nodes """

    def __init__(self, pool_parameters: PoolParameters, parameters: OptimizerParameters):
        self.name = pool_parameters.name
        self.nodes_map = {node.name: node for node in pool_parameters.nodes}
        self.task_constructor = pool_parameters.task_constructor
        self.scheme = pool_parameters.scheme

        # TODO: raise an error if several nodes share the same name

    def __call__(self, task: 'Task'):
        if task.next not in self.nodes_map:
            raise ValueError(f"Pool {self.name}. Unknown node {task.next}. "
                             f"Existing nodes: {', '.join(self.nodes_map)}.")
        processed_task = task.run_on_node(self.nodes_map[task.next])
        return processed_task


class Node:
    """ Node with an operation """

    def __init__(self, name: str, operation: Callable):
        self.name = name
        self.operation = operation

    def __call__(self, *args, **kwargs):
        return self.operation(*args, **kwargs)


class Task:
    """ Data with parameters for an operation """

    def __init__(self, data: Any, parameters: Any):
        self.data = data
        self.parameters = parameters

    def run_on_node(self, node: Node):
        result = node(self.data, self.parameters)
        return result
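
A minimal wiring sketch for the classes above, illustrative only: the `'double'` node, the lambda operation, and the manually attached `task.next` attribute are assumptions (`Task` does not define `next` in this WIP), and `scheme=None` stands in for the not-yet-existing `Scheme` class:

# Build a one-node pool and push a task through it
node = Node('double', lambda data, params: data * params['factor'])
pool_params = PoolParameters(name='demo', constructor=Pool, n_jobs=1,
                             nodes=[node], scheme=None,
                             task_constructor=Task, task_history=[])
pool = Pool(pool_params, OptimizerParameters(pool_parameters=[pool_params]))

task = Task(data=3, parameters={'factor': 2})
task.next = 'double'  # hypothetical: routing attribute not yet defined on Task
print(pool(task))     # -> 6, via Task.run_on_node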
