Fix returning types of evaluators (#223)
* fix

* fix#2

* minor

* fix optional types

* minor

* minor
maypink authored Oct 25, 2023
1 parent b6cce63 commit afa11e6
Showing 3 changed files with 11 additions and 12 deletions.
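
The net effect of the commit: the population evaluators (evaluate_population, evaluate_with_cache) now always return a PopulationT, using an empty list rather than None when no individuals were evaluated successfully, and evaluate_single returns a GraphEvalResult instead of the removed OptionalEvalResult alias. Below is a minimal caller-side sketch of the new contract; Individual and consume_evaluated are illustrative stand-ins, not part of GOLEM's API.

from typing import List


class Individual:
    # Illustrative stand-in for an evaluated individual; not the library's actual class.
    def __init__(self, uid: str, fitness: float):
        self.uid = uid
        self.fitness = fitness


def consume_evaluated(evaluated: List[Individual]) -> List[Individual]:
    # Under the new contract an empty list (not None) signals that nothing was
    # evaluated successfully, so a plain truthiness check suffices and the
    # annotation needs no Optional wrapper.
    if not evaluated:
        raise ValueError('no individuals were evaluated successfully')
    return sorted(evaluated, key=lambda ind: ind.fitness)


# Usage: a non-empty result is sorted by fitness; an empty one raises.
best_first = consume_evaluated([Individual('a', 0.3), Individual('b', 0.1)])
print([ind.uid for ind in best_first])  # ['b', 'a']

Because an empty list is falsy just like None, guards such as "if successful_evals: break" in the parallel dispatcher's fallback loop keep their behaviour; only the declared types become stricter.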
17 changes: 8 additions & 9 deletions golem/core/optimisers/genetic/evaluation.py
@@ -25,8 +25,7 @@
 # at which evolution is not threatened with stagnation at the moment
 STAGNATION_EVALUATION_PERCENTAGE = 0.5

-OptionalEvalResult = Optional[GraphEvalResult]
-EvalResultsList = List[OptionalEvalResult]
+EvalResultsList = List[GraphEvalResult]
 G = TypeVar('G', bound=Serializable)


@@ -153,12 +152,12 @@ def population_evaluation_info(self, pop_size: int, evaluated_pop_size: int):
 f"were evaluated successfully.")

 @abstractmethod
-def evaluate_population(self, individuals: PopulationT) -> Optional[PopulationT]:
+def evaluate_population(self, individuals: PopulationT) -> PopulationT:
 raise NotImplementedError()

 def evaluate_single(self, graph: OptGraph, uid_of_individual: str, with_time_limit: bool = True,
 cache_key: Optional[str] = None,
-logs_initializer: Optional[Tuple[int, pathlib.Path]] = None) -> OptionalEvalResult:
+logs_initializer: Optional[Tuple[int, pathlib.Path]] = None) -> GraphEvalResult:

 graph = self.evaluation_cache.get(cache_key, graph)

@@ -193,7 +192,7 @@ def _evaluate_graph(self, domain_graph: Graph) -> Tuple[Fitness, Graph]:

 return fitness, domain_graph

-def evaluate_with_cache(self, population: PopulationT) -> Optional[PopulationT]:
+def evaluate_with_cache(self, population: PopulationT) -> PopulationT:
 reversed_population = list(reversed(population))
 self._remote_compute_cache(reversed_population)
 evaluated_population = self.evaluate_population(reversed_population)
@@ -239,7 +238,7 @@ def dispatch(self, objective: ObjectiveFunction, timer: Optional[Timer] = None)
 super().dispatch(objective, timer)
 return self.evaluate_with_cache

-def evaluate_population(self, individuals: PopulationT) -> Optional[PopulationT]:
+def evaluate_population(self, individuals: PopulationT) -> PopulationT:
 individuals_to_evaluate, individuals_to_skip = self.split_individuals_to_evaluate(individuals)
 # Evaluate individuals without valid fitness in parallel.
 n_jobs = determine_n_jobs(self._n_jobs, self.logger)
@@ -256,7 +255,7 @@ def evaluate_population(self, individuals: PopulationT) -> Optional[PopulationT]
 if not successful_evals:
 for single_ind in individuals:
 evaluation_result = eval_func(single_ind.graph, single_ind.uid, with_time_limit=False)
-successful_evals = self.apply_evaluation_results([single_ind], [evaluation_result]) or None
+successful_evals = self.apply_evaluation_results([single_ind], [evaluation_result]) or []
 if successful_evals:
 break
 MemoryAnalytics.log(self.logger,
@@ -271,11 +270,11 @@ class SequentialDispatcher(BaseGraphEvaluationDispatcher):
 Usage: call `dispatch(objective_function)` to get evaluation function.
 """

-def evaluate_population(self, individuals: PopulationT) -> Optional[PopulationT]:
+def evaluate_population(self, individuals: PopulationT) -> PopulationT:
 individuals_to_evaluate, individuals_to_skip = self.split_individuals_to_evaluate(individuals)
 evaluation_results = [self.evaluate_single(ind.graph, ind.uid) for ind in individuals_to_evaluate]
 individuals_evaluated = self.apply_evaluation_results(individuals_to_evaluate, evaluation_results)
-evaluated_population = individuals_evaluated + individuals_to_skip or None
+evaluated_population = individuals_evaluated + individuals_to_skip
 return evaluated_population


4 changes: 2 additions & 2 deletions golem/core/optimisers/meta/surrogate_evaluator.py
@@ -5,7 +5,7 @@

 from golem.core.adapter import BaseOptimizationAdapter
 from golem.core.log import Log
-from golem.core.optimisers.genetic.evaluation import OptionalEvalResult, DelegateEvaluator, SequentialDispatcher
+from golem.core.optimisers.genetic.evaluation import DelegateEvaluator, SequentialDispatcher
 from golem.core.optimisers.graph import OptGraph
 from golem.core.optimisers.meta.surrogate_model import SurrogateModel, RandomValuesSurrogateModel
 from golem.core.optimisers.objective.objective import to_fitness, GraphFunction
@@ -30,7 +30,7 @@ def __init__(self,

 def evaluate_single(self, graph: OptGraph, uid_of_individual: str, with_time_limit: bool = True,
 cache_key: Optional[str] = None,
-logs_initializer: Optional[Tuple[int, pathlib.Path]] = None) -> OptionalEvalResult:
+logs_initializer: Optional[Tuple[int, pathlib.Path]] = None) -> GraphEvalResult:
 graph = self.evaluation_cache.get(cache_key, graph)
 if logs_initializer is not None:
 # in case of multiprocessing run
2 changes: 1 addition & 1 deletion test/unit/optimizers/test_evaluation.py
@@ -61,7 +61,7 @@ def test_dispatchers_with_faulty_objectives(objective, dispatcher):
 adapter, population = set_up_tests()

 evaluator = dispatcher.dispatch(objective)
-assert evaluator(population) is None
+assert evaluator(population) == []


 @pytest.mark.parametrize('dispatcher', [
