move 'move_climb' method to core_optimizer; remove unused arguments and clean up class inheritance.
SimonBlanke committed Dec 7, 2024
1 parent 4a3e6ec commit ab60a2a
Showing 30 changed files with 639 additions and 122 deletions.
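The change that repeats across the hunks below is a constructor cleanup: the optimizers no longer take *args/**kwargs and forward them blindly to the parent class; every parameter is an explicit keyword argument, and arguments a class never uses (epsilon, distribution and n_neighbours in the first hunk) are removed. A minimal sketch of the before/after pattern, using hypothetical, simplified classes rather than the library's real signatures:

```python
# Sketch of the constructor cleanup in this commit (hypothetical classes;
# the real optimizers take more parameters).
class Base:
    def __init__(self, search_space, random_state=None, nth_process=None):
        self.search_space = search_space
        self.random_state = random_state
        self.nth_process = nth_process


class ChildBefore(Base):
    def __init__(self, *args, extra=10, **kwargs):
        # opaque: the valid keywords are invisible in this signature
        super().__init__(*args, **kwargs)
        self.extra = extra


class ChildAfter(Base):
    def __init__(self, search_space, random_state=None, nth_process=None, extra=10):
        # explicit forwarding: the full parameter set is documented by the signature,
        # and an unsupported keyword fails at the call site instead of deeper in the chain
        super().__init__(
            search_space=search_space,
            random_state=random_state,
            nth_process=nth_process,
        )
        self.extra = extra


opt = ChildAfter(search_space={"x": range(10)}, random_state=1, extra=5)
```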
@@ -47,11 +47,6 @@ def __init__(
random_state: int = None,
rand_rest_p: float = 0,
nth_process: int = None,
epsilon: float = 0.03,
distribution: Literal[
"normal", "laplace", "gumbel", "logistic"
] = "normal",
n_neighbours: int = 3,
n_positions=4,
pattern_size=0.25,
reduction=0.9,
@@ -63,9 +58,6 @@ def __init__(
random_state=random_state,
rand_rest_p=rand_rest_p,
nth_process=nth_process,
epsilon=epsilon,
distribution=distribution,
n_neighbours=n_neighbours,
n_positions=n_positions,
pattern_size=pattern_size,
reduction=reduction,
@@ -37,10 +37,12 @@ def __init__(
initialize: Dict[str, int] = {"grid": 4, "random": 2, "vertices": 4},
constraints: List[Dict[str, callable]] = [],
random_state: int = None,
nth_process: int = None,
):
super().__init__(
search_space=search_space,
initialize=initialize,
constraints=constraints,
random_state=random_state,
nth_process=nth_process,
)
@@ -13,6 +13,15 @@

from .utils import set_random_seed, move_random

from numpy.random import normal, laplace, logistic, gumbel

dist_dict = {
"normal": normal,
"laplace": laplace,
"logistic": logistic,
"gumbel": gumbel,
}


class CoreOptimizer(SearchTracker):
def __init__(
@@ -51,6 +60,18 @@ def wrapper(self, *args, **kwargs):

return wrapper

def move_climb(
self, pos, epsilon=0.03, distribution="normal", epsilon_mod=1
):
while True:
sigma = self.conv.max_positions * epsilon * epsilon_mod
pos_normal = dist_dict[distribution](pos, sigma, pos.shape)
pos = self.conv2pos(pos_normal)

if self.conv.not_in_constraint(pos):
return pos
epsilon_mod *= 1.01

def conv2pos(self, pos):
# position to int
r_pos = np.rint(pos)
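For orientation, here is a self-contained sketch of what the relocated move_climb does, with simplified stand-in names (no constraint re-sampling loop, no conv object) rather than the library's actual API:

```python
# Illustrative stand-in for CoreOptimizer.move_climb (simplified; the real method
# keeps re-sampling while the position violates constraints and uses self.conv).
import numpy as np
from numpy.random import normal, laplace, logistic, gumbel

dist_dict = {"normal": normal, "laplace": laplace, "logistic": logistic, "gumbel": gumbel}


class CoreOptimizerSketch:
    def __init__(self, max_positions):
        self.max_positions = np.asarray(max_positions)

    def conv2pos(self, pos):
        # round to the integer grid and clip into the search-space bounds
        return np.clip(np.rint(pos).astype(int), 0, self.max_positions - 1)

    def move_climb(self, pos, epsilon=0.03, distribution="normal", epsilon_mod=1):
        # step size scales with the search-space size, epsilon and a caller-supplied modifier
        sigma = self.max_positions * epsilon * epsilon_mod
        return self.conv2pos(dist_dict[distribution](pos, sigma, pos.shape))


class HillClimberSketch(CoreOptimizerSketch):
    def iterate(self, pos_current, epsilon=0.03, distribution="normal"):
        # subclasses now pass their own parameters instead of the base class storing them
        return self.move_climb(pos_current, epsilon=epsilon, distribution=distribution)


opt = HillClimberSketch(max_positions=[100, 100])
print(opt.iterate(np.array([50, 50])))
```

Centralizing the step logic in CoreOptimizer is presumably what allows PatternSearch (further down) to drop HillClimbingOptimizer as its base class.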
@@ -32,7 +32,15 @@ class EnsembleOptimizer(SMBO):

def __init__(
self,
*args,
search_space,
initialize={"grid": 4, "random": 2, "vertices": 4},
constraints=[],
random_state=None,
rand_rest_p=0,
nth_process=None,
epsilon=0.03,
distribution="normal",
n_neighbours=3,
estimators=[
GradientBoostingRegressor(n_estimators=5),
# DecisionTreeRegressor(),
@@ -43,10 +51,25 @@ def __init__(
warm_start_smbo=None,
max_sample_size=10000000,
sampling={"random": 1000000},
replacement=True,
warnings=100000000,
**kwargs
**kwargs,
):
super().__init__(*args, **kwargs)
super().__init__(
search_space=search_space,
initialize=initialize,
constraints=constraints,
random_state=random_state,
rand_rest_p=rand_rest_p,
nth_process=nth_process,
epsilon=epsilon,
distribution=distribution,
n_neighbours=n_neighbours,
warm_start_smbo=warm_start_smbo,
max_sample_size=max_sample_size,
sampling=sampling,
replacement=replacement,
)
self.estimators = estimators
self.regr = EnsembleRegressor(estimators)
self.xi = xi
@@ -13,15 +13,29 @@ class RandomAnnealingOptimizer(HillClimbingOptimizer):

def __init__(
self,
*args,
search_space,
initialize={"grid": 4, "random": 2, "vertices": 4},
constraints=[],
random_state=None,
rand_rest_p=0,
nth_process=None,
epsilon=0.03,
distribution="normal",
n_neighbours=3,
annealing_rate=0.98,
start_temp=10,
**kwargs,
):
super().__init__(*args, **kwargs)
super().__init__(
search_space=search_space,
initialize=initialize,
constraints=constraints,
random_state=random_state,
rand_rest_p=rand_rest_p,
nth_process=nth_process,
epsilon=epsilon,
distribution=distribution,
n_neighbours=n_neighbours,
)
self.epsilon = epsilon
self.distribution = distribution
self.n_neighbours = n_neighbours
@@ -32,7 +46,12 @@ def __init__(
@HillClimbingOptimizer.track_new_pos
@HillClimbingOptimizer.random_iteration
def iterate(self):
pos = self.move_climb(self.pos_current, epsilon_mod=self.temp)
pos = self.move_climb(
self.pos_current,
epsilon=self.epsilon,
distribution=self.distribution,
epsilon_mod=self.temp,
)
self.temp = self.temp * self.annealing_rate

return pos
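A short illustration of how the temperature enters the step size here: it is passed as epsilon_mod, so sigma shrinks geometrically as temp decays (illustrative numbers, not the library's defaults):

```python
# Effective step size of RandomAnnealing over a few iterations (illustrative values).
epsilon = 0.03
temp = 10            # start_temp
annealing_rate = 0.98
max_position = 100   # stand-in for self.conv.max_positions in one dimension

for i in range(5):
    # mirrors sigma = self.conv.max_positions * epsilon * epsilon_mod with epsilon_mod=self.temp
    sigma = max_position * epsilon * temp
    print(f"iteration {i}: temp={temp:.2f}, sigma={sigma:.2f}")
    temp *= annealing_rate  # the search gradually narrows into a local hill climb
```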
@@ -7,6 +7,7 @@
from scipy.spatial.distance import cdist

from ..smb_opt.smbo import SMBO
from ..local_opt import HillClimbingOptimizer


class SubSpace:
@@ -58,7 +59,9 @@ def lipschitz_bound_(self, score, K=1):
furthest_pos_.append(dim_array[0])
furthest_pos = np.array(furthest_pos_)

dist = cdist(furthest_pos.reshape(1, -1), self.center_pos.reshape(1, -1))
dist = cdist(
furthest_pos.reshape(1, -1), self.center_pos.reshape(1, -1)
)

self.lipschitz_bound = score + K * dist

@@ -71,8 +74,31 @@ class DirectAlgorithm(SMBO):
optimizer_type = "sequential"
computationally_expensive = True

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def __init__(
self,
search_space,
initialize={"grid": 4, "random": 2, "vertices": 4},
constraints=[],
random_state=None,
rand_rest_p=0,
nth_process=None,
warm_start_smbo=None,
max_sample_size=10000000,
sampling={"random": 1000000},
replacement=True,
):
super().__init__(
search_space=search_space,
initialize=initialize,
constraints=constraints,
random_state=random_state,
rand_rest_p=rand_rest_p,
nth_process=nth_process,
warm_start_smbo=warm_start_smbo,
max_sample_size=max_sample_size,
sampling=sampling,
replacement=replacement,
)

self.subspace_l = []

@@ -52,8 +52,31 @@ class LipschitzOptimizer(SMBO):
optimizer_type = "sequential"
computationally_expensive = True

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def __init__(
self,
search_space,
initialize={"grid": 4, "random": 2, "vertices": 4},
constraints=[],
random_state=None,
rand_rest_p=0,
nth_process=None,
warm_start_smbo=None,
max_sample_size=10000000,
sampling={"random": 1000000},
replacement=True,
):
super().__init__(
search_space=search_space,
initialize=initialize,
constraints=constraints,
random_state=random_state,
rand_rest_p=rand_rest_p,
nth_process=nth_process,
warm_start_smbo=warm_start_smbo,
max_sample_size=max_sample_size,
sampling=sampling,
replacement=replacement,
)

def finish_initialization(self):
self.all_pos_comb = self._all_possible_pos()
@@ -5,7 +5,8 @@
import random
import numpy as np

from ..local_opt import HillClimbingOptimizer
from ..base_optimizer import BaseOptimizer
from ..local_opt.hill_climbing_optimizer import HillClimbingOptimizer


def max_list_idx(list_):
@@ -14,7 +15,7 @@ def max_list_idx(list_):
return max_item_idx[-1:][0]


class PatternSearch(HillClimbingOptimizer):
class PatternSearch(BaseOptimizer):
name = "Pattern Search"
_name_ = "pattern_search"
__name__ = "PatternSearch"
@@ -23,9 +24,25 @@ class PatternSearch(HillClimbingOptimizer):
computationally_expensive = False

def __init__(
self, *args, n_positions=4, pattern_size=0.25, reduction=0.9, **kwargs
self,
search_space,
initialize={"grid": 4, "random": 2, "vertices": 4},
constraints=[],
random_state=None,
rand_rest_p=0,
nth_process=None,
n_positions=4,
pattern_size=0.25,
reduction=0.9,
):
super().__init__(*args, **kwargs)
super().__init__(
search_space=search_space,
initialize=initialize,
constraints=constraints,
random_state=random_state,
rand_rest_p=rand_rest_p,
nth_process=nth_process,
)

self.n_positions = n_positions
self.pattern_size = pattern_size
@@ -63,10 +80,12 @@ def generate_pattern(self, current_position):
pattern_pos_l.append(pos_pattern_p)
pattern_pos_l.append(pos_pattern_n)

self.pattern_pos_l = list(random.sample(pattern_pos_l, self.n_positions_))
self.pattern_pos_l = list(
random.sample(pattern_pos_l, self.n_positions_)
)

@HillClimbingOptimizer.track_new_pos
@HillClimbingOptimizer.random_iteration
@BaseOptimizer.track_new_pos
@BaseOptimizer.random_iteration
def iterate(self):
while True:
pos_new = self.pattern_pos_l[0]
@@ -80,9 +99,9 @@ def finish_initialization(self):
self.generate_pattern(self.pos_current)
self.search_state = "iter"

@HillClimbingOptimizer.track_new_score
@BaseOptimizer.track_new_score
def evaluate(self, score_new):
super(HillClimbingOptimizer, self).evaluate(score_new)
BaseOptimizer.evaluate(self, score_new)
if len(self.scores_valid) == 0:
return

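One subtlety in the PatternSearch hunk above: once the base class changes from HillClimbingOptimizer to BaseOptimizer, the old call super(HillClimbingOptimizer, self).evaluate(score_new) would raise a TypeError, because HillClimbingOptimizer is no longer in the instance's method resolution order; the explicit BaseOptimizer.evaluate(self, score_new) avoids that. A minimal illustration with hypothetical, stripped-down classes:

```python
# Minimal illustration of the evaluate() change (hypothetical, simplified classes).
class BaseOptimizer:
    def evaluate(self, score_new):
        print("base-class bookkeeping for score", score_new)


class PatternSearch(BaseOptimizer):
    def evaluate(self, score_new):
        # explicit parent call; super(HillClimbingOptimizer, self) would fail here
        # because that class is no longer an ancestor of PatternSearch
        BaseOptimizer.evaluate(self, score_new)
        print("pattern-search-specific scoring logic")


PatternSearch().evaluate(0.5)
```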
@@ -22,8 +22,30 @@ class PowellsMethod(HillClimbingOptimizer):
optimizer_type = "global"
computationally_expensive = False

def __init__(self, *args, iters_p_dim=10, **kwargs):
super().__init__(*args, **kwargs)
def __init__(
self,
search_space,
initialize={"grid": 4, "random": 2, "vertices": 4},
constraints=[],
random_state=None,
rand_rest_p=0,
nth_process=None,
epsilon=0.03,
distribution="normal",
n_neighbours=3,
iters_p_dim=10,
):
super().__init__(
search_space=search_space,
initialize=initialize,
constraints=constraints,
random_state=random_state,
rand_rest_p=rand_rest_p,
nth_process=nth_process,
epsilon=epsilon,
distribution=distribution,
n_neighbours=n_neighbours,
)

self.iters_p_dim = iters_p_dim

@@ -74,6 +96,9 @@ def new_dim(self):
self.hill_climb = HillClimbingOptimizer(
search_space=search_space_1D,
initialize={"random": 5},
epsilon=self.epsilon,
distribution=self.distribution,
n_neighbours=self.n_neighbours,
)

@HillClimbingOptimizer.track_new_pos
@@ -99,7 +124,9 @@ def iterate(self):

if self.conv.not_in_constraint(pos_new):
return pos_new
return self.move_climb(pos_new)
return self.move_climb(
pos_new, epsilon=self.epsilon, distribution=self.distribution
)

@HillClimbingOptimizer.track_new_score
def evaluate(self, score_new):