better defaults

parent a5b44de0f6
commit 8d03887b02
@@ -154,6 +154,8 @@ class Hyperopt:
         backend.manager = Manager()
         self.mode = self.config.get('mode', 'single')
         self.shared = False
+        # models are only needed for posterior eval
+        self.n_models = 1
         if self.mode in ('multi', 'shared'):
             self.multi = True
             if self.mode == 'shared':
@@ -161,19 +163,19 @@ class Hyperopt:
             backend.optimizers = backend.manager.Queue()
             backend.results_board = backend.manager.Queue(maxsize=1)
             backend.results_board.put({})
+            default_n_points = 2
             self.opt_base_estimator = 'GBRT'
             self.opt_acq_optimizer = 'sampling'
-            # in multi opt one model is enough
-            self.n_models = 1
-            default_n_points = 2
         else:
             self.multi = False
             backend.results = backend.manager.Queue()
-            self.opt_base_estimator = 'GP'
-            self.opt_acq_optimizer = 'lbfgs'
-            # models are only needed for posterior eval
-            self.n_models = min(16, self.n_jobs)
+            self.opt_base_estimator = 'ET'
+            self.opt_acq_optimizer = 'sampling'
             default_n_points = 1
+            # The GaussianProcessRegressor is heavy, which makes it not a good default
+            # however longer backtests might make it a better tradeoff
+            # self.opt_base_estimator = 'GP'
+            # self.opt_acq_optimizer = 'lbfgs'
 
         # in single opt assume runs are expensive so default to 1 point per ask
         self.n_points = self.config.get('n_points', default_n_points)
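For context, these settings line up with scikit-optimize's `Optimizer` constructor, which accepts `'GBRT'`, `'ET'` and `'GP'` as `base_estimator` and `'sampling'` or `'lbfgs'` as `acq_optimizer`. A minimal sketch of how the new defaults could map onto skopt; the `make_optimizer` helper, the `dimensions` example and the `model_queue_size` choice are illustrative assumptions, not code from this commit:

```python
# Illustrative only: how the commit's defaults map onto skopt's Optimizer.
from skopt import Optimizer
from skopt.space import Real

def make_optimizer(mode: str, dimensions: list) -> Optimizer:
    if mode in ('multi', 'shared'):
        base_estimator, acq_optimizer = 'GBRT', 'sampling'
    else:
        # 'ET' (extra-trees) refits far faster than a GaussianProcessRegressor,
        # and 'sampling' skips the lbfgs search over the surrogate
        base_estimator, acq_optimizer = 'ET', 'sampling'
    return Optimizer(dimensions,
                     base_estimator=base_estimator,
                     acq_optimizer=acq_optimizer,
                     model_queue_size=1)  # keep one model, as self.n_models = 1

opt = make_optimizer('single', [Real(0.0, 1.0, name='stoploss')])
points = opt.ask(n_points=1)  # single mode: runs are expensive, one point per ask
```

In multi/shared mode `opt.ask(n_points=2)` would hand out two candidates per request, matching `default_n_points = 2` above.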
@@ -235,17 +237,21 @@ class Hyperopt:
         num_trials = len(self.trials)
         print()
         if num_trials > self.num_trials_saved:
-            logger.info(f"Saving {num_trials} {plural(num_trials, 'epoch')}.")
+            logger.debug(f"Saving {num_trials} {plural(num_trials, 'epoch')}.")
             dump(self.trials, self.trials_file)
             self.num_trials_saved = num_trials
             self.save_opts()
         if final:
-            logger.info(f"{num_trials} {plural(num_trials, 'epoch')} "
+            logger.debug(f"{num_trials} {plural(num_trials, 'epoch')} "
                         f"saved to '{self.trials_file}'.")
 
     def save_opts(self) -> None:
-        """ Save optimizers state to disk. The minimum required state could also be constructed
-        from the attributes [ models, space, rng ] with Xi, yi loaded from trials """
+        """
+        Save optimizers state to disk. The minimum required state could also be constructed
+        from the attributes [ models, space, rng ] with Xi, yi loaded from trials.
+        All we really care about are [rng, Xi, yi] since models are never passed over queues
+        and space is dependent on dimensions matching with hyperopt config
+        """
         # synchronize with saved trials
         opts = []
         n_opts = 0
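The expanded docstring implies an optimizer never needs to be unpickled wholesale: given the random state and the evaluated points, the surrogate can be refit. A hedged sketch of that reconstruction, assuming skopt's public `rng`, `Xi` and `yi` attributes; `restore_opt` and the `saved` dict layout are illustrative:

```python
# Illustrative reconstruction from the minimal [rng, Xi, yi] state.
from skopt import Optimizer

def restore_opt(saved: dict, dimensions: list) -> Optimizer:
    opt = Optimizer(dimensions, base_estimator='ET', acq_optimizer='sampling')
    opt.rng = saved['rng']                  # resume the random stream
    if saved['Xi']:
        opt.tell(saved['Xi'], saved['yi'])  # refit the surrogate from trials
    return opt
```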
@@ -259,7 +265,7 @@ class Hyperopt:
         if self.opt:
             n_opts = 1
             opts = [self.opt]
-        logger.info(f"Saving {n_opts} {plural(n_opts, 'optimizer')}.")
+        logger.debug(f"Saving {n_opts} {plural(n_opts, 'optimizer')}.")
         dump(opts, self.opts_file)
 
     @staticmethod
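`dump` here is presumably the joblib-style serializer already used for the trials file; a pickled GaussianProcessRegressor is what makes saved optimizers heavy, which both the `'ET'` default and `n_models = 1` mitigate. A small round-trip sketch, with the path hypothetical:

```python
# Hypothetical round-trip of saved optimizer state; the path is illustrative.
from joblib import dump, load

dump(opts, 'user_data/hyperopt_optimizers.pickle')
opts = load('user_data/hyperopt_optimizers.pickle')
```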
@@ -874,7 +880,7 @@ class Hyperopt:
         if self.max_epoch > self.search_space_size:
             self.max_epoch = self.search_space_size
         print()
-        logger.info(f'Max epoch set to: {self.epochs_limit()}')
+        logger.debug(f'Max epoch set to: {self.epochs_limit()}')
 
     def setup_optimizers(self):
         """ Setup the optimizers objects, try to load from disk, or create new ones """
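The `setup_optimizers` docstring describes a load-or-create flow; a minimal sketch of that shape, where `opts_file`, `get_optimizer` and the `opts` attribute are all assumptions about the surrounding class, not the commit's code:

```python
# Shape of a load-or-create setup; every name here is an assumption.
from pathlib import Path
from joblib import load

def setup_optimizers(self):
    """Try to resume saved optimizer state, else build fresh ones."""
    if Path(self.opts_file).exists():
        self.opts = load(self.opts_file)  # resume previously saved state
    else:
        self.opts = [self.get_optimizer() for _ in range(self.n_jobs)]
```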