Merge pull request #3003 from Fredrik81/cores-and-arguments

Hyperopt: fix number of CPU cores, jobs and total epochs
commit 82bdd01843
hroff-1902 committed on 2020-03-03 02:12:21 +03:00 (via GitHub)


@@ -9,6 +9,7 @@ import logging
 import random
 import sys
 import warnings
+from math import ceil
 from collections import OrderedDict
 from operator import itemgetter
 from pathlib import Path
@@ -569,16 +570,21 @@ class Hyperopt:
         with Parallel(n_jobs=config_jobs) as parallel:
             jobs = parallel._effective_n_jobs()
             logger.info(f'Effective number of parallel workers used: {jobs}')
-            EVALS = max(self.total_epochs // jobs, 1)
+            EVALS = ceil(self.total_epochs / jobs)
             for i in range(EVALS):
-                asked = self.opt.ask(n_points=jobs)
+                # Correct the number of epochs to be processed for the last
+                # iteration (should not exceed self.total_epochs in total)
+                n_rest = (i + 1) * jobs - self.total_epochs
+                current_jobs = jobs - n_rest if n_rest > 0 else jobs
+
+                asked = self.opt.ask(n_points=current_jobs)
                 f_val = self.run_optimizer_parallel(parallel, asked, i)
                 self.opt.tell(asked, [v['loss'] for v in f_val])
                 self.fix_optimizer_models_list()
-                for j in range(jobs):
+                for j, val in enumerate(f_val):
                     # Use human-friendly indexes here (starting from 1)
                     current = i * jobs + j + 1
-                    val = f_val[j]
                     val['current_epoch'] = current
                     val['is_initial_point'] = current <= INITIAL_POINTS
                     logger.debug(f"Optimizer epoch evaluated: {val}")