Merge pull request #1792 from hroff-1902/hyperopt-jobs

hyperopt: -j/--job-workers command line option added
Matthias 2019-04-24 12:19:07 +02:00 committed by GitHub
commit 34fa2011be
5 changed files with 36 additions and 12 deletions


@@ -216,7 +216,7 @@ usage: freqtrade hyperopt [-h] [-i TICKER_INTERVAL] [--timerange TIMERANGE]
                           [--stake_amount STAKE_AMOUNT] [-r]
                           [--customhyperopt NAME] [--eps] [--dmmp] [-e INT]
                           [-s {all,buy,sell,roi,stoploss} [{all,buy,sell,roi,stoploss} ...]]
-                          [--print-all]
+                          [--print-all] [-j JOBS]
 
 optional arguments:
   -h, --help            show this help message and exit
@@ -247,6 +247,12 @@ optional arguments:
                         Specify which parameters to hyperopt. Space separate
                         list. Default: all.
   --print-all           Print all results, not only the best ones.
+  -j JOBS, --job-workers JOBS
+                        The number of concurrently running jobs for
+                        hyperoptimization (hyperopt worker processes). If -1
+                        (default), all CPUs are used, for -2, all CPUs but one
+                        are used, etc. If 1 is given, no parallel computing
+                        code is used at all.
 ```
 
 ## Edge commands
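
The -1/-2 semantics in the new help text follow joblib's documented rule for negative `n_jobs` values. A minimal sketch of that mapping (the `effective_workers` helper is illustrative, not part of the PR):

```python
from joblib import cpu_count

def effective_workers(n_jobs: int) -> int:
    """Illustrative helper: resolve a joblib-style n_jobs value to a worker count."""
    if n_jobs < 0:
        # joblib convention: -1 means all CPUs, -2 all CPUs but one, and so on.
        return max(cpu_count() + 1 + n_jobs, 1)
    return n_jobs

# On an 8-core machine: effective_workers(-1) == 8, effective_workers(-2) == 7,
# and effective_workers(1) == 1 (no parallel computing code is used).
```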


@@ -306,6 +306,17 @@ class Arguments(object):
             dest='print_all',
             default=False
         )
+        parser.add_argument(
+            '-j', '--job-workers',
+            help='The number of concurrently running jobs for hyperoptimization '
+                 '(hyperopt worker processes). '
+                 'If -1 (default), all CPUs are used, for -2, all CPUs but one are used, etc. '
+                 'If 1 is given, no parallel computing code is used at all.',
+            dest='hyperopt_jobs',
+            default=-1,
+            type=int,
+            metavar='JOBS',
+        )
         parser.add_argument(
             '--random-state',
             help='Set random state to some positive integer for reproducible hyperopt results.',
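
For reference, a self-contained sketch of how this `add_argument()` call parses. The parser construction here is a stand-in; only the `-j/--job-workers` definition mirrors the diff:

```python
import argparse

# Illustrative stand-in parser; not the freqtrade Arguments class.
parser = argparse.ArgumentParser(prog='freqtrade hyperopt')
parser.add_argument(
    '-j', '--job-workers',
    dest='hyperopt_jobs',
    default=-1,
    type=int,
    metavar='JOBS',
)

print(parser.parse_args([]).hyperopt_jobs)                       # -1 (default: all CPUs)
print(parser.parse_args(['-j', '2']).hyperopt_jobs)              # 2
print(parser.parse_args(['--job-workers', '-2']).hyperopt_jobs)  # -2 (all CPUs but one)
```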


@@ -309,6 +309,10 @@ class Configuration(object):
             config.update({'print_all': self.args.print_all})
             logger.info('Parameter --print-all detected: %s', config.get('print_all'))
 
+        if 'hyperopt_jobs' in self.args and self.args.hyperopt_jobs:
+            config.update({'hyperopt_jobs': self.args.hyperopt_jobs})
+            logger.info('Parameter -j/--job-workers detected: %s', config.get('hyperopt_jobs'))
+
         if 'refresh_pairs' in self.args and self.args.refresh_pairs:
             config.update({'refresh_pairs': True})
             logger.info('Parameter -r/--refresh-pairs-cached detected ...')
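
A small standalone sketch of the guard above (the function name is hypothetical, not from the PR). Because the argparse default is -1, which is truthy, the value is copied into the configuration even when the user never passes -j/--job-workers; only a missing attribute or an explicit 0 would skip it:

```python
from argparse import Namespace
from typing import Any, Dict

def extract_hyperopt_jobs(args: Namespace, config: Dict[str, Any]) -> Dict[str, Any]:
    # Same pattern as the diff: copy the value only if the attribute exists and is truthy.
    if 'hyperopt_jobs' in args and args.hyperopt_jobs:
        config.update({'hyperopt_jobs': args.hyperopt_jobs})
    return config

print(extract_hyperopt_jobs(Namespace(hyperopt_jobs=-1), {}))  # {'hyperopt_jobs': -1}
print(extract_hyperopt_jobs(Namespace(hyperopt_jobs=4), {}))   # {'hyperopt_jobs': 4}
print(extract_hyperopt_jobs(Namespace(), {}))                  # {} (option absent)
```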


@@ -5,7 +5,6 @@ This module contains the hyperopt logic
 """
 import logging
-import multiprocessing
 import os
 import sys
 from argparse import Namespace
@@ -15,7 +14,7 @@ from pathlib import Path
 from pprint import pprint
 from typing import Any, Dict, List
 
-from joblib import Parallel, delayed, dump, load, wrap_non_picklable_objects
+from joblib import Parallel, delayed, dump, load, wrap_non_picklable_objects, cpu_count
 from pandas import DataFrame
 from skopt import Optimizer
 from skopt.space import Dimension
@@ -275,28 +274,32 @@ class Hyperopt(Backtesting):
         self.load_previous_results()
 
-        cpus = multiprocessing.cpu_count()
+        cpus = cpu_count()
         logger.info(f'Found {cpus} CPU cores. Let\'s make them scream!')
+        config_jobs = self.config.get('hyperopt_jobs', -1)
+        logger.info(f'Number of parallel jobs set as: {config_jobs}')
 
-        opt = self.get_optimizer(cpus)
-        EVALS = max(self.total_tries // cpus, 1)
+        opt = self.get_optimizer(config_jobs)
         try:
-            with Parallel(n_jobs=cpus) as parallel:
+            with Parallel(n_jobs=config_jobs) as parallel:
+                jobs = parallel._effective_n_jobs()
+                logger.info(f'Effective number of parallel workers used: {jobs}')
+                EVALS = max(self.total_tries // jobs, 1)
                 for i in range(EVALS):
-                    asked = opt.ask(n_points=cpus)
+                    asked = opt.ask(n_points=jobs)
                     f_val = self.run_optimizer_parallel(parallel, asked)
                     opt.tell(asked, [i['loss'] for i in f_val])
 
                     self.trials += f_val
-                    for j in range(cpus):
+                    for j in range(jobs):
                         self.log_results({
                             'loss': f_val[j]['loss'],
-                            'current_tries': i * cpus + j,
+                            'current_tries': i * jobs + j,
                             'total_tries': self.total_tries,
                             'result': f_val[j]['result'],
                         })
                         logger.debug(f"Optimizer params: {f_val[j]['params']}")
-                    for j in range(cpus):
+                    for j in range(jobs):
                         logger.debug(f"Opimizer state: Xi: {opt.Xi[-j-1]}, yi: {opt.yi[-j-1]}")
         except KeyboardInterrupt:
             print('User interrupted..')
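
The change above sizes each ask/evaluate/tell batch from joblib's effective worker count rather than the raw CPU count. A standalone sketch of that pattern, assuming the same joblib calls used in the diff (the `objective` function and the batch construction stand in for the real optimizer loop and backtest):

```python
from joblib import Parallel, delayed

def objective(point):
    # Stand-in for one backtest evaluation; returns a loss value.
    return sum(x * x for x in point)

total_tries = 8
with Parallel(n_jobs=-1) as parallel:
    # Resolve -1/-2/... to a concrete worker count, as the diff does.
    jobs = parallel._effective_n_jobs()
    evals = max(total_tries // jobs, 1)
    for i in range(evals):
        batch = [[i, j] for j in range(jobs)]             # stands in for opt.ask(n_points=jobs)
        losses = parallel(delayed(objective)(p) for p in batch)
        print(f'batch {i}: {losses}')                     # stands in for opt.tell(...) and logging
```

One side effect worth noting: because `EVALS` uses integer division, the number of points actually evaluated is `EVALS * jobs`, i.e. `total_tries` rounded down to a multiple of the effective worker count (or up to one full batch when `total_tries` is smaller than `jobs`).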


@@ -314,7 +314,6 @@ def test_roi_table_generation(hyperopt) -> None:
 def test_start_calls_optimizer(mocker, default_conf, caplog) -> None:
     dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
     mocker.patch('freqtrade.optimize.hyperopt.load_data', MagicMock())
-    mocker.patch('freqtrade.optimize.hyperopt.multiprocessing.cpu_count', MagicMock(return_value=1))
     parallel = mocker.patch(
         'freqtrade.optimize.hyperopt.Hyperopt.run_optimizer_parallel',
         MagicMock(return_value=[{'loss': 1, 'result': 'foo result', 'params': {}}])
@@ -325,6 +324,7 @@ def test_start_calls_optimizer(mocker, default_conf, caplog) -> None:
     default_conf.update({'epochs': 1})
     default_conf.update({'timerange': None})
     default_conf.update({'spaces': 'all'})
+    default_conf.update({'hyperopt_jobs': 1})
     hyperopt = Hyperopt(default_conf)
     hyperopt.strategy.tickerdata_to_dataframe = MagicMock()
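
Not part of the PR, but a quick way to see what the pinned `hyperopt_jobs: 1` means in practice: per joblib's documented behaviour, `n_jobs=1` uses the sequential backend, so every task runs in the calling process and no worker processes are spawned, which keeps the mocked single-result batch and the test run single-process and deterministic:

```python
import os
from joblib import Parallel, delayed

# With n_jobs=1 joblib runs sequentially: no worker processes are spawned
# and every task executes in the calling process.
pids = Parallel(n_jobs=1)(delayed(os.getpid)() for _ in range(3))
print(pids)
print(all(pid == os.getpid() for pid in pids))  # True
```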