Merge pull request #2142 from hroff-1902/hyperopt-print-json

Hyperopt: --print-json option
commit 161db08745
Author: Matthias
Date: 2019-08-16 11:08:54 +02:00 (committed by GitHub)
5 changed files with 123 additions and 15 deletions
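
This PR adds a --print-json flag to hyperopt: instead of the pretty-printed "Buy hyperspace params" / "ROI table" / "Stoploss" sections, the best result is emitted as a single machine-readable JSON object containing params, minimal_roi and stoploss, depending on which spaces were optimized. A hedged usage sketch follows; only --print-json is introduced by this PR, the epoch count, spaces value and output values are illustrative:

    freqtrade hyperopt --epochs 100 --spaces all --print-json
    ...
    Best result:
    ...
    {"params":{"mfi-value":20, ...},"minimal_roi":{"0":0.05,"30":0.025,"60":0},"stoploss":-0.3}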


@@ -22,7 +22,7 @@ ARGS_BACKTEST = ARGS_COMMON_OPTIMIZE + ["position_stacking", "use_max_market_pos
 ARGS_HYPEROPT = ARGS_COMMON_OPTIMIZE + ["hyperopt", "hyperopt_path",
                                         "position_stacking", "epochs", "spaces",
                                         "use_max_market_positions", "print_all",
-                                        "print_colorized", "hyperopt_jobs",
+                                        "print_colorized", "print_json", "hyperopt_jobs",
                                         "hyperopt_random_state", "hyperopt_min_trades",
                                         "hyperopt_continue", "hyperopt_loss"]


@@ -198,6 +198,12 @@ AVAILABLE_CLI_OPTIONS = {
         action='store_false',
         default=True,
     ),
+    "print_json": Arg(
+        '--print-json',
+        help='Print best result detailization in JSON format.',
+        action='store_true',
+        default=False,
+    ),
     "hyperopt_jobs": Arg(
         '-j', '--job-workers',
         help='The number of concurrently running jobs for hyperoptimization '


@@ -242,6 +242,9 @@ class Configuration(object):
         else:
             config.update({'print_colorized': True})
 
+        self._args_to_config(config, argname='print_json',
+                             logstring='Parameter --print-json detected ...')
+
         self._args_to_config(config, argname='hyperopt_jobs',
                              logstring='Parameter -j/--job-workers detected: {}')
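
_args_to_config itself is not part of this diff; judging from the call sites, it presumably copies a parsed CLI argument into the runtime config and logs the given message when the argument is set. A hedged sketch of that pattern, with the class and behaviour assumed rather than taken from freqtrade:

    import logging
    from argparse import Namespace
    from typing import Any, Dict

    logger = logging.getLogger(__name__)

    class ConfigurationSketch:
        """Hedged stand-in for the real Configuration class; behaviour assumed from call sites."""

        def __init__(self, args: Namespace) -> None:
            self.args = args

        def _args_to_config(self, config: Dict[str, Any], argname: str, logstring: str) -> None:
            # Assumed behaviour: if the CLI argument is present and truthy,
            # copy it into the config dict and log the supplied message.
            value = getattr(self.args, argname, None)
            if value:
                config.update({argname: value})
                logger.info(logstring.format(value))

    # With --print-json parsed as True, config ends up holding 'print_json': True,
    # which Hyperopt later reads via self.config.get('print_json').
    cfg = ConfigurationSketch(Namespace(print_json=True))
    config: Dict[str, Any] = {}
    cfg._args_to_config(config, argname='print_json',
                        logstring='Parameter --print-json detected ...')
    print(config)   # {'print_json': True}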


@@ -8,11 +8,14 @@ import logging
 import os
 import sys
+from collections import OrderedDict
 from operator import itemgetter
 from pathlib import Path
 from pprint import pprint
 from typing import Any, Dict, List, Optional
+
+import rapidjson
 from colorama import init as colorama_init
 from colorama import Fore, Style
 from joblib import Parallel, delayed, dump, load, wrap_non_picklable_objects, cpu_count
@@ -133,22 +136,44 @@ class Hyperopt(Backtesting):
         results = sorted(self.trials, key=itemgetter('loss'))
         best_result = results[0]
         params = best_result['params']
         log_str = self.format_results_logstring(best_result)
         print(f"\nBest result:\n\n{log_str}\n")
-        if self.has_space('buy'):
-            print('Buy hyperspace params:')
-            pprint({p.name: params.get(p.name) for p in self.hyperopt_space('buy')},
-                   indent=4)
-        if self.has_space('sell'):
-            print('Sell hyperspace params:')
-            pprint({p.name: params.get(p.name) for p in self.hyperopt_space('sell')},
-                   indent=4)
-        if self.has_space('roi'):
-            print("ROI table:")
-            pprint(self.custom_hyperopt.generate_roi_table(params), indent=4)
-        if self.has_space('stoploss'):
-            print(f"Stoploss: {params.get('stoploss')}")
+
+        if self.config.get('print_json'):
+            result_dict: Dict = {}
+            if self.has_space('buy') or self.has_space('sell'):
+                result_dict['params'] = {}
+            if self.has_space('buy'):
+                result_dict['params'].update({p.name: params.get(p.name)
+                                             for p in self.hyperopt_space('buy')})
+            if self.has_space('sell'):
+                result_dict['params'].update({p.name: params.get(p.name)
+                                             for p in self.hyperopt_space('sell')})
+            if self.has_space('roi'):
+                # Convert keys in min_roi dict to strings because
+                # rapidjson cannot dump dicts with integer keys...
+                # OrderedDict is used to keep the numeric order of the items
+                # in the dict.
+                result_dict['minimal_roi'] = OrderedDict(
+                    (str(k), v) for k, v in self.custom_hyperopt.generate_roi_table(params).items()
+                )
+            if self.has_space('stoploss'):
+                result_dict['stoploss'] = params.get('stoploss')
+            print(rapidjson.dumps(result_dict, default=str, number_mode=rapidjson.NM_NATIVE))
+        else:
+            if self.has_space('buy'):
+                print('Buy hyperspace params:')
+                pprint({p.name: params.get(p.name) for p in self.hyperopt_space('buy')},
+                       indent=4)
+            if self.has_space('sell'):
+                print('Sell hyperspace params:')
+                pprint({p.name: params.get(p.name) for p in self.hyperopt_space('sell')},
+                       indent=4)
+            if self.has_space('roi'):
+                print("ROI table:")
+                pprint(self.custom_hyperopt.generate_roi_table(params), indent=4)
+            if self.has_space('stoploss'):
+                print(f"Stoploss: {params.get('stoploss')}")
 
     def log_results(self, results) -> None:
         """


@@ -618,3 +618,77 @@ def test_continue_hyperopt(mocker, default_conf, caplog):
     assert unlinkmock.call_count == 0
     assert log_has(f"Continuing on previous hyperopt results.", caplog)
+
+
+def test_print_json_spaces_all(mocker, default_conf, caplog, capsys) -> None:
+    dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
+    mocker.patch('freqtrade.optimize.hyperopt.load_data', MagicMock())
+    mocker.patch(
+        'freqtrade.optimize.hyperopt.get_timeframe',
+        MagicMock(return_value=(datetime(2017, 12, 10), datetime(2017, 12, 13)))
+    )
+    parallel = mocker.patch(
+        'freqtrade.optimize.hyperopt.Hyperopt.run_optimizer_parallel',
+        MagicMock(return_value=[{'loss': 1, 'results_explanation': 'foo result', 'params': {}}])
+    )
+    patch_exchange(mocker)
+
+    default_conf.update({'config': 'config.json.example',
+                         'epochs': 1,
+                         'timerange': None,
+                         'spaces': 'all',
+                         'hyperopt_jobs': 1,
+                         'print_json': True,
+                         })
+
+    hyperopt = Hyperopt(default_conf)
+    hyperopt.strategy.tickerdata_to_dataframe = MagicMock()
+    hyperopt.custom_hyperopt.generate_roi_table = MagicMock(return_value={})
+
+    hyperopt.start()
+
+    parallel.assert_called_once()
+
+    out, err = capsys.readouterr()
+    assert '{"params":{"mfi-value":null,"fastd-value":null,"adx-value":null,"rsi-value":null,"mfi-enabled":null,"fastd-enabled":null,"adx-enabled":null,"rsi-enabled":null,"trigger":null,"sell-mfi-value":null,"sell-fastd-value":null,"sell-adx-value":null,"sell-rsi-value":null,"sell-mfi-enabled":null,"sell-fastd-enabled":null,"sell-adx-enabled":null,"sell-rsi-enabled":null,"sell-trigger":null},"minimal_roi":{},"stoploss":null}' in out  # noqa: E501
+    assert dumper.called
+    # Should be called twice, once for tickerdata, once to save evaluations
+    assert dumper.call_count == 2
+
+
+def test_print_json_spaces_roi_stoploss(mocker, default_conf, caplog, capsys) -> None:
+    dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
+    mocker.patch('freqtrade.optimize.hyperopt.load_data', MagicMock())
+    mocker.patch(
+        'freqtrade.optimize.hyperopt.get_timeframe',
+        MagicMock(return_value=(datetime(2017, 12, 10), datetime(2017, 12, 13)))
+    )
+    parallel = mocker.patch(
+        'freqtrade.optimize.hyperopt.Hyperopt.run_optimizer_parallel',
+        MagicMock(return_value=[{'loss': 1, 'results_explanation': 'foo result', 'params': {}}])
+    )
+    patch_exchange(mocker)
+
+    default_conf.update({'config': 'config.json.example',
+                         'epochs': 1,
+                         'timerange': None,
+                         'spaces': 'roi stoploss',
+                         'hyperopt_jobs': 1,
+                         'print_json': True,
+                         })
+
+    hyperopt = Hyperopt(default_conf)
+    hyperopt.strategy.tickerdata_to_dataframe = MagicMock()
+    hyperopt.custom_hyperopt.generate_roi_table = MagicMock(return_value={})
+
+    hyperopt.start()
+
+    parallel.assert_called_once()
+
+    out, err = capsys.readouterr()
+    assert '{"minimal_roi":{},"stoploss":null}' in out
+    assert dumper.called
+    # Should be called twice, once for tickerdata, once to save evaluations
+    assert dumper.call_count == 2
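
The two tests pin down the exact shape of the JSON line, which also shows how the output is meant to be consumed: take the single JSON object from the hyperopt output and merge the relevant keys into a config or strategy. A hedged post-processing sketch, assuming the JSON line has already been captured into a file named best_result.json (the file name and the existing config contents are illustrative, not part of this PR):

    import json

    # Illustrative: the line printed by `freqtrade hyperopt --print-json`
    # has been saved to best_result.json.
    with open('best_result.json') as fp:
        best = json.load(fp)

    # Merge the optimized values into an existing config dict; minimal_roi and
    # stoploss are standard freqtrade configuration overrides.
    config = {'stake_currency': 'BTC'}   # illustrative existing config
    if 'minimal_roi' in best:
        config['minimal_roi'] = best['minimal_roi']
    if best.get('stoploss') is not None:
        config['stoploss'] = best['stoploss']

    print(json.dumps(config, indent=4))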