2018-03-02 13:46:32 +00:00
|
|
|
# pragma pylint: disable=too-many-instance-attributes, pointless-string-statement
|
2017-11-25 00:04:11 +00:00
|
|
|
|
2018-03-02 13:46:32 +00:00
|
|
|
"""
|
|
|
|
This module contains the hyperopt logic
|
|
|
|
"""
|
2017-11-25 00:04:11 +00:00
|
|
|
|
2017-11-25 01:04:37 +00:00
|
|
|
import logging
|
2019-12-12 00:12:28 +00:00
|
|
|
import random
|
2022-07-16 09:05:58 +00:00
|
|
|
import sys
|
2019-12-10 12:45:10 +00:00
|
|
|
import warnings
|
2021-04-28 20:33:58 +00:00
|
|
|
from datetime import datetime, timezone
|
2020-08-08 15:04:32 +00:00
|
|
|
from math import ceil
|
2019-01-06 13:47:38 +00:00
|
|
|
from pathlib import Path
|
2022-03-29 23:29:14 +00:00
|
|
|
from typing import Any, Dict, List, Optional, Tuple
|
2017-10-19 14:12:49 +00:00
|
|
|
|
2020-08-08 15:04:32 +00:00
|
|
|
import progressbar
|
2021-05-12 03:58:25 +00:00
|
|
|
import rapidjson
|
2019-08-09 11:48:57 +00:00
|
|
|
from colorama import Fore, Style
|
2020-06-01 07:34:03 +00:00
|
|
|
from colorama import init as colorama_init
|
2020-09-28 17:39:41 +00:00
|
|
|
from joblib import Parallel, cpu_count, delayed, dump, load, wrap_non_picklable_objects
|
2022-07-16 09:05:58 +00:00
|
|
|
from joblib.externals import cloudpickle
|
2021-03-17 19:43:51 +00:00
|
|
|
from pandas import DataFrame
|
2018-06-18 19:40:36 +00:00
|
|
|
|
2021-06-15 18:27:46 +00:00
|
|
|
from freqtrade.constants import DATETIME_PRINT_FORMAT, FTHYPT_FILEVERSION, LAST_BT_RESULT_FN
|
2021-05-21 06:26:19 +00:00
|
|
|
from freqtrade.data.converter import trim_dataframes
|
2019-12-30 18:40:43 +00:00
|
|
|
from freqtrade.data.history import get_timerange
|
2022-08-19 13:19:43 +00:00
|
|
|
from freqtrade.enums import HyperoptState
|
2021-09-11 07:06:57 +00:00
|
|
|
from freqtrade.exceptions import OperationalException
|
2021-06-14 18:45:06 +00:00
|
|
|
from freqtrade.misc import deep_merge_dicts, file_dump_json, plural
|
2018-03-02 15:22:00 +00:00
|
|
|
from freqtrade.optimize.backtesting import Backtesting
|
2019-08-14 10:25:49 +00:00
|
|
|
# Import IHyperOpt and IHyperOptLoss to allow unpickling classes from these modules
|
2021-03-23 08:02:32 +00:00
|
|
|
from freqtrade.optimize.hyperopt_auto import HyperOptAuto
|
2022-05-22 17:32:32 +00:00
|
|
|
from freqtrade.optimize.hyperopt_loss_interface import IHyperOptLoss
|
2022-08-19 13:19:43 +00:00
|
|
|
from freqtrade.optimize.hyperopt_tools import (HyperoptStateContainer, HyperoptTools,
|
|
|
|
hyperopt_serializer)
|
2021-04-28 20:33:58 +00:00
|
|
|
from freqtrade.optimize.optimize_reports import generate_strategy_stats
|
2021-09-11 07:06:57 +00:00
|
|
|
from freqtrade.resolvers.hyperopt_resolver import HyperOptLossResolver
|
2022-03-30 08:39:07 +00:00
|
|
|
|
2020-09-28 17:39:41 +00:00
|
|
|
|
2019-12-10 15:10:51 +00:00
|
|
|
# Suppress scikit-learn FutureWarnings from skopt
|
|
|
|
with warnings.catch_warnings():
|
|
|
|
warnings.filterwarnings("ignore", category=FutureWarning)
|
|
|
|
from skopt import Optimizer
|
|
|
|
from skopt.space import Dimension
|
|
|
|
|
2020-04-06 11:12:32 +00:00
|
|
|
progressbar.streams.wrap_stderr()
|
|
|
|
progressbar.streams.wrap_stdout()
|
2018-03-25 19:37:14 +00:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
2019-04-25 08:11:04 +00:00
|
|
|
|
2022-03-30 08:39:07 +00:00
|
|
|
INITIAL_POINTS = 30
|
2019-09-23 08:59:34 +00:00
|
|
|
|
2020-04-29 07:49:25 +00:00
|
|
|
# Keep no more than SKOPT_MODEL_QUEUE_SIZE models
|
|
|
|
# in the skopt model queue, to optimize memory consumption
|
|
|
|
SKOPT_MODEL_QUEUE_SIZE = 10
|
2019-09-23 08:59:34 +00:00
|
|
|
|
2018-07-02 08:44:33 +00:00
|
|
|
MAX_LOSS = 100000 # just a big enough number to be bad result in loss optimization
|
|
|
|
|
2018-03-25 19:37:14 +00:00
|
|
|
|
2019-08-23 21:10:35 +00:00
|
|
|
class Hyperopt:
|
2018-01-23 14:56:12 +00:00
|
|
|
"""
|
2018-03-02 13:46:32 +00:00
|
|
|
Hyperopt class, this class contains all the logic to run a hyperopt simulation
|
2018-01-23 14:56:12 +00:00
|
|
|
|
2018-03-02 13:46:32 +00:00
|
|
|
To run a backtest:
|
|
|
|
hyperopt = Hyperopt(config)
|
|
|
|
hyperopt.start()
|
2018-01-23 14:56:12 +00:00
|
|
|
"""
|
2020-01-31 21:37:05 +00:00
|
|
|
|
2018-03-02 13:46:32 +00:00
|
|
|
def __init__(self, config: Dict[str, Any]) -> None:
    """
    Initialize the hyperopt run: set up backtesting, resolve the loss
    function, prepare result/data file paths and read run options from config.

    :param config: full freqtrade configuration dictionary.
    :raises OperationalException: if a legacy separate hyperopt file is configured.
    """
    # Search-space dimension lists - populated later by init_spaces().
    self.buy_space: List[Dimension] = []
    self.sell_space: List[Dimension] = []
    self.protection_space: List[Dimension] = []
    self.roi_space: List[Dimension] = []
    self.stoploss_space: List[Dimension] = []
    self.trailing_space: List[Dimension] = []
    self.dimensions: List[Dimension] = []

    self.config = config
    # Effective backtest range - assigned by advise_and_trim().
    self.min_date: datetime
    self.max_date: datetime

    self.backtesting = Backtesting(self.config)
    self.pairlist = self.backtesting.pairlists.whitelist
    self.custom_hyperopt: HyperOptAuto
    # When True, indicators are recomputed in every epoch (generate_optimizer).
    self.analyze_per_epoch = self.config.get('analyze_per_epoch', False)
    HyperoptStateContainer.set_state(HyperoptState.STARTUP)

    if not self.config.get('hyperopt'):
        self.custom_hyperopt = HyperOptAuto(self.config)
    else:
        # Separate hyperopt files are no longer supported.
        raise OperationalException(
            "Using separate Hyperopt files has been removed in 2021.9. Please convert "
            "your existing Hyperopt file to the new Hyperoptable strategy interface")

    self.backtesting._set_strategy(self.backtesting.strategylist[0])
    self.custom_hyperopt.strategy = self.backtesting.strategy

    # Register strategy base-class modules with cloudpickle so worker
    # processes can unpickle strategies inheriting across files.
    self.hyperopt_pickle_magic(self.backtesting.strategy.__class__.__bases__)
    self.custom_hyperoptloss: IHyperOptLoss = HyperOptLossResolver.load_hyperoptloss(
        self.config)
    self.calculate_loss = self.custom_hyperoptloss.hyperopt_loss_function
    time_now = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    strategy = str(self.config['strategy'])
    # One result file per run, named after strategy and start time.
    self.results_file: Path = (self.config['user_data_dir'] / 'hyperopt_results' /
                               f'strategy_{strategy}_{time_now}.fthypt')
    self.data_pickle_file = (self.config['user_data_dir'] /
                             'hyperopt_results' / 'hyperopt_tickerdata.pkl')
    self.total_epochs = config.get('epochs', 0)

    # Initial "best" loss - any real result below this replaces it.
    self.current_best_loss = 100

    # Remove artifacts from a previous run before starting.
    self.clean_hyperopt()

    self.num_epochs_saved = 0
    self.current_best_epoch: Optional[Dict[str, Any]] = None

    # Use max_open_trades for hyperopt as well, except --disable-max-market-positions is set
    if self.config.get('use_max_market_positions', True):
        self.max_open_trades = self.config['max_open_trades']
    else:
        logger.debug('Ignoring max_open_trades (--disable-max-market-positions was used) ...')
        self.max_open_trades = 0
    self.position_stacking = self.config.get('position_stacking', False)

    if HyperoptTools.has_space(self.config, 'sell'):
        # Make sure use_exit_signal is enabled
        self.config['use_exit_signal'] = True

    # Output / reporting options.
    self.print_all = self.config.get('print_all', False)
    self.hyperopt_table_header = 0
    self.print_colorized = self.config.get('print_colorized', False)
    self.print_json = self.config.get('print_json', False)
|
|
|
|
|
2019-07-21 14:07:06 +00:00
|
|
|
@staticmethod
|
2020-02-02 04:00:40 +00:00
|
|
|
def get_lock_filename(config: Dict[str, Any]) -> str:
|
2019-07-21 14:07:06 +00:00
|
|
|
|
|
|
|
return str(config['user_data_dir'] / 'hyperopt.lock')
|
|
|
|
|
2020-02-02 04:00:40 +00:00
|
|
|
def clean_hyperopt(self) -> None:
|
2019-07-15 18:17:15 +00:00
|
|
|
"""
|
|
|
|
Remove hyperopt pickle files to restart hyperopt.
|
|
|
|
"""
|
2020-04-28 19:56:19 +00:00
|
|
|
for f in [self.data_pickle_file, self.results_file]:
|
2019-07-15 18:17:15 +00:00
|
|
|
p = Path(f)
|
|
|
|
if p.is_file():
|
|
|
|
logger.info(f"Removing `{p}`.")
|
|
|
|
p.unlink()
|
|
|
|
|
2022-07-16 09:05:58 +00:00
|
|
|
def hyperopt_pickle_magic(self, bases) -> None:
    """
    Hyperopt magic to allow strategy inheritance across files.
    Registers the defining module of every (non-IStrategy) base class with
    cloudpickle so it is pickled by value and available in worker processes.

    :param bases: tuple of base classes to walk (recurses up the MRO).
    """
    for base_class in bases:
        if base_class.__name__ == 'IStrategy':
            continue
        defining_module = sys.modules[base_class.__module__]
        cloudpickle.register_pickle_by_value(defining_module)
        # Continue up the inheritance chain.
        self.hyperopt_pickle_magic(base_class.__bases__)
|
|
|
|
|
2021-05-02 07:35:10 +00:00
|
|
|
def _get_params_dict(self, dimensions: List[Dimension], raw_params: List[Any]) -> Dict:
|
2019-09-16 18:22:07 +00:00
|
|
|
|
2018-06-19 06:09:54 +00:00
|
|
|
# Ensure the number of dimensions match
|
2019-11-26 12:01:42 +00:00
|
|
|
# the number of parameters in the list.
|
|
|
|
if len(raw_params) != len(dimensions):
|
|
|
|
raise ValueError('Mismatch in number of search-space dimensions.')
|
2018-06-19 06:09:54 +00:00
|
|
|
|
2019-11-26 12:01:42 +00:00
|
|
|
# Return a dict where the keys are the names of the dimensions
|
|
|
|
# and the values are taken from the list of parameters.
|
|
|
|
return {d.name: v for d, v in zip(dimensions, raw_params)}
|
2018-06-19 06:09:54 +00:00
|
|
|
|
2021-05-12 03:58:25 +00:00
|
|
|
def _save_result(self, epoch: Dict) -> None:
    """
    Save hyperopt results to file
    Store one line per epoch.
    While not a valid json object - this allows appending easily.
    :param epoch: result dictionary for this epoch.
    """
    # Tag each record with the file-format version so readers can migrate.
    epoch[FTHYPT_FILEVERSION] = 2
    with self.results_file.open('a') as f:
        # NM_NATIVE | NM_NAN: serialize numbers natively and allow NaN values.
        rapidjson.dump(epoch, f, default=hyperopt_serializer,
                       number_mode=rapidjson.NM_NATIVE | rapidjson.NM_NAN)
        f.write("\n")

    self.num_epochs_saved += 1
    logger.debug(f"{self.num_epochs_saved} {plural(self.num_epochs_saved, 'epoch')} "
                 f"saved to '{self.results_file}'.")
    # Store hyperopt filename
    # (pointer file so tooling can locate the most recent hyperopt result).
    latest_filename = Path.joinpath(self.results_file.parent, LAST_BT_RESULT_FN)
    file_dump_json(latest_filename, {'latest_hyperopt': str(self.results_file.name)},
                   log=False)
|
2018-03-02 13:46:32 +00:00
|
|
|
|
2019-11-26 12:01:42 +00:00
|
|
|
def _get_params_details(self, params: Dict) -> Dict:
    """
    Return the params for each enabled hyperopt space.

    :param params: flat dict of optimized parameter values.
    :return: dict keyed by space name ('buy', 'sell', 'protection', 'roi',
             'stoploss', 'trailing') for every space that is being optimized.
    """
    def _space_params(space):
        # Pick the optimized value for every dimension of one space.
        return {dim.name: params.get(dim.name) for dim in space}

    result: Dict = {}

    if HyperoptTools.has_space(self.config, 'buy'):
        result['buy'] = _space_params(self.buy_space)
    if HyperoptTools.has_space(self.config, 'sell'):
        result['sell'] = _space_params(self.sell_space)
    if HyperoptTools.has_space(self.config, 'protection'):
        result['protection'] = _space_params(self.protection_space)
    if HyperoptTools.has_space(self.config, 'roi'):
        roi_table = self.custom_hyperopt.generate_roi_table(params)
        # ROI keys are minute offsets - stringify them for JSON output.
        result['roi'] = {str(key): value for key, value in roi_table.items()}
    if HyperoptTools.has_space(self.config, 'stoploss'):
        result['stoploss'] = _space_params(self.stoploss_space)
    if HyperoptTools.has_space(self.config, 'trailing'):
        result['trailing'] = self.custom_hyperopt.generate_trailing_params(params)

    return result
|
2019-11-23 08:32:33 +00:00
|
|
|
|
2021-06-14 18:45:06 +00:00
|
|
|
def _get_no_optimize_details(self) -> Dict[str, Any]:
    """
    Collect strategy parameters for spaces that are NOT being optimized,
    so they can be reported alongside the optimized ones.
    """
    result: Dict[str, Any] = {}
    strategy = self.backtesting.strategy

    if not HyperoptTools.has_space(self.config, 'roi'):
        # Stringify minute-offset keys for JSON output.
        result['roi'] = {str(key): value for key, value in strategy.minimal_roi.items()}
    if not HyperoptTools.has_space(self.config, 'stoploss'):
        result['stoploss'] = {'stoploss': strategy.stoploss}
    if not HyperoptTools.has_space(self.config, 'trailing'):
        trailing_keys = (
            'trailing_stop',
            'trailing_stop_positive',
            'trailing_stop_positive_offset',
            'trailing_only_offset_is_reached',
        )
        result['trailing'] = {key: getattr(strategy, key) for key in trailing_keys}
    return result
|
|
|
|
|
2019-11-26 12:01:42 +00:00
|
|
|
def print_results(self, results) -> None:
    """
    Log results if it is better than any previous evaluation
    TODO: this should be moved to HyperoptTools too

    :param results: epoch result dictionary containing at least 'is_best'.
    """
    # Only print when either everything is requested or this epoch is a new best.
    if not (self.print_all or results['is_best']):
        return

    table = HyperoptTools.get_result_table(
        self.config, results, self.total_epochs,
        self.print_all, self.print_colorized,
        self.hyperopt_table_header
    )
    print(table)
    # After the first printed row, only emit the header every other time (2).
    self.hyperopt_table_header = 2
|
2018-03-02 13:46:32 +00:00
|
|
|
|
2021-05-02 07:35:10 +00:00
|
|
|
def init_spaces(self):
    """
    Assign the dimensions in the hyperoptimization space.

    Queries the strategy (via HyperOptAuto) for each enabled space and
    concatenates all spaces into self.dimensions.
    """
    if HyperoptTools.has_space(self.config, 'protection'):
        # Protections can only be optimized when using the Parameter interface
        logger.debug("Hyperopt has 'protection' space")
        # Enable Protections if protection space is selected.
        self.config['enable_protections'] = True
        self.protection_space = self.custom_hyperopt.protection_space()

    if HyperoptTools.has_space(self.config, 'buy'):
        logger.debug("Hyperopt has 'buy' space")
        self.buy_space = self.custom_hyperopt.buy_indicator_space()

    if HyperoptTools.has_space(self.config, 'sell'):
        logger.debug("Hyperopt has 'sell' space")
        self.sell_space = self.custom_hyperopt.sell_indicator_space()

    if HyperoptTools.has_space(self.config, 'roi'):
        logger.debug("Hyperopt has 'roi' space")
        self.roi_space = self.custom_hyperopt.roi_space()

    if HyperoptTools.has_space(self.config, 'stoploss'):
        logger.debug("Hyperopt has 'stoploss' space")
        self.stoploss_space = self.custom_hyperopt.stoploss_space()

    if HyperoptTools.has_space(self.config, 'trailing'):
        logger.debug("Hyperopt has 'trailing' space")
        self.trailing_space = self.custom_hyperopt.trailing_space()

    # Concatenation order matters: _get_params_dict zips this list against
    # the raw positional values returned by the optimizer.
    self.dimensions = (self.buy_space + self.sell_space + self.protection_space
                       + self.roi_space + self.stoploss_space + self.trailing_space)
|
2017-12-26 08:08:10 +00:00
|
|
|
|
2021-09-11 07:06:57 +00:00
|
|
|
def assign_params(self, params_dict: Dict, category: str) -> None:
|
|
|
|
"""
|
|
|
|
Assign hyperoptable parameters
|
|
|
|
"""
|
|
|
|
for attr_name, attr in self.backtesting.strategy.enumerate_parameters(category):
|
|
|
|
if attr.optimize:
|
|
|
|
# noinspection PyProtectedMember
|
|
|
|
attr.value = params_dict[attr_name]
|
|
|
|
|
2022-09-11 09:56:17 +00:00
|
|
|
def generate_optimizer(self, raw_params: List[Any]) -> Dict[str, Any]:
    """
    Used Optimize function.
    Called once per epoch to optimize whatever is configured.
    Keep this function as optimized as possible!

    :param raw_params: one optimizer point, ordered like self.dimensions.
    :return: epoch result dictionary built by _get_results_dict.
    """
    HyperoptStateContainer.set_state(HyperoptState.OPTIMIZE)
    backtest_start_time = datetime.now(timezone.utc)
    # Map positional optimizer values back onto named parameters.
    params_dict = self._get_params_dict(self.dimensions, raw_params)

    # Apply parameters
    if HyperoptTools.has_space(self.config, 'buy'):
        self.assign_params(params_dict, 'buy')

    if HyperoptTools.has_space(self.config, 'sell'):
        self.assign_params(params_dict, 'sell')

    if HyperoptTools.has_space(self.config, 'protection'):
        self.assign_params(params_dict, 'protection')

    if HyperoptTools.has_space(self.config, 'roi'):
        self.backtesting.strategy.minimal_roi = (
            self.custom_hyperopt.generate_roi_table(params_dict))

    if HyperoptTools.has_space(self.config, 'stoploss'):
        self.backtesting.strategy.stoploss = params_dict['stoploss']

    if HyperoptTools.has_space(self.config, 'trailing'):
        d = self.custom_hyperopt.generate_trailing_params(params_dict)
        self.backtesting.strategy.trailing_stop = d['trailing_stop']
        self.backtesting.strategy.trailing_stop_positive = d['trailing_stop_positive']
        self.backtesting.strategy.trailing_stop_positive_offset = \
            d['trailing_stop_positive_offset']
        self.backtesting.strategy.trailing_only_offset_is_reached = \
            d['trailing_only_offset_is_reached']

    # mmap the data dumped by prepare_hyperopt_data so worker processes
    # avoid a full in-memory copy of the dataset.
    with self.data_pickle_file.open('rb') as f:
        processed = load(f, mmap_mode='r')
    if self.analyze_per_epoch:
        # Data is not yet analyzed, rerun populate_indicators.
        processed = self.advise_and_trim(processed)

    bt_results = self.backtesting.backtest(
        processed=processed,
        start_date=self.min_date,
        end_date=self.max_date,
        max_open_trades=self.max_open_trades,
        position_stacking=self.position_stacking,
        enable_protections=self.config.get('enable_protections', False),
    )
    backtest_end_time = datetime.now(timezone.utc)
    # Record wall-clock timing of this epoch's backtest (epoch seconds).
    bt_results.update({
        'backtest_start_time': int(backtest_start_time.timestamp()),
        'backtest_end_time': int(backtest_end_time.timestamp()),
    })

    return self._get_results_dict(bt_results, self.min_date, self.max_date,
                                  params_dict,
                                  processed=processed)
|
2019-11-27 19:52:43 +00:00
|
|
|
|
|
|
|
def _get_results_dict(self, backtesting_results, min_date, max_date,
                      params_dict, processed: Dict[str, DataFrame]
                      ) -> Dict[str, Any]:
    """
    Assemble the epoch result record from raw backtest output.

    :param backtesting_results: dict returned by Backtesting.backtest().
    :param min_date: start of the backtested range.
    :param max_date: end of the backtested range.
    :param params_dict: optimizer parameter values used for this epoch.
    :param processed: analyzed dataframes, passed through to the loss function.
    :return: dict with loss, parameter details and result metrics.
    """
    params_details = self._get_params_details(params_dict)

    strat_stats = generate_strategy_stats(
        self.pairlist, self.backtesting.strategy.get_strategy_name(),
        backtesting_results, min_date, max_date, market_change=0
    )
    results_explanation = HyperoptTools.format_results_explanation_string(
        strat_stats, self.config['stake_currency'])

    # Merge strategy-reported non-optimized params with space-level ones.
    not_optimized = self.backtesting.strategy.get_no_optimize_params()
    not_optimized = deep_merge_dicts(not_optimized, self._get_no_optimize_details())

    trade_count = strat_stats['total_trades']
    total_profit = strat_stats['profit_total']

    # If this evaluation contains too short amount of trades to be
    # interesting -- consider it as 'bad' (assigned max. loss value)
    # in order to cast this hyperspace point away from optimization
    # path. We do not want to optimize 'hodl' strategies.
    loss: float = MAX_LOSS
    if trade_count >= self.config['hyperopt_min_trades']:
        loss = self.calculate_loss(results=backtesting_results['results'],
                                   trade_count=trade_count,
                                   min_date=min_date, max_date=max_date,
                                   config=self.config, processed=processed,
                                   backtest_stats=strat_stats)
    return {
        'loss': loss,
        'params_dict': params_dict,
        'params_details': params_details,
        'params_not_optimized': not_optimized,
        'results_metrics': strat_stats,
        'results_explanation': results_explanation,
        'total_profit': total_profit,
    }
|
2017-11-25 00:04:11 +00:00
|
|
|
|
2019-11-26 12:01:42 +00:00
|
|
|
def get_optimizer(self, dimensions: List[Dimension], cpu_count) -> Optimizer:
    """
    Create the scikit-optimize Optimizer for the configured search space.

    :param dimensions: combined list of search-space dimensions.
    :param cpu_count: number of parallel jobs for the acquisition optimizer.
    :return: configured skopt Optimizer instance.
    :raises OperationalException: if an unsupported estimator name is configured.
    """
    estimator = self.custom_hyperopt.generate_estimator(dimensions=dimensions)

    acq_optimizer = "sampling"
    if isinstance(estimator, str):
        # Only the estimator names skopt knows are accepted as strings.
        if estimator not in ("GP", "RF", "ET", "GBRT"):
            raise OperationalException(f"Estimator {estimator} not supported.")
    else:
        # A custom estimator object was supplied - let skopt choose the
        # acquisition optimizer itself.
        acq_optimizer = "auto"

    logger.info(f"Using estimator {estimator}.")
    return Optimizer(
        dimensions,
        base_estimator=estimator,
        acq_optimizer=acq_optimizer,
        n_initial_points=INITIAL_POINTS,
        acq_optimizer_kwargs={'n_jobs': cpu_count},
        random_state=self.random_state,
        model_queue_size=SKOPT_MODEL_QUEUE_SIZE,
    )
|
|
|
|
|
2022-09-11 09:54:31 +00:00
|
|
|
def run_optimizer_parallel(
        self, parallel: Parallel, asked: List[List]) -> List[Dict[str, Any]]:
    """
    Start optimizer in a parallel way.

    :param parallel: joblib Parallel executor.
    :param asked: list of optimizer points to evaluate.
    :return: list of epoch result dicts, one per asked point.
    """
    # Wrap once (the wrapper is point-independent) and fan out over the points.
    evaluate = wrap_non_picklable_objects(self.generate_optimizer)
    return parallel(delayed(evaluate)(point) for point in asked)
|
2018-06-24 12:27:53 +00:00
|
|
|
|
2019-12-12 00:12:28 +00:00
|
|
|
def _set_random_state(self, random_state: Optional[int]) -> int:
|
2019-12-14 12:17:45 +00:00
|
|
|
return random_state or random.randint(1, 2**16 - 1)
|
2019-12-12 00:12:28 +00:00
|
|
|
|
2022-08-19 13:12:55 +00:00
|
|
|
def advise_and_trim(self, data: Dict[str, DataFrame]) -> Dict[str, DataFrame]:
    """
    Run the strategy's indicator population over the raw candle data and
    determine the effective backtest date range.

    Side effect: sets self.min_date / self.max_date from the trimmed data.
    :param data: raw candle dataframes per pair.
    :return: the analyzed (NOT trimmed) dataframes.
    """
    preprocessed = self.backtesting.strategy.advise_all_indicators(data)

    # Trim startup period from analyzed dataframe to get correct dates for output.
    trimmed = trim_dataframes(preprocessed, self.timerange, self.backtesting.required_startup)
    self.min_date, self.max_date = get_timerange(trimmed)
    # Real trimming will happen as part of backtesting.
    return preprocessed
|
2022-08-19 13:12:55 +00:00
|
|
|
|
2021-05-12 19:15:01 +00:00
|
|
|
def prepare_hyperopt_data(self) -> None:
    """
    Load backtest data and - unless analyze_per_epoch is set - run the
    indicator analysis once up front, dumping the result to
    self.data_pickle_file for the epoch workers to load (mmap).
    """
    HyperoptStateContainer.set_state(HyperoptState.DATALOAD)
    data, self.timerange = self.backtesting.load_bt_data()
    self.backtesting.load_bt_data_detail()
    logger.info("Dataload complete. Calculating indicators")

    if not self.analyze_per_epoch:
        HyperoptStateContainer.set_state(HyperoptState.INDICATORS)

        # Analyze once; also sets self.min_date / self.max_date.
        preprocessed = self.advise_and_trim(data)

        logger.info(f'Hyperopting with data from '
                    f'{self.min_date.strftime(DATETIME_PRINT_FORMAT)} '
                    f'up to {self.max_date.strftime(DATETIME_PRINT_FORMAT)} '
                    f'({(self.max_date - self.min_date).days} days)..')
        # Store non-trimmed data - will be trimmed after signal generation.
        dump(preprocessed, self.data_pickle_file)
    else:
        # Indicator calculation is deferred to each epoch (generate_optimizer);
        # store the raw candles instead.
        dump(data, self.data_pickle_file)
|
2019-04-22 18:24:45 +00:00
|
|
|
|
2022-03-29 23:29:14 +00:00
|
|
|
def get_asked_points(self, n_points: int) -> Tuple[List[List[Any]], List[bool]]:
    """
    Enforce points returned from `self.opt.ask` have not been already evaluated

    Steps:
    1. Try to get points using `self.opt.ask` first
    2. Discard the points that have already been evaluated
    3. Retry using `self.opt.ask` up to 3 times
    4. If still some points are missing in respect to `n_points`, random sample some points
    5. Repeat until at least `n_points` points in the `asked_non_tried` list
    6. Return a list with length truncated at `n_points`

    :param n_points: number of points to return.
    :return: tuple of (points, is_random flags); a flag is True for points
             obtained via random sampling rather than the optimizer.
    """
    def unique_list(a_list):
        # Order-preserving de-duplication. Points are lists (unhashable),
        # so a set cannot be used here.
        new_list = []
        for item in a_list:
            if item not in new_list:
                new_list.append(item)
        return new_list
    i = 0
    asked_non_tried: List[List[Any]] = []
    is_random_non_tried: List[bool] = []
    while i < 5 and len(asked_non_tried) < n_points:
        if i < 3:
            # Reset the optimizer's ask-cache so repeated calls yield new points.
            self.opt.cache_ = {}
            asked = unique_list(self.opt.ask(n_points=n_points * 5))
            is_random = [False for _ in range(len(asked))]
        else:
            # Optimizer could not supply enough fresh points - fall back to
            # random samples from the search space.
            asked = unique_list(self.opt.space.rvs(n_samples=n_points * 5))
            is_random = [True for _ in range(len(asked))]
        # NOTE: the flags list must be extended BEFORE asked_non_tried so the
        # "not in asked_non_tried" filter sees the same state for both lists.
        is_random_non_tried += [rand for x, rand in zip(asked, is_random)
                                if x not in self.opt.Xi
                                and x not in asked_non_tried]
        asked_non_tried += [x for x in asked
                            if x not in self.opt.Xi
                            and x not in asked_non_tried]
        i += 1

    if asked_non_tried:
        return (
            asked_non_tried[:min(len(asked_non_tried), n_points)],
            is_random_non_tried[:min(len(asked_non_tried), n_points)]
        )
    else:
        # Nothing untried found at all - return whatever the optimizer gives.
        return self.opt.ask(n_points=n_points), [False for _ in range(n_points)]
|
2022-03-20 16:02:03 +00:00
|
|
|
|
2022-09-11 09:38:36 +00:00
|
|
|
def get_progressbar_widgets(self):
    """
    Build the widget list for the epoch progress bar.

    The colorized and plain variants previously duplicated the whole widget
    list while differing only in the bar-marker styling; the marker is now
    configured conditionally and the surrounding widgets defined once.

    :return: list of progressbar widgets.
    """
    marker_kwargs: Dict[str, Any] = {'fill': '\N{FULL BLOCK}'}
    if self.print_colorized:
        # Wrap the bar fill / animated marker in ANSI color codes.
        marker_kwargs['fill_wrap'] = Fore.GREEN + '{}' + Fore.RESET
        marker_kwargs['marker_wrap'] = Style.BRIGHT + '{}' + Style.RESET_ALL

    widgets = [
        ' [Epoch ', progressbar.Counter(), ' of ', str(self.total_epochs),
        ' (', progressbar.Percentage(), ')] ',
        progressbar.Bar(marker=progressbar.AnimatedMarker(**marker_kwargs)),
        ' [', progressbar.ETA(), ', ', progressbar.Timer(), ']',
    ]
    return widgets
|
|
|
|
|
2022-09-11 09:54:31 +00:00
|
|
|
def evaluate_result(self, val: Dict[str, Any], current: int, is_random: bool):
|
|
|
|
"""
|
|
|
|
Evaluate results returned from generate_optimizer
|
|
|
|
"""
|
|
|
|
val['current_epoch'] = current
|
|
|
|
val['is_initial_point'] = current <= INITIAL_POINTS
|
|
|
|
|
|
|
|
logger.debug("Optimizer epoch evaluated: %s", val)
|
|
|
|
|
|
|
|
is_best = HyperoptTools.is_best_loss(val, self.current_best_loss)
|
|
|
|
# This value is assigned here and not in the optimization method
|
|
|
|
# to keep proper order in the list of results. That's because
|
|
|
|
# evaluations can take different time. Here they are aligned in the
|
|
|
|
# order they will be shown to the user.
|
|
|
|
val['is_best'] = is_best
|
|
|
|
val['is_random'] = is_random
|
|
|
|
self.print_results(val)
|
|
|
|
|
|
|
|
if is_best:
|
|
|
|
self.current_best_loss = val['loss']
|
|
|
|
self.current_best_epoch = val
|
|
|
|
|
|
|
|
self._save_result(val)
|
|
|
|
|
    def start(self) -> None:
        """
        Run the full hyperopt session: prepare data and spaces, strip the
        exchange of unpicklable state, then evaluate epochs in parallel via
        joblib, feeding results back into the optimizer and reporting the best.
        """
        self.random_state = self._set_random_state(self.config.get('hyperopt_random_state'))
        logger.info(f"Using optimizer random state: {self.random_state}")
        # -1 forces the results table header to be reprinted on first output.
        self.hyperopt_table_header = -1
        # Initialize spaces ...
        self.init_spaces()

        self.prepare_hyperopt_data()

        # We don't need exchange instance anymore while running hyperopt
        self.backtesting.exchange.close()
        # Null out live connections/locks/loops: these are not picklable and
        # would break joblib's worker serialization if left in place.
        self.backtesting.exchange._api = None
        self.backtesting.exchange._api_async = None
        self.backtesting.exchange.loop = None  # type: ignore
        self.backtesting.exchange._loop_lock = None  # type: ignore
        self.backtesting.exchange._cache_lock = None  # type: ignore
        # self.backtesting.exchange = None  # type: ignore
        self.backtesting.pairlists = None  # type: ignore

        cpus = cpu_count()
        logger.info(f"Found {cpus} CPU cores. Let's make them scream!")
        # -1 (joblib convention) means "use all cores".
        config_jobs = self.config.get('hyperopt_jobs', -1)
        logger.info(f'Number of parallel jobs set as: {config_jobs}')

        self.opt = self.get_optimizer(self.dimensions, config_jobs)

        if self.print_colorized:
            colorama_init(autoreset=True)

        try:
            with Parallel(n_jobs=config_jobs) as parallel:
                # Resolve the effective worker count (config may say -1).
                jobs = parallel._effective_n_jobs()
                logger.info(f'Effective number of parallel workers used: {jobs}')

                # Define progressbar
                widgets = self.get_progressbar_widgets()
                with progressbar.ProgressBar(
                    max_value=self.total_epochs, redirect_stdout=False, redirect_stderr=False,
                    widgets=widgets
                ) as pbar:
                    # 'start' counts epochs already evaluated outside the
                    # parallel loop below (0 or 1).
                    start = 0

                    if self.analyze_per_epoch:
                        # First analysis not in parallel mode when using --analyze-per-epoch.
                        # This allows dataprovider to load it's informative cache.
                        asked, is_random = self.get_asked_points(n_points=1)
                        # print(asked)
                        f_val = self.generate_optimizer(asked[0])
                        # Feed the observed loss back so the optimizer learns from it.
                        self.opt.tell(asked, [f_val['loss']])
                        self.evaluate_result(f_val, 1, is_random[0])
                        pbar.update(1)
                        start += 1

                    # Number of parallel batches needed to reach total_epochs.
                    evals = ceil((self.total_epochs - start) / jobs)
                    for i in range(evals):
                        # Correct the number of epochs to be processed for the last
                        # iteration (should not exceed self.total_epochs in total)
                        n_rest = (i + 1) * jobs - (self.total_epochs - start)
                        current_jobs = jobs - n_rest if n_rest > 0 else jobs

                        asked, is_random = self.get_asked_points(n_points=current_jobs)
                        f_val = self.run_optimizer_parallel(parallel, asked)
                        self.opt.tell(asked, [v['loss'] for v in f_val])

                        # Calculate progressbar outputs
                        for j, val in enumerate(f_val):
                            # Use human-friendly indexes here (starting from 1)
                            current = i * jobs + j + 1 + start

                            self.evaluate_result(val, current, is_random[j])

                            pbar.update(current)

        except KeyboardInterrupt:
            # Graceful stop: fall through and report whatever was evaluated.
            print('User interrupted..')

        logger.info(f"{self.num_epochs_saved} {plural(self.num_epochs_saved, 'epoch')} "
                    f"saved to '{self.results_file}'.")

        if self.current_best_epoch:
            HyperoptTools.try_export_params(
                self.config,
                self.backtesting.strategy.get_strategy_name(),
                self.current_best_epoch)

            HyperoptTools.show_epoch_details(self.current_best_epoch, self.total_epochs,
                                             self.print_json)
        else:
            # This is printed when Ctrl+C is pressed quickly, before first epochs have
            # a chance to be evaluated.
            print("No epochs evaluated yet, no best result.")