"""
This module contains the configuration class
"""
import json
import logging
import os
import sys
from argparse import Namespace
from logging.handlers import RotatingFileHandler
from typing import Any, Dict, List, Optional

from jsonschema import Draft4Validator, validate
from jsonschema.exceptions import ValidationError, best_match

from freqtrade import OperationalException, constants
from freqtrade.exchange import is_exchange_supported, supported_exchanges
from freqtrade.misc import deep_merge_dicts
from freqtrade.state import RunMode


logger = logging.getLogger(__name__)


def set_loggers(log_level: int = 0) -> None:
    """
    Set the logging level for third-party libraries
    :return: None
    """

    logging.getLogger('requests').setLevel(logging.INFO if log_level <= 1 else logging.DEBUG)
    logging.getLogger("urllib3").setLevel(logging.INFO if log_level <= 1 else logging.DEBUG)
    logging.getLogger('ccxt.base.exchange').setLevel(
        logging.INFO if log_level <= 2 else logging.DEBUG)
    logging.getLogger('telegram').setLevel(logging.INFO)
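
# Illustrative note (not part of the original module): with the mapping above, the
# effective third-party log levels per verbosity value are:
#     set_loggers(0)  -> requests, urllib3 and ccxt at INFO
#     set_loggers(2)  -> requests and urllib3 at DEBUG, ccxt still at INFO
#     set_loggers(3)  -> requests, urllib3 and ccxt at DEBUG
# The telegram logger stays at INFO regardless of the value passed in.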


class Configuration(object):
    """
    Class to read and init the bot configuration
    Reuse this class for the bot, backtesting, hyperopt and every script that
    requires a configuration
    """

    def __init__(self, args: Namespace, runmode: Optional[RunMode] = None) -> None:
        self.args = args
        self.config: Optional[Dict[str, Any]] = None
        self.runmode = runmode

    def load_config(self) -> Dict[str, Any]:
        """
        Extract information from sys.argv and load the bot configuration
        :return: Configuration dictionary
        """
        config: Dict[str, Any] = {}
        # Now expecting a list of config filenames here, not a string
        for path in self.args.config:
            logger.info('Using config: %s ...', path)
            # Merge config options, overwriting old values
            config = deep_merge_dicts(self._load_config_file(path), config)

        if 'internals' not in config:
            config['internals'] = {}

        logger.info('Validating configuration ...')
        self._validate_config_schema(config)
        self._validate_config_consistency(config)

        # Set the strategy if it is not specified in the config,
        # or if a non-default strategy was given on the command line
        if self.args.strategy != constants.DEFAULT_STRATEGY or not config.get('strategy'):
            config.update({'strategy': self.args.strategy})

        if self.args.strategy_path:
            config.update({'strategy_path': self.args.strategy_path})

        # Load Common configuration
        config = self._load_common_config(config)

        # Load Backtesting
        config = self._load_backtesting_config(config)

        # Load Edge
        config = self._load_edge_config(config)

        # Load Hyperopt
        config = self._load_hyperopt_config(config)

        # Set runmode
        if not self.runmode:
            # Handle real mode, infer dry/live from config
            self.runmode = RunMode.DRY_RUN if config.get('dry_run', True) else RunMode.LIVE

        config.update({'runmode': self.runmode})

        return config
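
    # Illustrative example (not part of the original code): because each file is merged
    # as the *source* into the accumulated dict, values from config files given later on
    # the command line win. With
    #     base.json:     {"max_open_trades": 3, "exchange": {"name": "binance"}}
    #     override.json: {"max_open_trades": 5}
    # and `-c base.json -c override.json`, the merged result has max_open_trades == 5
    # while the nested exchange section from base.json is kept.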

    def _load_config_file(self, path: str) -> Dict[str, Any]:
        """
        Loads a config file from the given path
        :param path: path as str
        :return: configuration as dictionary
        """
        try:
            with open(path) as file:
                conf = json.load(file)
        except FileNotFoundError:
            raise OperationalException(
                f'Config file "{path}" not found!'
                ' Please create a config file or check whether it exists.')

        return conf

    def _load_common_config(self, config: Dict[str, Any]) -> Dict[str, Any]:
        """
        Extract information from sys.argv and load common configuration
        :return: configuration as dictionary
        """

        # Log level
        if 'loglevel' in self.args and self.args.loglevel:
            config.update({'verbosity': self.args.loglevel})
        else:
            config.update({'verbosity': 0})

        # Log to stdout, not stderr
        log_handlers: List[logging.Handler] = [logging.StreamHandler(sys.stdout)]
        if 'logfile' in self.args and self.args.logfile:
            config.update({'logfile': self.args.logfile})

        # Allow setting this as either configuration or argument
        if 'logfile' in config:
            log_handlers.append(RotatingFileHandler(config['logfile'],
                                                    maxBytes=1024 * 1024,  # 1MB
                                                    backupCount=10))

        logging.basicConfig(
            level=logging.INFO if config['verbosity'] < 1 else logging.DEBUG,
            format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
            handlers=log_handlers
        )
        set_loggers(config['verbosity'])
        logger.info('Verbosity set to %s', config['verbosity'])

        # Support for sd_notify
        if self.args.sd_notify:
            config['internals'].update({'sd_notify': True})

        # Add dynamic_whitelist if found
        if 'dynamic_whitelist' in self.args and self.args.dynamic_whitelist:
            # Update to VolumePairList (the previous default)
            config['pairlist'] = {'method': 'VolumePairList',
                                  'config': {'number_assets': self.args.dynamic_whitelist}
                                  }
            logger.warning(
                'Parameter --dynamic-whitelist has been deprecated, '
                'and will be completely replaced by the whitelist dict in the future. '
                'For now: using dynamically generated whitelist based on VolumePairList. '
                '(not applicable with Backtesting and Hyperopt)'
            )

        if self.args.db_url and self.args.db_url != constants.DEFAULT_DB_PROD_URL:
            config.update({'db_url': self.args.db_url})
            logger.info('Parameter --db-url detected ...')

        if config.get('dry_run', False):
            logger.info('Dry run is enabled')
            if config.get('db_url') in [None, constants.DEFAULT_DB_PROD_URL]:
                # Default to in-memory db for dry_run if not specified
                config['db_url'] = constants.DEFAULT_DB_DRYRUN_URL
        else:
            if not config.get('db_url', None):
                config['db_url'] = constants.DEFAULT_DB_PROD_URL
            logger.info('Dry run is disabled')

        if config.get('forcebuy_enable', False):
            logger.warning('`forcebuy` RPC message enabled.')

        # Setting max_open_trades to infinite if -1
        if config.get('max_open_trades') == -1:
            config['max_open_trades'] = float('inf')

        logger.info(f'Using DB: "{config["db_url"]}"')

        # Check if the exchange set by the user is supported
        self.check_exchange(config)

        return config
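
    # Illustrative summary (not part of the original code) of how db_url ends up set
    # by the method above; the concrete default URLs live in freqtrade.constants:
    #     dry_run: true,  no --db-url        -> constants.DEFAULT_DB_DRYRUN_URL
    #     dry_run: false, no --db-url        -> constants.DEFAULT_DB_PROD_URL
    #     --db-url <url> (non-default value) -> <url>, in both dry-run and live mode
    # A db_url set in the config file is also kept, unless dry_run is enabled and it
    # equals the production default.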

    def _create_datadir(self, config: Dict[str, Any], datadir: Optional[str] = None) -> str:
        if not datadir:
            # set datadir
            exchange_name = config.get('exchange', {}).get('name').lower()
            datadir = os.path.join('user_data', 'data', exchange_name)

        if not os.path.isdir(datadir):
            os.makedirs(datadir)
            logger.info(f'Created data directory: {datadir}')
        return datadir

    def _load_backtesting_config(self, config: Dict[str, Any]) -> Dict[str, Any]:  # noqa: C901
        """
        Extract information from sys.argv and load Backtesting configuration
        :return: configuration as dictionary
        """

        # If -i/--ticker-interval is used we override the configuration parameter
        # (that will override the strategy configuration)
        if 'ticker_interval' in self.args and self.args.ticker_interval:
            config.update({'ticker_interval': self.args.ticker_interval})
            logger.info('Parameter -i/--ticker-interval detected ...')
            logger.info('Using ticker_interval: %s ...', config.get('ticker_interval'))

        # If -l/--live is used we add it to the configuration
        if 'live' in self.args and self.args.live:
            config.update({'live': True})
            logger.info('Parameter -l/--live detected ...')

        # If --enable-position-stacking is used we add it to the configuration
        if 'position_stacking' in self.args and self.args.position_stacking:
            config.update({'position_stacking': True})
            logger.info('Parameter --enable-position-stacking detected ...')

        # If --disable-max-market-positions or --max_open_trades is used we update configuration
        if 'use_max_market_positions' in self.args and not self.args.use_max_market_positions:
            config.update({'use_max_market_positions': False})
            logger.info('Parameter --disable-max-market-positions detected ...')
            logger.info('max_open_trades set to unlimited ...')
        elif 'max_open_trades' in self.args and self.args.max_open_trades:
            config.update({'max_open_trades': self.args.max_open_trades})
            logger.info('Parameter --max_open_trades detected, '
                        'overriding max_open_trades to: %s ...', config.get('max_open_trades'))
        else:
            logger.info('Using max_open_trades: %s ...', config.get('max_open_trades'))

        # If --stake_amount is used we update configuration
        if 'stake_amount' in self.args and self.args.stake_amount:
            config.update({'stake_amount': self.args.stake_amount})
            logger.info('Parameter --stake_amount detected, overriding stake_amount to: %s ...',
                        config.get('stake_amount'))

        # If --timerange is used we add it to the configuration
        if 'timerange' in self.args and self.args.timerange:
            config.update({'timerange': self.args.timerange})
            logger.info('Parameter --timerange detected: %s ...', self.args.timerange)

        # If --datadir is used we add it to the configuration
        if 'datadir' in self.args and self.args.datadir:
            config.update({'datadir': self._create_datadir(config, self.args.datadir)})
        else:
            config.update({'datadir': self._create_datadir(config, None)})
        logger.info('Using data folder: %s ...', config.get('datadir'))

        # If -r/--refresh-pairs-cached is used we add it to the configuration
        if 'refresh_pairs' in self.args and self.args.refresh_pairs:
            config.update({'refresh_pairs': True})
            logger.info('Parameter -r/--refresh-pairs-cached detected ...')

        if 'strategy_list' in self.args and self.args.strategy_list:
            config.update({'strategy_list': self.args.strategy_list})
            logger.info('Using strategy list of %s Strategies', len(self.args.strategy_list))

        if 'ticker_interval' in self.args and self.args.ticker_interval:
            config.update({'ticker_interval': self.args.ticker_interval})
            logger.info('Overriding ticker interval with Command line argument')

        # If --export is used we add it to the configuration
        if 'export' in self.args and self.args.export:
            config.update({'export': self.args.export})
            logger.info('Parameter --export detected: %s ...', self.args.export)

        # If --export-filename is used we add it to the configuration
        if 'export' in config and 'exportfilename' in self.args and self.args.exportfilename:
            config.update({'exportfilename': self.args.exportfilename})
            logger.info('Storing backtest results to %s ...', self.args.exportfilename)

        return config

    def _load_edge_config(self, config: Dict[str, Any]) -> Dict[str, Any]:
        """
        Extract information from sys.argv and load Edge configuration
        :return: configuration as dictionary
        """

        # If --timerange is used we add it to the configuration
        if 'timerange' in self.args and self.args.timerange:
            config.update({'timerange': self.args.timerange})
            logger.info('Parameter --timerange detected: %s ...', self.args.timerange)

        # If --stoplosses is used we add the stoploss range to the edge configuration
        if 'stoploss_range' in self.args and self.args.stoploss_range:
            txt_range = eval(self.args.stoploss_range)
            config['edge'].update({'stoploss_range_min': txt_range[0]})
            config['edge'].update({'stoploss_range_max': txt_range[1]})
            config['edge'].update({'stoploss_range_step': txt_range[2]})
            logger.info('Parameter --stoplosses detected: %s ...', self.args.stoploss_range)

        # If -r/--refresh-pairs-cached is used we add it to the configuration
        if 'refresh_pairs' in self.args and self.args.refresh_pairs:
            config.update({'refresh_pairs': True})
            logger.info('Parameter -r/--refresh-pairs-cached detected ...')

        return config

    def _load_hyperopt_config(self, config: Dict[str, Any]) -> Dict[str, Any]:
        """
        Extract information from sys.argv and load Hyperopt configuration
        :return: configuration as dictionary
        """

        if "hyperopt" in self.args:
            # Add the hyperopt file to use
            config.update({'hyperopt': self.args.hyperopt})

        # If --epochs is used we add it to the configuration
        if 'epochs' in self.args and self.args.epochs:
            config.update({'epochs': self.args.epochs})
            logger.info('Parameter --epochs detected ...')
            logger.info('Will run Hyperopt for %s epochs ...', config.get('epochs'))

        # If --spaces is used we add it to the configuration
        if 'spaces' in self.args and self.args.spaces:
            config.update({'spaces': self.args.spaces})
            logger.info('Parameter -s/--spaces detected: %s', config.get('spaces'))

        if 'print_all' in self.args and self.args.print_all:
            config.update({'print_all': self.args.print_all})
            logger.info('Parameter --print-all detected: %s', config.get('print_all'))

        return config

    def _validate_config_schema(self, conf: Dict[str, Any]) -> Dict[str, Any]:
        """
        Validate the configuration follows the Config Schema
        :param conf: Config in JSON format
        :return: Returns the config if valid, otherwise throws an exception
        """
        try:
            validate(conf, constants.CONF_SCHEMA, Draft4Validator)
            return conf
        except ValidationError as exception:
            logger.critical(
                'Invalid configuration. See config.json.example. Reason: %s',
                exception
            )
            raise ValidationError(
                best_match(Draft4Validator(constants.CONF_SCHEMA).iter_errors(conf)).message
            )

    def _validate_config_consistency(self, conf: Dict[str, Any]) -> None:
        """
        Validate the configuration consistency
        :param conf: Config in JSON format
        :return: Returns None if everything is ok, otherwise throws an OperationalException
        """

        # validating trailing stoploss
        self._validate_trailing_stoploss(conf)

    def _validate_trailing_stoploss(self, conf: Dict[str, Any]) -> None:

        # Skip if trailing stoploss is not activated
        if not conf.get('trailing_stop', False):
            return

        tsl_positive = float(conf.get('trailing_stop_positive', 0))
        tsl_offset = float(conf.get('trailing_stop_positive_offset', 0))
        tsl_only_offset = conf.get('trailing_only_offset_is_reached', False)

        if tsl_only_offset:
            if tsl_positive == 0.0:
                raise OperationalException(
                    'The config trailing_only_offset_is_reached needs '
                    'trailing_stop_positive_offset to be more than 0 in your config.')
        if tsl_positive > 0 and 0 < tsl_offset <= tsl_positive:
            raise OperationalException(
                'The config trailing_stop_positive_offset needs '
                'to be greater than trailing_stop_positive in your config.')
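
    # Illustrative example (not part of the original code): a trailing-stop section
    # that passes both checks above, since the offset (0.02) is greater than
    # trailing_stop_positive (0.01) and both are non-zero:
    #     "trailing_stop": true,
    #     "trailing_stop_positive": 0.01,
    #     "trailing_stop_positive_offset": 0.02,
    #     "trailing_only_offset_is_reached": true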

    def get_config(self) -> Dict[str, Any]:
        """
        Return the config. Use this method to get the bot config
        :return: Dict: Bot config
        """
        if self.config is None:
            self.config = self.load_config()

        return self.config

    def check_exchange(self, config: Dict[str, Any]) -> bool:
        """
        Check if the exchange name in the config file is supported by Freqtrade
        :return: True if the exchange is supported, otherwise raises an exception
        """
        exchange = config.get('exchange', {}).get('name').lower()
        if not is_exchange_supported(exchange):

            exception_msg = f'Exchange "{exchange}" not supported.\n' \
                            f'The following exchanges are supported: ' \
                            f'{", ".join(supported_exchanges())}'

            logger.critical(exception_msg)
            raise OperationalException(
                exception_msg
            )

        logger.debug('Exchange "%s" supported', exchange)
        return True
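

# Illustrative usage sketch (not part of the original module). The attribute names on
# the Namespace below are assumptions derived from how this class reads self.args; in
# practice the Namespace comes from freqtrade's own argument parser:
#
#     args = Namespace(config=['config.json'], strategy='DefaultStrategy',
#                      strategy_path=None, db_url=None, sd_notify=False)
#     configuration = Configuration(args, RunMode.DRY_RUN)
#     config = configuration.get_config()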