"""
This module contains the configuration class
"""
import ast
import json
import logging
import os
import sys
from argparse import Namespace
from logging.handlers import RotatingFileHandler
from typing import Any, Callable, Dict, List, Optional

from jsonschema import Draft4Validator, validators
from jsonschema.exceptions import ValidationError, best_match

from freqtrade import OperationalException, constants
from freqtrade.exchange import (is_exchange_bad, is_exchange_available,
                                is_exchange_officially_supported, available_exchanges)
from freqtrade.misc import deep_merge_dicts
from freqtrade.state import RunMode
|
2019-02-19 12:14:47 +00:00
|
|
|
|
2018-07-19 18:43:41 +00:00
|
|
|
# Module-level logger, named after this module
logger = logging.getLogger(__name__)
def set_loggers(log_level: int = 0) -> None:
    """
    Set the logger level for Third party libs

    :param log_level: verbosity (0 = quietest); higher values enable DEBUG
                      output for the HTTP and exchange libraries
    :return: None
    """
    # requests/urllib3 switch to DEBUG at verbosity >= 2
    http_level = logging.INFO if log_level <= 1 else logging.DEBUG
    for lib_name in ('requests', 'urllib3'):
        logging.getLogger(lib_name).setLevel(http_level)

    # ccxt is noisier, so it only switches to DEBUG at verbosity >= 3
    logging.getLogger('ccxt.base.exchange').setLevel(
        logging.INFO if log_level <= 2 else logging.DEBUG)

    # telegram stays at INFO regardless of verbosity
    logging.getLogger('telegram').setLevel(logging.INFO)
def _extend_with_default(validator_class):
    """
    Extend a jsonschema validator class so that it also writes schema
    "default" values into the instance being validated.

    :param validator_class: jsonschema validator class to extend
    :return: new validator class with default-injecting "properties" handling
    """
    base_validate_properties = validator_class.VALIDATORS["properties"]

    def set_defaults(validator, properties, instance, schema):
        # Inject defaults first, so the subsequent validation sees them
        for name, prop_schema in properties.items():
            if "default" in prop_schema:
                instance.setdefault(name, prop_schema["default"])

        yield from base_validate_properties(validator, properties, instance, schema)

    return validators.extend(validator_class, {"properties": set_defaults})
# Draft4 validator variant that also fills schema "default" values
# into the dict being validated
ValidatorWithDefaults = _extend_with_default(Draft4Validator)
class Configuration(object):
    """
    Class to read and init the bot configuration
    Reuse this class for the bot, backtesting, hyperopt and every script that required configuration
    """

    def __init__(self, args: Namespace, runmode: RunMode = None) -> None:
        """
        :param args: parsed command line arguments (argparse Namespace)
        :param runmode: explicit run mode; when None, load_config() infers
                        DRY_RUN/LIVE from the 'dry_run' config setting
        """
        self.args = args
        # Cached configuration dict, built lazily by get_config()
        self.config: Optional[Dict[str, Any]] = None
        self.runmode = runmode
def load_config(self) -> Dict[str, Any]:
|
2018-02-04 06:42:03 +00:00
|
|
|
"""
|
|
|
|
Extract information for sys.argv and load the bot configuration
|
|
|
|
:return: Configuration dictionary
|
|
|
|
"""
|
2019-02-19 12:14:47 +00:00
|
|
|
config: Dict[str, Any] = {}
|
|
|
|
# Now expecting a list of config filenames here, not a string
|
|
|
|
for path in self.args.config:
|
|
|
|
logger.info('Using config: %s ...', path)
|
|
|
|
# Merge config options, overwriting old values
|
|
|
|
config = deep_merge_dicts(self._load_config_file(path), config)
|
|
|
|
|
|
|
|
if 'internals' not in config:
|
|
|
|
config['internals'] = {}
|
|
|
|
|
|
|
|
logger.info('Validating configuration ...')
|
2019-03-14 08:01:03 +00:00
|
|
|
self._validate_config_schema(config)
|
2019-03-16 09:38:25 +00:00
|
|
|
self._validate_config_consistency(config)
|
2018-02-04 06:42:03 +00:00
|
|
|
|
2018-03-27 16:20:15 +00:00
|
|
|
# Set strategy if not specified in config and or if it's non default
|
2018-04-02 14:42:53 +00:00
|
|
|
if self.args.strategy != constants.DEFAULT_STRATEGY or not config.get('strategy'):
|
2018-03-27 16:15:49 +00:00
|
|
|
config.update({'strategy': self.args.strategy})
|
2018-02-04 06:42:03 +00:00
|
|
|
|
2018-03-25 14:28:04 +00:00
|
|
|
if self.args.strategy_path:
|
|
|
|
config.update({'strategy_path': self.args.strategy_path})
|
2018-02-04 06:42:03 +00:00
|
|
|
|
2018-03-03 21:39:39 +00:00
|
|
|
# Load Common configuration
|
|
|
|
config = self._load_common_config(config)
|
2018-03-02 13:46:32 +00:00
|
|
|
|
2019-04-24 19:27:32 +00:00
|
|
|
# Load Optimize configurations
|
|
|
|
config = self._load_optimize_config(config)
|
2018-03-02 13:46:32 +00:00
|
|
|
|
2018-12-25 13:23:59 +00:00
|
|
|
# Set runmode
|
|
|
|
if not self.runmode:
|
|
|
|
# Handle real mode, infer dry/live from config
|
|
|
|
self.runmode = RunMode.DRY_RUN if config.get('dry_run', True) else RunMode.LIVE
|
|
|
|
|
|
|
|
config.update({'runmode': self.runmode})
|
|
|
|
|
2018-02-04 06:42:03 +00:00
|
|
|
return config
|
|
|
|
|
|
|
|
def _load_config_file(self, path: str) -> Dict[str, Any]:
|
|
|
|
"""
|
|
|
|
Loads a config file from the given path
|
|
|
|
:param path: path as str
|
|
|
|
:return: configuration as dictionary
|
|
|
|
"""
|
2018-03-05 04:22:40 +00:00
|
|
|
try:
|
|
|
|
with open(path) as file:
|
|
|
|
conf = json.load(file)
|
|
|
|
except FileNotFoundError:
|
2018-06-08 00:00:42 +00:00
|
|
|
raise OperationalException(
|
2018-06-14 05:31:29 +00:00
|
|
|
f'Config file "{path}" not found!'
|
|
|
|
' Please create a config file or check whether it exists.')
|
2018-02-04 06:42:03 +00:00
|
|
|
|
2019-02-19 12:14:47 +00:00
|
|
|
return conf
|
2018-02-04 06:42:03 +00:00
|
|
|
|
2019-05-29 18:57:14 +00:00
|
|
|
def _load_logging_config(self, config: Dict[str, Any]) -> None:
|
2018-03-03 21:39:39 +00:00
|
|
|
"""
|
2019-05-29 18:57:14 +00:00
|
|
|
Extract information for sys.argv and load logging configuration:
|
|
|
|
the --loglevel, --logfile options
|
2018-03-03 21:39:39 +00:00
|
|
|
"""
|
|
|
|
# Log level
|
|
|
|
if 'loglevel' in self.args and self.args.loglevel:
|
2018-07-19 19:12:27 +00:00
|
|
|
config.update({'verbosity': self.args.loglevel})
|
|
|
|
else:
|
|
|
|
config.update({'verbosity': 0})
|
2019-03-29 19:12:44 +00:00
|
|
|
|
|
|
|
# Log to stdout, not stderr
|
2019-03-29 19:16:41 +00:00
|
|
|
log_handlers: List[logging.Handler] = [logging.StreamHandler(sys.stdout)]
|
2019-03-29 19:12:44 +00:00
|
|
|
if 'logfile' in self.args and self.args.logfile:
|
|
|
|
config.update({'logfile': self.args.logfile})
|
|
|
|
|
|
|
|
# Allow setting this as either configuration or argument
|
|
|
|
if 'logfile' in config:
|
|
|
|
log_handlers.append(RotatingFileHandler(config['logfile'],
|
|
|
|
maxBytes=1024 * 1024, # 1Mb
|
|
|
|
backupCount=10))
|
|
|
|
|
2018-07-19 19:12:27 +00:00
|
|
|
logging.basicConfig(
|
|
|
|
level=logging.INFO if config['verbosity'] < 1 else logging.DEBUG,
|
|
|
|
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
|
2019-03-29 19:12:44 +00:00
|
|
|
handlers=log_handlers
|
2018-07-19 19:12:27 +00:00
|
|
|
)
|
|
|
|
set_loggers(config['verbosity'])
|
|
|
|
logger.info('Verbosity set to %s', config['verbosity'])
|
2018-03-03 21:39:39 +00:00
|
|
|
|
2019-05-29 18:57:14 +00:00
|
|
|
def _load_common_config(self, config: Dict[str, Any]) -> Dict[str, Any]:
|
|
|
|
"""
|
|
|
|
Extract information for sys.argv and load common configuration
|
|
|
|
:return: configuration as dictionary
|
|
|
|
"""
|
|
|
|
self._load_logging_config(config)
|
|
|
|
|
2019-03-10 17:05:33 +00:00
|
|
|
# Support for sd_notify
|
|
|
|
if self.args.sd_notify:
|
|
|
|
config['internals'].update({'sd_notify': True})
|
|
|
|
|
2018-03-03 21:39:39 +00:00
|
|
|
# Add dynamic_whitelist if found
|
|
|
|
if 'dynamic_whitelist' in self.args and self.args.dynamic_whitelist:
|
2018-12-03 19:00:18 +00:00
|
|
|
# Update to volumePairList (the previous default)
|
2018-12-04 19:23:03 +00:00
|
|
|
config['pairlist'] = {'method': 'VolumePairList',
|
|
|
|
'config': {'number_assets': self.args.dynamic_whitelist}
|
|
|
|
}
|
2018-12-03 19:00:18 +00:00
|
|
|
logger.warning(
|
|
|
|
'Parameter --dynamic-whitelist has been deprecated, '
|
|
|
|
'and will be completely replaced by the whitelist dict in the future. '
|
|
|
|
'For now: using dynamically generated whitelist based on VolumePairList. '
|
2018-03-03 21:39:39 +00:00
|
|
|
'(not applicable with Backtesting and Hyperopt)'
|
|
|
|
)
|
|
|
|
|
2018-09-17 17:57:47 +00:00
|
|
|
if self.args.db_url and self.args.db_url != constants.DEFAULT_DB_PROD_URL:
|
2018-06-07 03:26:39 +00:00
|
|
|
config.update({'db_url': self.args.db_url})
|
|
|
|
logger.info('Parameter --db-url detected ...')
|
|
|
|
|
|
|
|
if config.get('dry_run', False):
|
|
|
|
logger.info('Dry run is enabled')
|
2018-06-07 15:29:43 +00:00
|
|
|
if config.get('db_url') in [None, constants.DEFAULT_DB_PROD_URL]:
|
2018-06-07 03:26:39 +00:00
|
|
|
# Default to in-memory db for dry_run if not specified
|
2018-06-07 15:29:43 +00:00
|
|
|
config['db_url'] = constants.DEFAULT_DB_DRYRUN_URL
|
2018-06-07 03:26:39 +00:00
|
|
|
else:
|
|
|
|
if not config.get('db_url', None):
|
2018-06-07 15:29:43 +00:00
|
|
|
config['db_url'] = constants.DEFAULT_DB_PROD_URL
|
2018-06-07 03:26:39 +00:00
|
|
|
logger.info('Dry run is disabled')
|
|
|
|
|
2018-10-10 18:13:56 +00:00
|
|
|
if config.get('forcebuy_enable', False):
|
|
|
|
logger.warning('`forcebuy` RPC message enabled.')
|
|
|
|
|
2018-11-14 10:37:53 +00:00
|
|
|
# Setting max_open_trades to infinite if -1
|
|
|
|
if config.get('max_open_trades') == -1:
|
|
|
|
config['max_open_trades'] = float('inf')
|
|
|
|
|
2018-06-14 05:31:29 +00:00
|
|
|
logger.info(f'Using DB: "{config["db_url"]}"')
|
2018-03-03 21:39:39 +00:00
|
|
|
|
2018-03-30 20:14:35 +00:00
|
|
|
# Check if the exchange set by the user is supported
|
2018-04-04 20:05:17 +00:00
|
|
|
self.check_exchange(config)
|
2018-03-30 20:14:35 +00:00
|
|
|
|
2018-03-03 21:39:39 +00:00
|
|
|
return config
|
|
|
|
|
2019-01-01 13:07:40 +00:00
|
|
|
def _create_datadir(self, config: Dict[str, Any], datadir: Optional[str] = None) -> str:
|
|
|
|
if not datadir:
|
|
|
|
# set datadir
|
|
|
|
exchange_name = config.get('exchange', {}).get('name').lower()
|
|
|
|
datadir = os.path.join('user_data', 'data', exchange_name)
|
|
|
|
|
|
|
|
if not os.path.isdir(datadir):
|
|
|
|
os.makedirs(datadir)
|
|
|
|
logger.info(f'Created data directory: {datadir}')
|
|
|
|
return datadir
|
2018-06-04 11:13:19 +00:00
|
|
|
|
2019-04-24 19:32:33 +00:00
|
|
|
def _args_to_config(self, config: Dict[str, Any], argname: str,
|
2019-04-24 20:08:56 +00:00
|
|
|
logstring: str, logfun: Optional[Callable] = None) -> None:
|
2019-04-24 19:02:05 +00:00
|
|
|
"""
|
2019-04-24 19:32:33 +00:00
|
|
|
:param config: Configuration dictionary
|
|
|
|
:param argname: Argumentname in self.args - will be copied to config dict.
|
|
|
|
:param logstring: Logging String
|
|
|
|
:param logfun: logfun is applied to the configuration entry before passing
|
|
|
|
that entry to the log string using .format().
|
|
|
|
sample: logfun=len (prints the length of the found
|
|
|
|
configuration instead of the content)
|
2019-04-24 19:02:05 +00:00
|
|
|
"""
|
2019-04-24 18:44:36 +00:00
|
|
|
if argname in self.args and getattr(self.args, argname):
|
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
config.update({argname: getattr(self.args, argname)})
|
2019-04-24 19:02:05 +00:00
|
|
|
if logfun:
|
2019-04-24 19:13:57 +00:00
|
|
|
logger.info(logstring.format(logfun(config[argname])))
|
2019-04-24 19:02:05 +00:00
|
|
|
else:
|
2019-04-24 19:13:57 +00:00
|
|
|
logger.info(logstring.format(config[argname]))
|
2019-04-24 18:44:36 +00:00
|
|
|
|
2019-05-29 18:57:14 +00:00
|
|
|
def _load_datadir_config(self, config: Dict[str, Any]) -> None:
|
|
|
|
"""
|
|
|
|
Extract information for sys.argv and load datadir configuration:
|
|
|
|
the --datadir option
|
|
|
|
"""
|
|
|
|
if 'datadir' in self.args and self.args.datadir:
|
|
|
|
config.update({'datadir': self._create_datadir(config, self.args.datadir)})
|
|
|
|
else:
|
|
|
|
config.update({'datadir': self._create_datadir(config, None)})
|
|
|
|
logger.info('Using data folder: %s ...', config.get('datadir'))
|
|
|
|
|
2019-04-24 19:27:32 +00:00
|
|
|
def _load_optimize_config(self, config: Dict[str, Any]) -> Dict[str, Any]:
|
2018-02-09 07:35:38 +00:00
|
|
|
"""
|
2019-04-24 19:27:32 +00:00
|
|
|
Extract information for sys.argv and load Optimize configuration
|
2018-02-09 07:35:38 +00:00
|
|
|
:return: configuration as dictionary
|
|
|
|
"""
|
2018-03-03 21:39:39 +00:00
|
|
|
|
2019-04-23 18:58:27 +00:00
|
|
|
# This will override the strategy configuration
|
2019-04-24 19:24:00 +00:00
|
|
|
self._args_to_config(config, argname='ticker_interval',
|
|
|
|
logstring='Parameter -i/--ticker-interval detected ... '
|
|
|
|
'Using ticker_interval: {} ...')
|
2018-02-09 07:35:38 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='live',
|
2019-04-24 19:02:05 +00:00
|
|
|
logstring='Parameter -l/--live detected ...')
|
2018-02-09 07:35:38 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='position_stacking',
|
2019-04-24 19:02:05 +00:00
|
|
|
logstring='Parameter --enable-position-stacking detected ...')
|
2018-07-17 18:26:59 +00:00
|
|
|
|
2018-07-17 19:05:03 +00:00
|
|
|
if 'use_max_market_positions' in self.args and not self.args.use_max_market_positions:
|
|
|
|
config.update({'use_max_market_positions': False})
|
|
|
|
logger.info('Parameter --disable-max-market-positions detected ...')
|
|
|
|
logger.info('max_open_trades set to unlimited ...')
|
2019-04-05 13:48:14 +00:00
|
|
|
elif 'max_open_trades' in self.args and self.args.max_open_trades:
|
|
|
|
config.update({'max_open_trades': self.args.max_open_trades})
|
2019-04-14 08:17:06 +00:00
|
|
|
logger.info('Parameter --max_open_trades detected, '
|
|
|
|
'overriding max_open_trades to: %s ...', config.get('max_open_trades'))
|
2018-07-17 19:05:03 +00:00
|
|
|
else:
|
|
|
|
logger.info('Using max_open_trades: %s ...', config.get('max_open_trades'))
|
2018-02-09 07:35:38 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='stake_amount',
|
2019-04-24 19:02:05 +00:00
|
|
|
logstring='Parameter --stake_amount detected, '
|
|
|
|
'overriding stake_amount to: {} ...')
|
2019-04-05 13:48:14 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='timerange',
|
2019-04-24 19:02:05 +00:00
|
|
|
logstring='Parameter --timerange detected: {} ...')
|
2018-02-09 07:35:38 +00:00
|
|
|
|
2019-05-29 18:57:14 +00:00
|
|
|
self._load_datadir_config(config)
|
2018-02-09 07:35:38 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='refresh_pairs',
|
2019-04-24 19:02:05 +00:00
|
|
|
logstring='Parameter -r/--refresh-pairs-cached detected ...')
|
2018-02-09 07:35:38 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='strategy_list',
|
2019-04-24 19:02:05 +00:00
|
|
|
logstring='Using strategy list of {} Strategies', logfun=len)
|
2018-07-27 21:00:50 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='ticker_interval',
|
2019-04-24 19:02:05 +00:00
|
|
|
logstring='Overriding ticker interval with Command line argument')
|
2018-07-27 21:00:50 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='export',
|
2019-04-24 19:02:05 +00:00
|
|
|
logstring='Parameter --export detected: {} ...')
|
2018-02-09 07:35:38 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='exportfilename',
|
2019-04-24 19:02:05 +00:00
|
|
|
logstring='Storing backtest results to {} ...')
|
2018-06-03 12:52:03 +00:00
|
|
|
|
2019-04-24 19:27:32 +00:00
|
|
|
# Edge section:
|
2018-11-14 15:31:23 +00:00
|
|
|
if 'stoploss_range' in self.args and self.args.stoploss_range:
|
|
|
|
txt_range = eval(self.args.stoploss_range)
|
|
|
|
config['edge'].update({'stoploss_range_min': txt_range[0]})
|
|
|
|
config['edge'].update({'stoploss_range_max': txt_range[1]})
|
|
|
|
config['edge'].update({'stoploss_range_step': txt_range[2]})
|
|
|
|
logger.info('Parameter --stoplosses detected: %s ...', self.args.stoploss_range)
|
|
|
|
|
2019-04-24 19:27:32 +00:00
|
|
|
# Hyperopt section
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='hyperopt',
|
2019-04-24 19:12:08 +00:00
|
|
|
logstring='Using Hyperopt file {}')
|
2019-03-04 06:24:05 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='epochs',
|
2019-04-24 19:12:08 +00:00
|
|
|
logstring='Parameter --epochs detected ... '
|
|
|
|
'Will run Hyperopt with for {} epochs ...'
|
|
|
|
)
|
2018-03-02 13:46:32 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='spaces',
|
2019-04-24 19:12:08 +00:00
|
|
|
logstring='Parameter -s/--spaces detected: {}')
|
2018-03-04 08:51:22 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='print_all',
|
2019-04-24 19:12:08 +00:00
|
|
|
logstring='Parameter --print-all detected ...')
|
2019-04-21 22:10:01 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='hyperopt_jobs',
|
2019-04-24 19:12:08 +00:00
|
|
|
logstring='Parameter -j/--job-workers detected: {}')
|
2019-04-22 21:30:09 +00:00
|
|
|
|
2019-04-24 19:12:08 +00:00
|
|
|
self._args_to_config(config, argname='hyperopt_random_state',
|
|
|
|
logstring='Parameter --random-state detected: {}')
|
2019-05-01 12:27:58 +00:00
|
|
|
|
|
|
|
self._args_to_config(config, argname='hyperopt_min_trades',
|
|
|
|
logstring='Parameter --min-trades detected: {}')
|
|
|
|
|
2018-03-02 13:46:32 +00:00
|
|
|
return config
|
|
|
|
|
2019-03-14 08:01:03 +00:00
|
|
|
def _validate_config_schema(self, conf: Dict[str, Any]) -> Dict[str, Any]:
|
2018-02-04 06:42:03 +00:00
|
|
|
"""
|
|
|
|
Validate the configuration follow the Config Schema
|
|
|
|
:param conf: Config in JSON format
|
|
|
|
:return: Returns the config if valid, otherwise throw an exception
|
|
|
|
"""
|
|
|
|
try:
|
2019-04-08 01:23:29 +00:00
|
|
|
ValidatorWithDefaults(constants.CONF_SCHEMA).validate(conf)
|
2018-02-04 06:42:03 +00:00
|
|
|
return conf
|
|
|
|
except ValidationError as exception:
|
2018-05-30 20:38:09 +00:00
|
|
|
logger.critical(
|
2018-02-04 06:42:03 +00:00
|
|
|
'Invalid configuration. See config.json.example. Reason: %s',
|
|
|
|
exception
|
|
|
|
)
|
|
|
|
raise ValidationError(
|
2018-04-02 14:42:53 +00:00
|
|
|
best_match(Draft4Validator(constants.CONF_SCHEMA).iter_errors(conf)).message
|
2018-02-04 06:42:03 +00:00
|
|
|
)
|
|
|
|
|
2019-03-16 09:38:25 +00:00
|
|
|
    def _validate_config_consistency(self, conf: Dict[str, Any]) -> None:
        """
        Validate the configuration consistency
        :param conf: Config in JSON format
        :return: Returns None if everything is ok, otherwise throw an OperationalException
        """

        # validating trailing stoploss
        self._validate_trailing_stoploss(conf)
def _validate_trailing_stoploss(self, conf: Dict[str, Any]) -> None:
|
|
|
|
# Skip if trailing stoploss is not activated
|
|
|
|
if not conf.get('trailing_stop', False):
|
|
|
|
return
|
|
|
|
|
|
|
|
tsl_positive = float(conf.get('trailing_stop_positive', 0))
|
|
|
|
tsl_offset = float(conf.get('trailing_stop_positive_offset', 0))
|
|
|
|
tsl_only_offset = conf.get('trailing_only_offset_is_reached', False)
|
|
|
|
|
|
|
|
if tsl_only_offset:
|
|
|
|
if tsl_positive == 0.0:
|
|
|
|
raise OperationalException(
|
2019-03-16 09:38:25 +00:00
|
|
|
f'The config trailing_only_offset_is_reached needs '
|
2019-03-14 08:01:03 +00:00
|
|
|
'trailing_stop_positive_offset to be more than 0 in your config.')
|
|
|
|
if tsl_positive > 0 and 0 < tsl_offset <= tsl_positive:
|
|
|
|
raise OperationalException(
|
2019-03-16 09:38:25 +00:00
|
|
|
f'The config trailing_stop_positive_offset needs '
|
2019-03-14 08:01:03 +00:00
|
|
|
'to be greater than trailing_stop_positive_offset in your config.')
|
|
|
|
|
2018-02-04 06:42:03 +00:00
|
|
|
    def get_config(self) -> Dict[str, Any]:
        """
        Return the config. Use this method to get the bot config
        :return: Dict: Bot config
        """
        # Built lazily on first access, then memoized for later calls
        if self.config is None:
            self.config = self.load_config()

        return self.config
    def check_exchange(self, config: Dict[str, Any], check_for_bad: bool = True) -> bool:
        """
        Check if the exchange name in the config file is supported by Freqtrade
        :param check_for_bad: if True, check the exchange against the list of known 'bad'
                              exchanges
        :return: False if exchange is 'bad', i.e. is known to work with the bot with
                 critical issues or does not work at all, crashes, etc. True otherwise.
                 raises an exception if the exchange if not supported by ccxt
                 and thus is not known for the Freqtrade at all.
        """
        logger.info("Checking exchange...")

        exchange = config.get('exchange', {}).get('name').lower()
        # Hard failure: completely unknown to ccxt
        if not is_exchange_available(exchange):
            raise OperationalException(
                f'Exchange "{exchange}" is not supported by ccxt '
                f'and therefore not available for the bot.\n'
                f'The following exchanges are supported by ccxt: '
                f'{", ".join(available_exchanges())}'
            )

        # Known-bad exchanges are allowed, but flagged and reported as False
        if check_for_bad and is_exchange_bad(exchange):
            logger.warning(f'Exchange "{exchange}" is known to not work with the bot yet. '
                           f'Use it only for development and testing purposes.')
            return False

        if is_exchange_officially_supported(exchange):
            logger.info(f'Exchange "{exchange}" is officially supported '
                        f'by the Freqtrade development team.')
        else:
            # Available through ccxt, but untested by the freqtrade team
            logger.warning(f'Exchange "{exchange}" is supported by ccxt '
                           f'and therefore available for the bot but not officially supported '
                           f'by the Freqtrade development team. '
                           f'It may work flawlessly (please report back) or have serious issues. '
                           f'Use it at your own discretion.')

        return True