Merge branch 'feat/freqai' of https://github.com/freqtrade/freqtrade into feat/freqai
freqtrade/commands/__init__.py
@@ -6,6 +6,7 @@ Contains all start-commands, subcommands and CLI Interface creation.
 Note: Be careful with file-scoped imports in these subfiles.
     as they are parsed on startup, nothing containing optional modules should be loaded.
 """
+from freqtrade.commands.analyze_commands import start_analysis_entries_exits
 from freqtrade.commands.arguments import Arguments
 from freqtrade.commands.build_config_commands import start_new_config
 from freqtrade.commands.data_commands import (start_convert_data, start_convert_trades,
freqtrade/commands/analyze_commands.py (new executable file, 69 lines)
@@ -0,0 +1,69 @@
+import logging
+from pathlib import Path
+from typing import Any, Dict
+
+from freqtrade.configuration import setup_utils_configuration
+from freqtrade.enums import RunMode
+from freqtrade.exceptions import OperationalException
+
+
+logger = logging.getLogger(__name__)
+
+
+def setup_analyze_configuration(args: Dict[str, Any], method: RunMode) -> Dict[str, Any]:
+    """
+    Prepare the configuration for the entry/exit reason analysis module
+    :param args: Cli args from Arguments()
+    :param method: Bot running mode
+    :return: Configuration
+    """
+    config = setup_utils_configuration(args, method)
+
+    no_unlimited_runmodes = {
+        RunMode.BACKTEST: 'backtesting',
+    }
+    if method in no_unlimited_runmodes.keys():
+        from freqtrade.data.btanalysis import get_latest_backtest_filename
+
+        if 'exportfilename' in config:
+            if config['exportfilename'].is_dir():
+                btfile = Path(get_latest_backtest_filename(config['exportfilename']))
+                signals_file = f"{config['exportfilename']}/{btfile.stem}_signals.pkl"
+            else:
+                if config['exportfilename'].exists():
+                    btfile = Path(config['exportfilename'])
+                    signals_file = f"{btfile.parent}/{btfile.stem}_signals.pkl"
+                else:
+                    raise OperationalException(f"{config['exportfilename']} does not exist.")
+        else:
+            raise OperationalException('exportfilename not in config.')
+
+        if (not Path(signals_file).exists()):
+            raise OperationalException(
+                (f"Cannot find latest backtest signals file: {signals_file}."
+                 "Run backtesting with `--export signals`.")
+            )
+
+    return config
+
+
+def start_analysis_entries_exits(args: Dict[str, Any]) -> None:
+    """
+    Start analysis script
+    :param args: Cli args from Arguments()
+    :return: None
+    """
+    from freqtrade.data.entryexitanalysis import process_entry_exit_reasons
+
+    # Initialize configuration
+    config = setup_analyze_configuration(args, RunMode.BACKTEST)
+
+    logger.info('Starting freqtrade in analysis mode')
+
+    process_entry_exit_reasons(config['exportfilename'],
+                               config['exchange']['pair_whitelist'],
+                               config['analysis_groups'],
+                               config['enter_reason_list'],
+                               config['exit_reason_list'],
+                               config['indicator_list']
+                               )
freqtrade/commands/arguments.py
@@ -102,6 +102,9 @@ ARGS_HYPEROPT_SHOW = ["hyperopt_list_best", "hyperopt_list_profitable", "hyperop
                       "print_json", "hyperoptexportfilename", "hyperopt_show_no_header",
                       "disableparamexport", "backtest_breakdown"]
 
+ARGS_ANALYZE_ENTRIES_EXITS = ["exportfilename", "analysis_groups", "enter_reason_list",
+                              "exit_reason_list", "indicator_list"]
+
 NO_CONF_REQURIED = ["convert-data", "convert-trade-data", "download-data", "list-timeframes",
                     "list-markets", "list-pairs", "list-strategies", "list-data",
                     "hyperopt-list", "hyperopt-show", "backtest-filter",
@@ -183,8 +186,9 @@ class Arguments:
         self.parser = argparse.ArgumentParser(description='Free, open source crypto trading bot')
         self._build_args(optionlist=['version'], parser=self.parser)
 
-        from freqtrade.commands import (start_backtesting, start_backtesting_show,
-                                        start_convert_data, start_convert_db, start_convert_trades,
+        from freqtrade.commands import (start_analysis_entries_exits, start_backtesting,
+                                        start_backtesting_show, start_convert_data,
+                                        start_convert_db, start_convert_trades,
                                         start_create_userdir, start_download_data, start_edge,
                                         start_hyperopt, start_hyperopt_list, start_hyperopt_show,
                                         start_install_ui, start_list_data, start_list_exchanges,
@@ -284,6 +288,13 @@ class Arguments:
         backtesting_show_cmd.set_defaults(func=start_backtesting_show)
         self._build_args(optionlist=ARGS_BACKTEST_SHOW, parser=backtesting_show_cmd)
 
+        # Add backtesting analysis subcommand
+        analysis_cmd = subparsers.add_parser('backtesting-analysis',
+                                             help='Backtest Analysis module.',
+                                             parents=[_common_parser])
+        analysis_cmd.set_defaults(func=start_analysis_entries_exits)
+        self._build_args(optionlist=ARGS_ANALYZE_ENTRIES_EXITS, parser=analysis_cmd)
+
         # Add edge subcommand
         edge_cmd = subparsers.add_parser('edge', help='Edge module.',
                                          parents=[_common_parser, _strategy_parser])
freqtrade/commands/cli_options.py
@@ -614,13 +614,44 @@ AVAILABLE_CLI_OPTIONS = {
                  "that do not contain any parameters."),
         action="store_true",
     ),
+    "analysis_groups": Arg(
+        "--analysis-groups",
+        help=("grouping output - "
+              "0: simple wins/losses by enter tag, "
+              "1: by enter_tag, "
+              "2: by enter_tag and exit_tag, "
+              "3: by pair and enter_tag, "
+              "4: by pair, enter_ and exit_tag (this can get quite large)"),
+        nargs='+',
+        default=['0', '1', '2'],
+        choices=['0', '1', '2', '3', '4'],
+    ),
+    "enter_reason_list": Arg(
+        "--enter-reason-list",
+        help=("Comma separated list of entry signals to analyse. Default: all. "
+              "e.g. 'entry_tag_a,entry_tag_b'"),
+        nargs='+',
+        default=['all'],
+    ),
+    "exit_reason_list": Arg(
+        "--exit-reason-list",
+        help=("Comma separated list of exit signals to analyse. Default: all. "
+              "e.g. 'exit_tag_a,roi,stop_loss,trailing_stop_loss'"),
+        nargs='+',
+        default=['all'],
+    ),
+    "indicator_list": Arg(
+        "--indicator-list",
+        help=("Comma separated list of indicators to analyse. "
+              "e.g. 'close,rsi,bb_lowerband,profit_abs'"),
+        nargs='+',
+        default=[],
+    ),
     "freqaimodel": Arg(
         '--freqaimodel',
         help='Specify a custom freqaimodels.',
         metavar='NAME',
     ),
     "freqaimodel_path": Arg(
         '--freqaimodel-path',
         help='Specify additional lookup path for freqaimodels.',
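A hypothetical invocation of the new subcommand defined with these options (values are illustrative; the options use nargs='+', so multiple values are space-separated on the command line despite the "comma separated" wording in the help text):

    freqtrade backtesting-analysis --analysis-groups 0 2 --enter-reason-list all --indicator-list rsi profit_abs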
freqtrade/commands/data_commands.py
@@ -80,6 +80,12 @@ def start_download_data(args: Dict[str, Any]) -> None:
                 data_format_trades=config['dataformat_trades'],
             )
         else:
+            if not exchange._ft_has.get('ohlcv_has_history', True):
+                raise OperationalException(
+                    f"Historic klines not available for {exchange.name}. "
+                    "Please use `--dl-trades` instead for this exchange "
+                    "(will unfortunately take a long time)."
+                )
             pairs_not_available = refresh_backtest_ohlcv_data(
                 exchange, pairs=expanded_pairs, timeframes=config['timeframes'],
                 datadir=config['datadir'], timerange=timerange,
freqtrade/commands/db_commands.py
@@ -19,9 +19,9 @@ def start_convert_db(args: Dict[str, Any]) -> None:
 
     config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
 
-    init_db(config['db_url'], False)
+    init_db(config['db_url'])
     session_target = Trade._session
-    init_db(config['db_url_from'], False)
+    init_db(config['db_url_from'])
     logger.info("Starting db migration.")
 
     trade_count = 0
freqtrade/commands/hyperopt_commands.py
@@ -24,7 +24,7 @@ def start_hyperopt_list(args: Dict[str, Any]) -> None:
 
     print_colorized = config.get('print_colorized', False)
     print_json = config.get('print_json', False)
-    export_csv = config.get('export_csv', None)
+    export_csv = config.get('export_csv')
     no_details = config.get('hyperopt_list_no_details', False)
     no_header = False
 
freqtrade/commands/list_commands.py
@@ -212,7 +212,7 @@ def start_show_trades(args: Dict[str, Any]) -> None:
         raise OperationalException("--db-url is required for this command.")
 
     logger.info(f'Using DB: "{parse_db_uri_for_logging(config["db_url"])}"')
-    init_db(config['db_url'], clean_open_orders=False)
+    init_db(config['db_url'])
     tfilter = []
 
     if config.get('trade_ids'):
freqtrade/configuration/check_exchange.py
@@ -27,7 +27,7 @@ def check_exchange(config: Dict[str, Any], check_for_bad: bool = True) -> bool:
         return True
     logger.info("Checking exchange...")
 
-    exchange = config.get('exchange', {}).get('name').lower()
+    exchange = config.get('exchange', {}).get('name', '').lower()
     if not exchange:
         raise OperationalException(
             f'This command requires a configured exchange. You should either use '
freqtrade/configuration/configuration.py
@@ -95,6 +95,8 @@ class Configuration:
 
         self._process_data_options(config)
 
+        self._process_analyze_options(config)
+
         self._process_freqai_options(config)
 
         # Check if the exchange set by the user is supported
@@ -129,7 +131,7 @@ class Configuration:
             # Default to in-memory db for dry_run if not specified
             config['db_url'] = constants.DEFAULT_DB_DRYRUN_URL
         else:
-            if not config.get('db_url', None):
+            if not config.get('db_url'):
                 config['db_url'] = constants.DEFAULT_DB_PROD_URL
             logger.info('Dry run is disabled')
 
@@ -182,7 +184,7 @@ class Configuration:
         config['user_data_dir'] = create_userdata_dir(config['user_data_dir'], create_dir=False)
         logger.info('Using user-data directory: %s ...', config['user_data_dir'])
 
-        config.update({'datadir': create_datadir(config, self.args.get('datadir', None))})
+        config.update({'datadir': create_datadir(config, self.args.get('datadir'))})
         logger.info('Using data directory: %s ...', config.get('datadir'))
 
         if self.args.get('exportfilename'):
@@ -221,7 +223,7 @@ class Configuration:
         if config.get('max_open_trades') == -1:
             config['max_open_trades'] = float('inf')
 
-        if self.args.get('stake_amount', None):
+        if self.args.get('stake_amount'):
             # Convert explicitly to float to support CLI argument for both unlimited and value
             try:
                 self.args['stake_amount'] = float(self.args['stake_amount'])
@@ -435,6 +437,19 @@ class Configuration:
         self._args_to_config(config, argname='candle_types',
                              logstring='Detected --candle-types: {}')
 
+    def _process_analyze_options(self, config: Dict[str, Any]) -> None:
+        self._args_to_config(config, argname='analysis_groups',
+                             logstring='Analysis reason groups: {}')
+
+        self._args_to_config(config, argname='enter_reason_list',
+                             logstring='Analysis enter tag list: {}')
+
+        self._args_to_config(config, argname='exit_reason_list',
+                             logstring='Analysis exit tag list: {}')
+
+        self._args_to_config(config, argname='indicator_list',
+                             logstring='Analysis indicator list: {}')
+
     def _process_runmode(self, config: Dict[str, Any]) -> None:
 
         self._args_to_config(config, argname='dry_run',
@@ -471,7 +486,7 @@ class Configuration:
                         configuration instead of the content)
         """
         if (argname in self.args and self.args[argname] is not None
-           and self.args[argname] is not False):
+                and self.args[argname] is not False):
 
             config.update({argname: self.args[argname]})
             if logfun:
@@ -502,7 +517,8 @@ class Configuration:
             if not pairs_file.exists():
                 raise OperationalException(f'No pairs file found with path "{pairs_file}".')
             config['pairs'] = load_file(pairs_file)
-            config['pairs'].sort()
+            if isinstance(config['pairs'], list):
+                config['pairs'].sort()
             return
 
         if 'config' in self.args and self.args['config']:
@@ -513,5 +529,5 @@ class Configuration:
             pairs_file = config['datadir'] / 'pairs.json'
             if pairs_file.exists():
                 config['pairs'] = load_file(pairs_file)
-                if 'pairs' in config:
+                if 'pairs' in config and isinstance(config['pairs'], list):
                     config['pairs'].sort()
freqtrade/configuration/deprecated_settings.py
@@ -113,7 +113,7 @@ def process_temporary_deprecated_settings(config: Dict[str, Any]) -> None:
     process_removed_setting(config, 'experimental', 'ignore_roi_if_buy_signal',
                             None, 'ignore_roi_if_entry_signal')
 
-    process_removed_setting(config, 'ask_strategy', 'use_sell_signal', None, 'exit_sell_signal')
+    process_removed_setting(config, 'ask_strategy', 'use_sell_signal', None, 'use_exit_signal')
     process_removed_setting(config, 'ask_strategy', 'sell_profit_only', None, 'exit_profit_only')
     process_removed_setting(config, 'ask_strategy', 'sell_profit_offset',
                             None, 'exit_profit_offset')
freqtrade/configuration/directory_operations.py
@@ -15,7 +15,7 @@ def create_datadir(config: Dict[str, Any], datadir: Optional[str] = None) -> Pat
     folder = Path(datadir) if datadir else Path(f"{config['user_data_dir']}/data")
     if not datadir:
         # set datadir
-        exchange_name = config.get('exchange', {}).get('name').lower()
+        exchange_name = config.get('exchange', {}).get('name', '').lower()
         folder = folder.joinpath(exchange_name)
 
     if not folder.is_dir():
freqtrade/constants.py
@@ -303,12 +303,12 @@ CONF_SCHEMA = {
                 'exit_fill': {
                     'type': 'string',
                     'enum': TELEGRAM_SETTING_OPTIONS,
-                    'default': 'off'
+                    'default': 'on'
                 },
                 'protection_trigger': {
                     'type': 'string',
                     'enum': TELEGRAM_SETTING_OPTIONS,
-                    'default': 'off'
+                    'default': 'on'
                 },
                 'protection_trigger_global': {
                     'type': 'string',
@@ -337,6 +337,47 @@ CONF_SCHEMA = {
                 'webhookstatus': {'type': 'object'},
             },
         },
+        'discord': {
+            'type': 'object',
+            'properties': {
+                'enabled': {'type': 'boolean'},
+                'webhook_url': {'type': 'string'},
+                "exit_fill": {
+                    'type': 'array', 'items': {'type': 'object'},
+                    'default': [
+                        {"Trade ID": "{trade_id}"},
+                        {"Exchange": "{exchange}"},
+                        {"Pair": "{pair}"},
+                        {"Direction": "{direction}"},
+                        {"Open rate": "{open_rate}"},
+                        {"Close rate": "{close_rate}"},
+                        {"Amount": "{amount}"},
+                        {"Open date": "{open_date:%Y-%m-%d %H:%M:%S}"},
+                        {"Close date": "{close_date:%Y-%m-%d %H:%M:%S}"},
+                        {"Profit": "{profit_amount} {stake_currency}"},
+                        {"Profitability": "{profit_ratio:.2%}"},
+                        {"Enter tag": "{enter_tag}"},
+                        {"Exit Reason": "{exit_reason}"},
+                        {"Strategy": "{strategy}"},
+                        {"Timeframe": "{timeframe}"},
+                    ]
+                },
+                "entry_fill": {
+                    'type': 'array', 'items': {'type': 'object'},
+                    'default': [
+                        {"Trade ID": "{trade_id}"},
+                        {"Exchange": "{exchange}"},
+                        {"Pair": "{pair}"},
+                        {"Direction": "{direction}"},
+                        {"Open rate": "{open_rate}"},
+                        {"Amount": "{amount}"},
+                        {"Open date": "{open_date:%Y-%m-%d %H:%M:%S}"},
+                        {"Enter tag": "{enter_tag}"},
+                        {"Strategy": "{strategy} {timeframe}"},
+                    ]
+                },
+            }
+        },
         'api_server': {
             'type': 'object',
             'properties': {
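The discord field templates above are plain str.format templates; one placeholder rendered in isolation (the value is made up):

    '{profit_ratio:.2%}'.format(profit_ratio=0.0153)   # -> '1.53%'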
freqtrade/data/btanalysis.py
@@ -26,7 +26,7 @@ BT_DATA_COLUMNS = ['pair', 'stake_amount', 'amount', 'open_date', 'close_date',
                    'profit_ratio', 'profit_abs', 'exit_reason',
                    'initial_stop_loss_abs', 'initial_stop_loss_ratio', 'stop_loss_abs',
                    'stop_loss_ratio', 'min_rate', 'max_rate', 'is_open', 'enter_tag',
-                   'is_short'
+                   'is_short', 'open_timestamp', 'close_timestamp', 'orders'
                    ]
 
 
@@ -283,6 +283,8 @@ def load_backtest_data(filename: Union[Path, str], strategy: Optional[str] = Non
             if 'enter_tag' not in df.columns:
                 df['enter_tag'] = df['buy_tag']
                 df = df.drop(['buy_tag'], axis=1)
+            if 'orders' not in df.columns:
+                df.loc[:, 'orders'] = None
 
     else:
         # old format - only with lists.
@@ -337,7 +339,7 @@ def trade_list_to_dataframe(trades: List[LocalTrade]) -> pd.DataFrame:
     :param trades: List of trade objects
     :return: Dataframe with BT_DATA_COLUMNS
     """
-    df = pd.DataFrame.from_records([t.to_json() for t in trades], columns=BT_DATA_COLUMNS)
+    df = pd.DataFrame.from_records([t.to_json(True) for t in trades], columns=BT_DATA_COLUMNS)
    if len(df) > 0:
         df.loc[:, 'close_date'] = pd.to_datetime(df['close_date'], utc=True)
         df.loc[:, 'open_date'] = pd.to_datetime(df['open_date'], utc=True)
@@ -353,7 +355,7 @@ def load_trades_from_db(db_url: str, strategy: Optional[str] = None) -> pd.DataF
     Can also serve as protection to load the correct result.
     :return: Dataframe containing Trades
     """
-    init_db(db_url, clean_open_orders=False)
+    init_db(db_url)
 
     filters = []
     if strategy:
freqtrade/data/entryexitanalysis.py (new executable file, 227 lines)
@@ -0,0 +1,227 @@
+import logging
+from pathlib import Path
+from typing import List, Optional
+
+import joblib
+import pandas as pd
+from tabulate import tabulate
+
+from freqtrade.data.btanalysis import (get_latest_backtest_filename, load_backtest_data,
+                                       load_backtest_stats)
+from freqtrade.exceptions import OperationalException
+
+
+logger = logging.getLogger(__name__)
+
+
+def _load_signal_candles(backtest_dir: Path):
+    if backtest_dir.is_dir():
+        scpf = Path(backtest_dir,
+                    Path(get_latest_backtest_filename(backtest_dir)).stem + "_signals.pkl"
+                    )
+    else:
+        scpf = Path(backtest_dir.parent / f"{backtest_dir.stem}_signals.pkl")
+
+    try:
+        scp = open(scpf, "rb")
+        signal_candles = joblib.load(scp)
+        logger.info(f"Loaded signal candles: {str(scpf)}")
+    except Exception as e:
+        logger.error("Cannot load signal candles from pickled results: ", e)
+
+    return signal_candles
+
+
+def _process_candles_and_indicators(pairlist, strategy_name, trades, signal_candles):
+    analysed_trades_dict = {}
+    analysed_trades_dict[strategy_name] = {}
+
+    try:
+        logger.info(f"Processing {strategy_name} : {len(pairlist)} pairs")
+
+        for pair in pairlist:
+            if pair in signal_candles[strategy_name]:
+                analysed_trades_dict[strategy_name][pair] = _analyze_candles_and_indicators(
+                    pair,
+                    trades,
+                    signal_candles[strategy_name][pair])
+    except Exception as e:
+        print(f"Cannot process entry/exit reasons for {strategy_name}: ", e)
+
+    return analysed_trades_dict
+
+
+def _analyze_candles_and_indicators(pair, trades, signal_candles):
+    buyf = signal_candles
+
+    if len(buyf) > 0:
+        buyf = buyf.set_index('date', drop=False)
+        trades_red = trades.loc[trades['pair'] == pair].copy()
+
+        trades_inds = pd.DataFrame()
+
+        if trades_red.shape[0] > 0 and buyf.shape[0] > 0:
+            for t, v in trades_red.open_date.items():
+                allinds = buyf.loc[(buyf['date'] < v)]
+                if allinds.shape[0] > 0:
+                    tmp_inds = allinds.iloc[[-1]]
+
+                    trades_red.loc[t, 'signal_date'] = tmp_inds['date'].values[0]
+                    trades_red.loc[t, 'enter_reason'] = trades_red.loc[t, 'enter_tag']
+                    tmp_inds.index.rename('signal_date', inplace=True)
+                    trades_inds = pd.concat([trades_inds, tmp_inds])
+
+            if 'signal_date' in trades_red:
+                trades_red['signal_date'] = pd.to_datetime(trades_red['signal_date'], utc=True)
+                trades_red.set_index('signal_date', inplace=True)
+
+                try:
+                    trades_red = pd.merge(trades_red, trades_inds, on='signal_date', how='outer')
+                except Exception as e:
+                    raise e
+        return trades_red
+    else:
+        return pd.DataFrame()
+
+
+def _do_group_table_output(bigdf, glist):
+    for g in glist:
+        # 0: summary wins/losses grouped by enter tag
+        if g == "0":
+            group_mask = ['enter_reason']
+            wins = bigdf.loc[bigdf['profit_abs'] >= 0] \
+                .groupby(group_mask) \
+                .agg({'profit_abs': ['sum']})
+
+            wins.columns = ['profit_abs_wins']
+            loss = bigdf.loc[bigdf['profit_abs'] < 0] \
+                .groupby(group_mask) \
+                .agg({'profit_abs': ['sum']})
+            loss.columns = ['profit_abs_loss']
+
+            new = bigdf.groupby(group_mask).agg({'profit_abs': [
+                'count',
+                lambda x: sum(x > 0),
+                lambda x: sum(x <= 0)]})
+            new = pd.concat([new, wins, loss], axis=1).fillna(0)
+
+            new['profit_tot'] = new['profit_abs_wins'] - abs(new['profit_abs_loss'])
+            new['wl_ratio_pct'] = (new.iloc[:, 1] / new.iloc[:, 0] * 100).fillna(0)
+            new['avg_win'] = (new['profit_abs_wins'] / new.iloc[:, 1]).fillna(0)
+            new['avg_loss'] = (new['profit_abs_loss'] / new.iloc[:, 2]).fillna(0)
+
+            new.columns = ['total_num_buys', 'wins', 'losses', 'profit_abs_wins', 'profit_abs_loss',
+                           'profit_tot', 'wl_ratio_pct', 'avg_win', 'avg_loss']
+
+            sortcols = ['total_num_buys']
+
+            _print_table(new, sortcols, show_index=True)
+
+        else:
+            agg_mask = {'profit_abs': ['count', 'sum', 'median', 'mean'],
+                        'profit_ratio': ['sum', 'median', 'mean']}
+            agg_cols = ['num_buys', 'profit_abs_sum', 'profit_abs_median',
+                        'profit_abs_mean', 'median_profit_pct', 'mean_profit_pct',
+                        'total_profit_pct']
+            sortcols = ['profit_abs_sum', 'enter_reason']
+
+            # 1: profit summaries grouped by enter_tag
+            if g == "1":
+                group_mask = ['enter_reason']
+
+            # 2: profit summaries grouped by enter_tag and exit_tag
+            if g == "2":
+                group_mask = ['enter_reason', 'exit_reason']
+
+            # 3: profit summaries grouped by pair and enter_tag
+            if g == "3":
+                group_mask = ['pair', 'enter_reason']
+
+            # 4: profit summaries grouped by pair, enter_ and exit_tag (this can get quite large)
+            if g == "4":
+                group_mask = ['pair', 'enter_reason', 'exit_reason']
+            if group_mask:
+                new = bigdf.groupby(group_mask).agg(agg_mask).reset_index()
+                new.columns = group_mask + agg_cols
+                new['median_profit_pct'] = new['median_profit_pct'] * 100
+                new['mean_profit_pct'] = new['mean_profit_pct'] * 100
+                new['total_profit_pct'] = new['total_profit_pct'] * 100
+
+                _print_table(new, sortcols)
+            else:
+                logger.warning("Invalid group mask specified.")
+
+
+def _print_results(analysed_trades, stratname, analysis_groups,
+                   enter_reason_list, exit_reason_list,
+                   indicator_list, columns=None):
+    if columns is None:
+        columns = ['pair', 'open_date', 'close_date', 'profit_abs', 'enter_reason', 'exit_reason']
+
+    bigdf = pd.DataFrame()
+    for pair, trades in analysed_trades[stratname].items():
+        bigdf = pd.concat([bigdf, trades], ignore_index=True)
+
+    if bigdf.shape[0] > 0 and ('enter_reason' in bigdf.columns):
+        if analysis_groups:
+            _do_group_table_output(bigdf, analysis_groups)
+
+        if enter_reason_list and "all" not in enter_reason_list:
+            bigdf = bigdf.loc[(bigdf['enter_reason'].isin(enter_reason_list))]
+
+        if exit_reason_list and "all" not in exit_reason_list:
+            bigdf = bigdf.loc[(bigdf['exit_reason'].isin(exit_reason_list))]
+
+        if "all" in indicator_list:
+            print(bigdf)
+        elif indicator_list is not None:
+            available_inds = []
+            for ind in indicator_list:
+                if ind in bigdf:
+                    available_inds.append(ind)
+            ilist = ["pair", "enter_reason", "exit_reason"] + available_inds
+            _print_table(bigdf[ilist], sortcols=['exit_reason'], show_index=False)
+    else:
+        print("\\_ No trades to show")
+
+
+def _print_table(df, sortcols=None, show_index=False):
+    if (sortcols is not None):
+        data = df.sort_values(sortcols)
+    else:
+        data = df
+
+    print(
+        tabulate(
+            data,
+            headers='keys',
+            tablefmt='psql',
+            showindex=show_index
+        )
+    )
+
+
+def process_entry_exit_reasons(backtest_dir: Path,
+                               pairlist: List[str],
+                               analysis_groups: Optional[List[str]] = ["0", "1", "2"],
+                               enter_reason_list: Optional[List[str]] = ["all"],
+                               exit_reason_list: Optional[List[str]] = ["all"],
+                               indicator_list: Optional[List[str]] = []):
+    try:
+        backtest_stats = load_backtest_stats(backtest_dir)
+        for strategy_name, results in backtest_stats['strategy'].items():
+            trades = load_backtest_data(backtest_dir, strategy_name)
+
+            if not trades.empty:
+                signal_candles = _load_signal_candles(backtest_dir)
+                analysed_trades_dict = _process_candles_and_indicators(pairlist, strategy_name,
+                                                                       trades, signal_candles)
+                _print_results(analysed_trades_dict,
+                               strategy_name,
+                               analysis_groups,
+                               enter_reason_list,
+                               exit_reason_list,
+                               indicator_list)
+
+    except ValueError as e:
+        raise OperationalException(e) from e
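A hypothetical direct call of the public entry point above (paths and pairs are illustrative; the directory must hold a backtest export plus its matching *_signals.pkl):

    from pathlib import Path
    process_entry_exit_reasons(Path("user_data/backtest_results"),
                               ["BTC/USDT", "ETH/USDT"],
                               analysis_groups=["0", "1"],
                               indicator_list=["rsi"])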
freqtrade/data/history/hdf5datahandler.py
@@ -40,7 +40,7 @@ class HDF5DataHandler(IDataHandler):
         return [
             (
                 cls.rebuild_pair_from_filename(match[1]),
-                match[2],
+                cls.rebuild_timeframe_from_filename(match[2]),
                 CandleType.from_string(match[3])
             ) for match in _tmp if match and len(match.groups()) > 1]
 
@@ -109,7 +109,11 @@ class HDF5DataHandler(IDataHandler):
         )
 
         if not filename.exists():
-            return pd.DataFrame(columns=self._columns)
+            # Fallback mode for 1M files
+            filename = self._pair_data_filename(
+                self._datadir, pair, timeframe, candle_type=candle_type, no_timeframe_modify=True)
+            if not filename.exists():
+                return pd.DataFrame(columns=self._columns)
         where = []
         if timerange:
             if timerange.starttype == 'date':
freqtrade/data/history/history_utils.py
@@ -68,7 +68,8 @@ def load_data(datadir: Path,
               startup_candles: int = 0,
               fail_without_data: bool = False,
               data_format: str = 'json',
-              candle_type: CandleType = CandleType.SPOT
+              candle_type: CandleType = CandleType.SPOT,
+              user_futures_funding_rate: int = None,
               ) -> Dict[str, DataFrame]:
     """
     Load ohlcv history data for a list of pairs.
@@ -100,6 +101,10 @@ def load_data(datadir: Path,
         )
         if not hist.empty:
             result[pair] = hist
+        else:
+            if candle_type is CandleType.FUNDING_RATE and user_futures_funding_rate is not None:
+                logger.warn(f"{pair} using user specified [{user_futures_funding_rate}]")
+                result[pair] = DataFrame(columns=["open", "close", "high", "low", "volume"])
 
     if fail_without_data and not result:
         raise OperationalException("No data found. Terminating.")
@@ -216,7 +221,7 @@ def _download_pair_history(pair: str, *,
                            prepend=prepend)
 
         logger.info(f'({process}) - Download history data for "{pair}", {timeframe}, '
-                    f'{candle_type} and store in {datadir}.'
+                    f'{candle_type} and store in {datadir}. '
                    f'From {format_ms_time(since_ms) if since_ms else "start"} to '
                     f'{format_ms_time(until_ms) if until_ms else "now"}'
                     )
@@ -277,6 +282,7 @@ def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes
     pairs_not_available = []
     data_handler = get_datahandler(datadir, data_format)
     candle_type = CandleType.get_default(trading_mode)
+    process = ''
     for idx, pair in enumerate(pairs, start=1):
         if pair not in exchange.markets:
             pairs_not_available.append(pair)
freqtrade/data/history/idatahandler.py
@@ -26,7 +26,7 @@ logger = logging.getLogger(__name__)
 
 class IDataHandler(ABC):
 
-    _OHLCV_REGEX = r'^([a-zA-Z_-]+)\-(\d+\S)\-?([a-zA-Z_]*)?(?=\.)'
+    _OHLCV_REGEX = r'^([a-zA-Z_-]+)\-(\d+[a-zA-Z]{1,2})\-?([a-zA-Z_]*)?(?=\.)'
 
     def __init__(self, datadir: Path) -> None:
         self._datadir = datadir
@@ -193,10 +193,14 @@ class IDataHandler(ABC):
                             datadir: Path,
                             pair: str,
                             timeframe: str,
-                            candle_type: CandleType
+                            candle_type: CandleType,
+                            no_timeframe_modify: bool = False
                             ) -> Path:
         pair_s = misc.pair_to_filename(pair)
         candle = ""
+        if not no_timeframe_modify:
+            timeframe = cls.timeframe_to_file(timeframe)
+
         if candle_type != CandleType.SPOT:
             datadir = datadir.joinpath('futures')
             candle = f"-{candle_type}"
@@ -210,6 +214,18 @@ class IDataHandler(ABC):
         filename = datadir.joinpath(f'{pair_s}-trades.{cls._get_file_extension()}')
         return filename
 
+    @staticmethod
+    def timeframe_to_file(timeframe: str):
+        return timeframe.replace('M', 'Mo')
+
+    @staticmethod
+    def rebuild_timeframe_from_filename(timeframe: str) -> str:
+        """
+        converts timeframe from disk to file
+        Replaces mo with M (to avoid problems on case-insensitive filesystems)
+        """
+        return re.sub('1mo', '1M', timeframe, flags=re.IGNORECASE)
+
     @staticmethod
     def rebuild_pair_from_filename(pair: str) -> str:
         """
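A round-trip sketch of the 1M (one-month) timeframe handling and the tightened regex above (the filename is illustrative):

    import re
    IDataHandler.timeframe_to_file('1M')                   # -> '1Mo'
    IDataHandler.rebuild_timeframe_from_filename('1Mo')    # -> '1M'
    pattern = r'^([a-zA-Z_-]+)\-(\d+[a-zA-Z]{1,2})\-?([a-zA-Z_]*)?(?=\.)'
    re.search(pattern, 'ETH_BTC-1Mo-futures.json').groups()
    # -> ('ETH_BTC', '1Mo', 'futures')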
freqtrade/data/history/jsondatahandler.py
@@ -41,7 +41,7 @@ class JsonDataHandler(IDataHandler):
         return [
             (
                 cls.rebuild_pair_from_filename(match[1]),
-                match[2],
+                cls.rebuild_timeframe_from_filename(match[2]),
                 CandleType.from_string(match[3])
             ) for match in _tmp if match and len(match.groups()) > 1]
 
@@ -103,9 +103,14 @@ class JsonDataHandler(IDataHandler):
         :param candle_type: Any of the enum CandleType (must match trading mode!)
         :return: DataFrame with ohlcv data, or empty DataFrame
         """
-        filename = self._pair_data_filename(self._datadir, pair, timeframe, candle_type=candle_type)
+        filename = self._pair_data_filename(
+            self._datadir, pair, timeframe, candle_type=candle_type)
         if not filename.exists():
-            return DataFrame(columns=self._columns)
+            # Fallback mode for 1M files
+            filename = self._pair_data_filename(
+                self._datadir, pair, timeframe, candle_type=candle_type, no_timeframe_modify=True)
+            if not filename.exists():
+                return DataFrame(columns=self._columns)
         try:
             pairdata = read_json(filename, orient='values')
             pairdata.columns = self._columns
freqtrade/enums/exitchecktuple.py
@@ -15,3 +15,9 @@ class ExitCheckTuple:
     @property
     def exit_flag(self):
         return self.exit_type != ExitType.NONE
+
+    def __eq__(self, other):
+        return self.exit_type == other.exit_type and self.exit_reason == other.exit_reason
+
+    def __repr__(self):
+        return f"ExitCheckTuple({self.exit_type}, {self.exit_reason})"
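What the two new dunder methods buy, in miniature (assumes the existing ExitCheckTuple(exit_type, exit_reason='') constructor; the exact repr text depends on how ExitType stringifies):

    check = ExitCheckTuple(ExitType.ROI, 'roi')
    check == ExitCheckTuple(ExitType.ROI, 'roi')   # True, compared field by field
    repr(check)   # something like "ExitCheckTuple(ExitType.ROI, roi)"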
freqtrade/exchange/binance.py
@@ -52,12 +52,17 @@ class Binance(Exchange):
 
         ordertype = 'stop' if self.trading_mode == TradingMode.FUTURES else 'stop_loss_limit'
 
-        return order['type'] == ordertype and (
-            (side == "sell" and stop_loss > float(order['info']['stopPrice'])) or
-            (side == "buy" and stop_loss < float(order['info']['stopPrice']))
-        )
+        return (
+            order.get('stopPrice', None) is None
+            or (
+                order['type'] == ordertype
+                and (
+                    (side == "sell" and stop_loss > float(order['stopPrice'])) or
+                    (side == "buy" and stop_loss < float(order['stopPrice']))
+                )
+            ))
 
-    def get_tickers(self, symbols: List[str] = None, cached: bool = False) -> Dict:
+    def get_tickers(self, symbols: Optional[List[str]] = None, cached: bool = False) -> Dict:
         tickers = super().get_tickers(symbols=symbols, cached=cached)
         if self.trading_mode == TradingMode.FUTURES:
             # Binance's future result has no bid/ask values.
@@ -95,7 +100,7 @@ class Binance(Exchange):
     async def _async_get_historic_ohlcv(self, pair: str, timeframe: str,
                                         since_ms: int, candle_type: CandleType,
                                         is_new_pair: bool = False, raise_: bool = False,
-                                        until_ms: int = None
+                                        until_ms: Optional[int] = None
                                         ) -> Tuple[str, str, str, List]:
         """
         Overwrite to introduce "fast new pair" functionality by detecting the pair's listing date
[File diff suppressed because it is too large]
freqtrade/exchange/bybit.py
@@ -29,3 +29,17 @@ class Bybit(Exchange):
         # (TradingMode.FUTURES, MarginMode.CROSS),
         # (TradingMode.FUTURES, MarginMode.ISOLATED)
     ]
+
+    @property
+    def _ccxt_config(self) -> Dict:
+        # Parameters to add directly to ccxt sync/async initialization.
+        # ccxt defaults to swap mode.
+        config = {}
+        if self.trading_mode == TradingMode.SPOT:
+            config.update({
+                "options": {
+                    "defaultType": "spot"
+                }
+            })
+        config.update(super()._ccxt_config)
+        return config
freqtrade/exchange/common.py
@@ -2,6 +2,7 @@ import asyncio
 import logging
 import time
 from functools import wraps
+from typing import Any, Callable, Optional, TypeVar, cast, overload
 
 from freqtrade.exceptions import DDosProtection, RetryableOrderError, TemporaryError
 from freqtrade.mixins import LoggingMixin
@@ -11,6 +12,14 @@ logger = logging.getLogger(__name__)
 __logging_mixin = None
 
 
+def _reset_logging_mixin():
+    """
+    Reset global logging mixin - used in tests only.
+    """
+    global __logging_mixin
+    __logging_mixin = LoggingMixin(logger)
+
+
 def _get_logging_mixin():
     # Logging-mixin to cache kucoin responses
     # Only to be used in retrier
@@ -133,8 +142,22 @@ def retrier_async(f):
     return wrapper
 
 
-def retrier(_func=None, retries=API_RETRY_COUNT):
-    def decorator(f):
+F = TypeVar('F', bound=Callable[..., Any])
+
+
+# Type shenanigans
+@overload
+def retrier(_func: F) -> F:
+    ...
+
+
+@overload
+def retrier(*, retries=API_RETRY_COUNT) -> Callable[[F], F]:
+    ...
+
+
+def retrier(_func: Optional[F] = None, *, retries=API_RETRY_COUNT):
+    def decorator(f: F) -> F:
         @wraps(f)
         def wrapper(*args, **kwargs):
             count = kwargs.pop('count', retries)
@@ -155,7 +178,7 @@ def retrier(_func=None, retries=API_RETRY_COUNT):
             else:
                 logger.warning(msg + 'Giving up.')
                 raise ex
-        return wrapper
+        return cast(F, wrapper)
     # Support both @retrier and @retrier(retries=2) syntax
     if _func is None:
         return decorator
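Both call styles the @overload signatures are meant to support, matching the "# Support both @retrier and @retrier(retries=2) syntax" comment above (the decorated functions are placeholders):

    @retrier
    def fetch_something():
        ...

    @retrier(retries=2)
    def fetch_something_else():
        ...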
freqtrade/exchange/exchange.py
@@ -16,8 +16,7 @@ import arrow
 import ccxt
 import ccxt.async_support as ccxt_async
 from cachetools import TTLCache
-from ccxt.base.decimal_to_precision import (ROUND_DOWN, ROUND_UP, TICK_SIZE, TRUNCATE,
-                                            decimal_to_precision)
+from ccxt import ROUND_DOWN, ROUND_UP, TICK_SIZE, TRUNCATE, Precise, decimal_to_precision
 from pandas import DataFrame
 
 from freqtrade.constants import (DEFAULT_AMOUNT_RESERVE_PERCENT, NON_OPEN_EXCHANGE_STATES, BuySell,
@@ -64,6 +63,7 @@ class Exchange:
         "time_in_force_parameter": "timeInForce",
         "ohlcv_params": {},
         "ohlcv_candle_limit": 500,
+        "ohlcv_has_history": True,  # Some exchanges (Kraken) don't provide history via ohlcv
         "ohlcv_partial_candle": True,
         "ohlcv_require_since": False,
         # Check https://github.com/ccxt/ccxt/issues/10767 for removal of ohlcv_volume_currency
@@ -92,7 +92,7 @@ class Exchange:
         it does basic validation whether the specified exchange and pairs are valid.
         :return: None
         """
-        self._api: ccxt.Exchange = None
+        self._api: ccxt.Exchange
         self._api_async: ccxt_async.Exchange = None
         self._markets: Dict = {}
         self._trading_fees: Dict[str, Any] = {}
@@ -291,7 +291,7 @@ class Exchange:
         return self._markets
 
     @property
-    def precisionMode(self) -> str:
+    def precisionMode(self) -> int:
         """exchange ccxt precisionMode"""
         return self._api.precisionMode
 
@@ -308,18 +308,21 @@ class Exchange:
         if self.log_responses:
             logger.info(f"API {endpoint}: {response}")
 
-    def ohlcv_candle_limit(self, timeframe: str) -> int:
+    def ohlcv_candle_limit(
+            self, timeframe: str, candle_type: CandleType, since_ms: Optional[int] = None) -> int:
         """
         Exchange ohlcv candle limit
         Uses ohlcv_candle_limit_per_timeframe if the exchange has different limits
         per timeframe (e.g. bittrex), otherwise falls back to ohlcv_candle_limit
         :param timeframe: Timeframe to check
+        :param candle_type: Candle-type
+        :param since_ms: Starting timestamp
         :return: Candle limit as integer
         """
         return int(self._ft_has.get('ohlcv_candle_limit_per_timeframe', {}).get(
             timeframe, self._ft_has.get('ohlcv_candle_limit')))
 
-    def get_markets(self, base_currencies: List[str] = None, quote_currencies: List[str] = None,
+    def get_markets(self, base_currencies: List[str] = [], quote_currencies: List[str] = [],
                     spot_only: bool = False, margin_only: bool = False, futures_only: bool = False,
                     tradable_only: bool = True,
                     active_only: bool = False) -> Dict[str, Any]:
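A sketch of the per-timeframe fallback the reworked ohlcv_candle_limit implements (the _ft_has values are illustrative):

    ft_has = {'ohlcv_candle_limit': 500,
              'ohlcv_candle_limit_per_timeframe': {'1d': 200}}
    lookup = ft_has.get('ohlcv_candle_limit_per_timeframe', {})
    int(lookup.get('1d', ft_has.get('ohlcv_candle_limit')))   # -> 200
    int(lookup.get('5m', ft_has.get('ohlcv_candle_limit')))   # -> 500 (fallback)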
@@ -384,7 +387,7 @@ class Exchange:
                 and market.get('base', None) is not None
                 and (self.precisionMode != TICK_SIZE
                      # Too low precision will falsify calculations
-                     or market.get('precision', {}).get('price', None) > 1e-11)
+                     or market.get('precision', {}).get('price') > 1e-11)
                 and ((self.trading_mode == TradingMode.SPOT and self.market_is_spot(market))
                      or (self.trading_mode == TradingMode.MARGIN and self.market_is_margin(market))
                      or (self.trading_mode == TradingMode.FUTURES and self.market_is_future(market)))
@@ -534,7 +537,7 @@ class Exchange:
             # The internal info array is different for each particular market,
             # its contents depend on the exchange.
             # It can also be a string or similar ... so we need to verify that first.
-            elif (isinstance(self.markets[pair].get('info', None), dict)
+            elif (isinstance(self.markets[pair].get('info'), dict)
                   and self.markets[pair].get('info', {}).get('prohibitedIn', False)):
                 # Warn users about restricted pairs in whitelist.
                 # We cannot determine reliably if Users are affected.
@@ -615,19 +618,28 @@ class Exchange:
         Checks if required startup_candles is more than ohlcv_candle_limit().
         Requires a grace-period of 5 candles - so a startup-period up to 494 is allowed by default.
         """
-        candle_limit = self.ohlcv_candle_limit(timeframe)
-
+        candle_limit = self.ohlcv_candle_limit(
+            timeframe, self._config['candle_type_def'],
+            int(date_minus_candles(timeframe, startup_candles).timestamp() * 1000)
+            if timeframe else None)
         # Require one more candle - to account for the still open candle.
         candle_count = startup_candles + 1
         # Allow 5 calls to the exchange per pair
         required_candle_call_count = int(
             (candle_count / candle_limit) + (0 if candle_count % candle_limit == 0 else 1))
+        if self._ft_has['ohlcv_has_history']:
 
-        if required_candle_call_count > 5:
-            # Only allow 5 calls per pair to somewhat limit the impact
-            raise OperationalException(
-                f"This strategy requires {startup_candles} candles to start, which is more than 5x "
-                f"the amount of candles {self.name} provides for {timeframe}.")
+            if required_candle_call_count > 5:
+                # Only allow 5 calls per pair to somewhat limit the impact
+                raise OperationalException(
+                    f"This strategy requires {startup_candles} candles to start, "
+                    "which is more than 5x "
+                    f"the amount of candles {self.name} provides for {timeframe}.")
+        elif required_candle_call_count > 1:
+            raise OperationalException(
+                f"This strategy requires {startup_candles} candles to start, which is more than "
+                f"the amount of candles {self.name} provides for {timeframe}.")
 
         if required_candle_call_count > 1:
             logger.warning(f"Using {required_candle_call_count} calls to get OHLCV. "
                            f"This can result in slower operations for the bot. Please check "
@@ -691,10 +703,11 @@ class Exchange:
             #     counting_mode=self.precisionMode,
             # ))
             if self.precisionMode == TICK_SIZE:
-                precision = self.markets[pair]['precision']['price']
-                missing = price % precision
-                if missing != 0:
-                    price = round(price - missing + precision, 10)
+                precision = Precise(str(self.markets[pair]['precision']['price']))
+                price_str = Precise(str(price))
+                missing = price_str % precision
+                if not missing == Precise("0"):
+                    price = round(float(str(price_str - missing + precision)), 14)
             else:
                 symbol_prec = self.markets[pair]['precision']['price']
                 big_price = price * pow(10, symbol_prec)
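The arithmetic behind the TICK_SIZE branch, shown with plain floats (the precision value is made up); the float fuzz visible in `missing` is exactly what the switch to ccxt's Precise avoids:

    price, precision = 1.0001, 0.0005
    missing = price % precision             # ~0.0001, but with float noise
    if missing != 0:
        price = round(price - missing + precision, 10)   # -> 1.0005, next tick up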
@@ -940,6 +953,12 @@ class Exchange:
             order = self.check_dry_limit_order_filled(order)
             return order
         except KeyError as e:
+            from freqtrade.persistence import Order
+            order = Order.order_by_id(order_id)
+            if order:
+                ccxt_order = order.to_ccxt_object()
+                self._dry_run_open_orders[order_id] = ccxt_order
+                return ccxt_order
             # Gracefully handle errors with dry-run orders.
             raise InvalidOrderException(
                 f'Tried to get an invalid dry-run-order (id: {order_id}). Message: {e}') from e
@@ -1145,7 +1164,7 @@ class Exchange:
             raise OperationalException(e) from e
 
     @retrier(retries=API_FETCH_ORDER_RETRY_COUNT)
-    def fetch_order(self, order_id: str, pair: str, params={}) -> Dict:
+    def fetch_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
         if self._config['dry_run']:
             return self.fetch_dry_run_order(order_id)
         try:
@@ -1167,8 +1186,8 @@ class Exchange:
         except ccxt.BaseError as e:
             raise OperationalException(e) from e
 
-    # Assign method to fetch_stoploss_order to allow easy overriding in other classes
-    fetch_stoploss_order = fetch_order
+    def fetch_stoploss_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
+        return self.fetch_order(order_id, pair, params)
 
     def fetch_order_or_stoploss_order(self, order_id: str, pair: str,
                                       stoploss_order: bool = False) -> Dict:
@@ -1193,7 +1212,7 @@ class Exchange:
                 and order.get('filled') == 0.0)
 
     @retrier
-    def cancel_order(self, order_id: str, pair: str, params={}) -> Dict:
+    def cancel_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
         if self._config['dry_run']:
             try:
                 order = self.fetch_dry_run_order(order_id)
@@ -1219,8 +1238,8 @@ class Exchange:
         except ccxt.BaseError as e:
             raise OperationalException(e) from e
 
-    # Assign method to cancel_stoploss_order to allow easy overriding in other classes
-    cancel_stoploss_order = cancel_order
+    def cancel_stoploss_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
+        return self.cancel_order(order_id, pair, params)
 
     def is_cancel_order_result_suitable(self, corder) -> bool:
         if not isinstance(corder, dict):
@@ -1332,7 +1351,7 @@ class Exchange:
             raise OperationalException(e) from e
 
     @retrier
-    def fetch_bids_asks(self, symbols: List[str] = None, cached: bool = False) -> Dict:
+    def fetch_bids_asks(self, symbols: Optional[List[str]] = None, cached: bool = False) -> Dict:
         """
         :param cached: Allow cached result
         :return: fetch_tickers result
@@ -1360,7 +1379,7 @@ class Exchange:
             raise OperationalException(e) from e
 
     @retrier
-    def get_tickers(self, symbols: List[str] = None, cached: bool = False) -> Dict:
+    def get_tickers(self, symbols: Optional[List[str]] = None, cached: bool = False) -> Dict:
         """
         :param cached: Allow cached result
         :return: fetch_tickers result
@@ -1444,6 +1463,23 @@ class Exchange:
         except ccxt.BaseError as e:
             raise OperationalException(e) from e
 
+    def _get_price_side(self, side: str, is_short: bool, conf_strategy: Dict) -> str:
+        price_side = conf_strategy['price_side']
+
+        if price_side in ('same', 'other'):
+            price_map = {
+                ('entry', 'long', 'same'): 'bid',
+                ('entry', 'long', 'other'): 'ask',
+                ('entry', 'short', 'same'): 'ask',
+                ('entry', 'short', 'other'): 'bid',
+                ('exit', 'long', 'same'): 'ask',
+                ('exit', 'long', 'other'): 'bid',
+                ('exit', 'short', 'same'): 'bid',
+                ('exit', 'short', 'other'): 'ask',
+            }
+            price_side = price_map[(side, 'short' if is_short else 'long', price_side)]
+        return price_side
+
     def get_rate(self, pair: str, refresh: bool,
                  side: EntryExit, is_short: bool) -> float:
         """
@@ -1470,20 +1506,7 @@ class Exchange:
 
         conf_strategy = self._config.get(strat_name, {})
 
-        price_side = conf_strategy['price_side']
-
-        if price_side in ('same', 'other'):
-            price_map = {
-                ('entry', 'long', 'same'): 'bid',
-                ('entry', 'long', 'other'): 'ask',
-                ('entry', 'short', 'same'): 'ask',
-                ('entry', 'short', 'other'): 'bid',
-                ('exit', 'long', 'same'): 'ask',
-                ('exit', 'long', 'other'): 'bid',
-                ('exit', 'short', 'same'): 'bid',
-                ('exit', 'short', 'other'): 'ask',
-            }
-            price_side = price_map[(side, 'short' if is_short else 'long', price_side)]
+        price_side = self._get_price_side(side, is_short, conf_strategy)
 
         price_side_word = price_side.capitalize()
 
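One lookup against the price_map in _get_price_side above: exiting a long with price_side 'same' quotes the ask, while 'other' flips to the bid:

    price_map[('exit', 'long', 'same')]    # -> 'ask'
    price_map[('exit', 'long', 'other')]   # -> 'bid'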
@@ -1695,7 +1718,7 @@ class Exchange:
     async def _async_get_historic_ohlcv(self, pair: str, timeframe: str,
                                         since_ms: int, candle_type: CandleType,
                                         is_new_pair: bool = False, raise_: bool = False,
-                                        until_ms: int = None
+                                        until_ms: Optional[int] = None
                                         ) -> Tuple[str, str, str, List]:
         """
         Download historic ohlcv
@@ -1703,7 +1726,8 @@ class Exchange:
         :param candle_type: Any of the enum CandleType (must match trading mode!)
         """
 
-        one_call = timeframe_to_msecs(timeframe) * self.ohlcv_candle_limit(timeframe)
+        one_call = timeframe_to_msecs(timeframe) * self.ohlcv_candle_limit(
+            timeframe, candle_type, since_ms)
         logger.debug(
             "one_call: %s msecs (%s)",
             one_call,
@@ -1739,7 +1763,8 @@ class Exchange:
         if (not since_ms
                 and (self._ft_has["ohlcv_require_since"] or self.required_candle_call_count > 1)):
             # Multiple calls for one pair - to get more history
-            one_call = timeframe_to_msecs(timeframe) * self.ohlcv_candle_limit(timeframe)
+            one_call = timeframe_to_msecs(timeframe) * self.ohlcv_candle_limit(
+                timeframe, candle_type, since_ms)
             move_to = one_call * self.required_candle_call_count
             now = timeframe_to_next_date(timeframe)
             since_ms = int((now - timedelta(seconds=move_to // 1000)).timestamp() * 1000)
@@ -1754,7 +1779,7 @@ class Exchange:
 
     def refresh_latest_ohlcv(self, pair_list: ListPairsWithTimeframes, *,
                              since_ms: Optional[int] = None, cache: bool = True,
-                             drop_incomplete: bool = None
+                             drop_incomplete: Optional[bool] = None
                              ) -> Dict[PairWithTimeframe, DataFrame]:
         """
         Refresh in-memory OHLCV asynchronously and set `_klines` with the result
@@ -1857,7 +1882,9 @@ class Exchange:
                 pair, timeframe, since_ms, s
             )
             params = deepcopy(self._ft_has.get('ohlcv_params', {}))
-            candle_limit = self.ohlcv_candle_limit(timeframe)
+            candle_limit = self.ohlcv_candle_limit(
+                timeframe, candle_type=candle_type, since_ms=since_ms)
 
             if candle_type != CandleType.SPOT:
                 params.update({'price': candle_type})
                 if candle_type != CandleType.FUNDING_RATE:
@@ -2104,10 +2131,11 @@ class Exchange:
         except ccxt.BaseError as e:
             raise OperationalException(e) from e
 
-    @retrier
-    def get_market_leverage_tiers(self, symbol) -> List[Dict]:
+    @retrier_async
+    async def get_market_leverage_tiers(self, symbol: str) -> Tuple[str, List[Dict]]:
         try:
-            return self._api.fetch_market_leverage_tiers(symbol)
+            tier = await self._api_async.fetch_market_leverage_tiers(symbol)
+            return symbol, tier
         except ccxt.DDoSProtection as e:
             raise DDosProtection(e) from e
         except (ccxt.NetworkError, ccxt.ExchangeError) as e:
@@ -2141,8 +2169,14 @@ class Exchange:
                     f"Initializing leverage_tiers for {len(symbols)} markets. "
                     "This will take about a minute.")
 
-                for symbol in sorted(symbols):
-                    tiers[symbol] = self.get_market_leverage_tiers(symbol)
+                coros = [self.get_market_leverage_tiers(symbol) for symbol in sorted(symbols)]
+
+                for input_coro in chunks(coros, 100):
+
+                    results = self.loop.run_until_complete(
+                        asyncio.gather(*input_coro, return_exceptions=True))
+                    for symbol, res in results:
+                        tiers[symbol] = res
 
                 logger.info(f"Done initializing {len(symbols)} markets.")
 
@@ -2392,14 +2426,35 @@ class Exchange:
         )
 
     @staticmethod
-    def combine_funding_and_mark(funding_rates: DataFrame, mark_rates: DataFrame) -> DataFrame:
+    def combine_funding_and_mark(funding_rates: DataFrame, mark_rates: DataFrame,
+                                 futures_funding_rate: Optional[int] = None) -> DataFrame:
         """
         Combine funding-rates and mark-rates dataframes
         :param funding_rates: Dataframe containing Funding rates (Type FUNDING_RATE)
         :param mark_rates: Dataframe containing Mark rates (Type mark_ohlcv_price)
+        :param futures_funding_rate: Fake funding rate to use if funding_rates are not available
         """
-
-        return funding_rates.merge(mark_rates, on='date', how="inner", suffixes=["_fund", "_mark"])
+        if futures_funding_rate is None:
+            return mark_rates.merge(
+                funding_rates, on='date', how="inner", suffixes=["_mark", "_fund"])
+        else:
+            if len(funding_rates) == 0:
+                # No funding rate candles - full fillup with fallback variable
+                mark_rates['open_fund'] = futures_funding_rate
+                return mark_rates.rename(
+                    columns={'open': 'open_mark',
+                             'close': 'close_mark',
+                             'high': 'high_mark',
+                             'low': 'low_mark',
+                             'volume': 'volume_mark'})
+            else:
+                # Fill up missing funding_rate candles with fallback value
+                combined = mark_rates.merge(
+                    funding_rates, on='date', how="outer", suffixes=["_mark", "_fund"]
+                )
+                combined['open_fund'] = combined['open_fund'].fillna(futures_funding_rate)
+                return combined
 
     def calculate_funding_fees(
         self,
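A minimal pandas sketch of the fallback path in combine_funding_and_mark above (dates and rates are made up):

    import pandas as pd
    mark = pd.DataFrame({'date': pd.to_datetime(['2022-01-01 00:00', '2022-01-01 08:00']),
                         'open': [100.0, 101.0]})
    fund = pd.DataFrame({'date': pd.to_datetime(['2022-01-01 00:00']),
                         'open': [0.0003]})
    combined = mark.merge(fund, on='date', how='outer', suffixes=['_mark', '_fund'])
    combined['open_fund'] = combined['open_fund'].fillna(0.0001)  # fallback rate
    # the 08:00 row, which had no funding candle, now carries 0.0001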
@@ -2674,9 +2729,10 @@ def timeframe_to_msecs(timeframe: str) -> int:
 
 def timeframe_to_prev_date(timeframe: str, date: datetime = None) -> datetime:
     """
-    Use Timeframe and determine last possible candle.
+    Use Timeframe and determine the candle start date for this date.
+    Does not round when given a candle start date.
     :param timeframe: timeframe in string format (e.g. "5m")
-    :param date: date to use. Defaults to utcnow()
+    :param date: date to use. Defaults to now(utc)
     :returns: date of previous candle (with utc timezone)
     """
     if not date:
@@ -2691,7 +2747,7 @@ def timeframe_to_next_date(timeframe: str, date: datetime = None) -> datetime:
     """
     Use Timeframe and determine next candle.
     :param timeframe: timeframe in string format (e.g. "5m")
-    :param date: date to use. Defaults to utcnow()
+    :param date: date to use. Defaults to now(utc)
     :returns: date of next candle (with utc timezone)
     """
     if not date:
@@ -2701,6 +2757,23 @@ def timeframe_to_next_date(timeframe: str, date: datetime = None) -> datetime:
     return datetime.fromtimestamp(new_timestamp, tz=timezone.utc)
 
 
+def date_minus_candles(
+        timeframe: str, candle_count: int, date: Optional[datetime] = None) -> datetime:
+    """
+    subtract X candles from a date.
+    :param timeframe: timeframe in string format (e.g. "5m")
+    :param candle_count: Amount of candles to subtract.
+    :param date: date to use. Defaults to now(utc)
+
+    """
+    if not date:
+        date = datetime.now(timezone.utc)
+
+    tf_min = timeframe_to_minutes(timeframe)
+    new_date = timeframe_to_prev_date(timeframe, date) - timedelta(minutes=tf_min * candle_count)
+    return new_date
+
+
 def market_is_active(market: Dict) -> bool:
     """
     Return True if the market is active.
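A worked example for date_minus_candles above: the date is first snapped down to its candle start via timeframe_to_prev_date, then shifted back candle_count * timeframe minutes.

    from datetime import datetime, timezone
    # 12:03 snaps to the 5m candle start 12:00, minus 5 * 5m = 25 minutes:
    date_minus_candles("5m", 5, datetime(2022, 5, 1, 12, 3, tzinfo=timezone.utc))
    # -> datetime(2022, 5, 1, 11, 35, tzinfo=timezone.utc)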
freqtrade/exchange/ftx.py
@@ -104,7 +104,7 @@ class Ftx(Exchange):
             raise OperationalException(e) from e
 
     @retrier(retries=API_FETCH_ORDER_RETRY_COUNT)
-    def fetch_stoploss_order(self, order_id: str, pair: str) -> Dict:
+    def fetch_stoploss_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
         if self._config['dry_run']:
             return self.fetch_dry_run_order(order_id)
 
@@ -145,7 +145,7 @@ class Ftx(Exchange):
             raise OperationalException(e) from e
 
     @retrier
-    def cancel_stoploss_order(self, order_id: str, pair: str) -> Dict:
+    def cancel_stoploss_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
         if self._config['dry_run']:
             return {}
         try:
@@ -3,6 +3,7 @@ import logging
|
||||
from datetime import datetime
|
||||
from typing import Dict, List, Optional, Tuple
|
||||
|
||||
from freqtrade.constants import BuySell
|
||||
from freqtrade.enums import MarginMode, TradingMode
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.exchange import Exchange
|
||||
@@ -24,6 +25,8 @@ class Gateio(Exchange):
|
||||
_ft_has: Dict = {
|
||||
"ohlcv_candle_limit": 1000,
|
||||
"ohlcv_volume_currency": "quote",
|
||||
"time_in_force_parameter": "timeInForce",
|
||||
"order_time_in_force": ['gtc', 'ioc'],
|
||||
"stoploss_order_types": {"limit": "limit"},
|
||||
"stoploss_on_exchange": True,
|
||||
}
|
||||
@@ -40,13 +43,33 @@ class Gateio(Exchange):
|
||||
]
|
||||
|
||||
def validate_ordertypes(self, order_types: Dict) -> None:
|
||||
super().validate_ordertypes(order_types)
|
||||
|
||||
if self.trading_mode != TradingMode.FUTURES:
|
||||
if any(v == 'market' for k, v in order_types.items()):
|
||||
raise OperationalException(
|
||||
f'Exchange {self.name} does not support market orders.')
|
||||
|
||||
def _get_params(
|
||||
self,
|
||||
side: BuySell,
|
||||
ordertype: str,
|
||||
leverage: float,
|
||||
reduceOnly: bool,
|
||||
time_in_force: str = 'gtc',
|
||||
) -> Dict:
|
||||
params = super()._get_params(
|
||||
side=side,
|
||||
ordertype=ordertype,
|
||||
leverage=leverage,
|
||||
reduceOnly=reduceOnly,
|
||||
time_in_force=time_in_force,
|
||||
)
|
||||
if ordertype == 'market' and self.trading_mode == TradingMode.FUTURES:
|
||||
params['type'] = 'market'
|
||||
param = self._ft_has.get('time_in_force_parameter', '')
|
||||
params.update({param: 'ioc'})
|
||||
return params
|
||||
|
||||
def get_trades_for_order(self, order_id: str, pair: str, since: datetime,
|
||||
params: Optional[Dict] = None) -> List:
|
||||
trades = super().get_trades_for_order(order_id, pair, since, params)
|
||||
@@ -61,7 +84,8 @@ class Gateio(Exchange):
|
||||
pair_fees = self._trading_fees.get(pair, {})
|
||||
if pair_fees:
|
||||
for idx, trade in enumerate(trades):
|
||||
if trade.get('fee', {}).get('cost') is None:
|
||||
fee = trade.get('fee', {})
|
||||
if fee and fee.get('cost') is None:
|
||||
takerOrMaker = trade.get('takerOrMaker', 'taker')
|
||||
if pair_fees.get(takerOrMaker) is not None:
|
||||
trades[idx]['fee'] = {
|
||||
@@ -71,14 +95,14 @@ class Gateio(Exchange):
|
||||
}
|
||||
return trades
|
||||
|
||||
def fetch_stoploss_order(self, order_id: str, pair: str, params={}) -> Dict:
|
||||
def fetch_stoploss_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
|
||||
return self.fetch_order(
|
||||
order_id=order_id,
|
||||
pair=pair,
|
||||
params={'stop': True}
|
||||
)
|
||||
|
||||
def cancel_stoploss_order(self, order_id: str, pair: str, params={}) -> Dict:
|
||||
def cancel_stoploss_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
|
||||
return self.cancel_order(
|
||||
order_id=order_id,
|
||||
pair=pair,
|
||||
@@ -90,5 +114,7 @@ class Gateio(Exchange):
|
||||
Verify stop_loss against stoploss-order value (limit or price)
|
||||
Returns True if adjustment is necessary.
|
||||
"""
|
||||
return ((side == "sell" and stop_loss > float(order['stopPrice'])) or
|
||||
(side == "buy" and stop_loss < float(order['stopPrice'])))
|
||||
return (order.get('stopPrice', None) is None or (
|
||||
side == "sell" and stop_loss > float(order['stopPrice'])) or
|
||||
(side == "buy" and stop_loss < float(order['stopPrice']))
|
||||
)
|
||||
|
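The stoploss_adjust() rewrites in this and the following hunks share one behavioural change: an order without a stopPrice (for example a dry-run placeholder) now always requests adjustment instead of failing on the missing key. A standalone sketch of the Gateio variant above, with an assumed ccxt-style order dict:

def stoploss_adjust_sketch(stop_loss: float, order: dict, side: str) -> bool:
    # Missing stopPrice -> True, so the order gets (re)placed.
    return (order.get('stopPrice') is None
            or (side == "sell" and stop_loss > float(order['stopPrice']))
            or (side == "buy" and stop_loss < float(order['stopPrice'])))

assert stoploss_adjust_sketch(1.23, {}, "sell") is True              # no stopPrice set
assert stoploss_adjust_sketch(1.23, {'stopPrice': '1.20'}, "sell") is True
assert stoploss_adjust_sketch(1.19, {'stopPrice': '1.20'}, "sell") is False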
@@ -27,7 +27,13 @@ class Huobi(Exchange):
Verify stop_loss against stoploss-order value (limit or price)
Returns True if adjustment is necessary.
"""
return order['type'] == 'stop' and stop_loss > float(order['stopPrice'])
return (
order.get('stopPrice', None) is None
or (
order['type'] == 'stop'
and stop_loss > float(order['stopPrice'])
)
)

def _get_stop_params(self, ordertype: str, stop_price: float) -> Dict:

@@ -23,6 +23,7 @@ class Kraken(Exchange):
_ft_has: Dict = {
"stoploss_on_exchange": True,
"ohlcv_candle_limit": 720,
"ohlcv_has_history": False,
"trades_pagination": "id",
"trades_pagination_arg": "since",
"mark_ohlcv_timeframe": "4h",
@@ -44,7 +45,7 @@ class Kraken(Exchange):
return (parent_check and
market.get('darkpool', False) is False)

def get_tickers(self, symbols: List[str] = None, cached: bool = False) -> Dict:
def get_tickers(self, symbols: Optional[List[str]] = None, cached: bool = False) -> Dict:
# Only fetch tickers for current stake currency
# Otherwise the request for kraken becomes too large.
symbols = list(self.get_markets(quote_currencies=[self._config['stake_currency']]))

@@ -33,7 +33,10 @@ class Kucoin(Exchange):
Verify stop_loss against stoploss-order value (limit or price)
Returns True if adjustment is necessary.
"""
return order['info'].get('stop') is not None and stop_loss > float(order['stopPrice'])
return (
order.get('stopPrice', None) is None
or stop_loss > float(order['stopPrice'])
)

def _get_stop_params(self, ordertype: str, stop_price: float) -> Dict:

@@ -1,13 +1,15 @@
import logging
from typing import Dict, List, Tuple
from typing import Dict, List, Optional, Tuple

import ccxt

from freqtrade.constants import BuySell
from freqtrade.enums import MarginMode, TradingMode
from freqtrade.enums.candletype import CandleType
from freqtrade.exceptions import DDosProtection, OperationalException, TemporaryError
from freqtrade.exchange import Exchange
from freqtrade.exchange.common import retrier
from freqtrade.exchange.exchange import date_minus_candles


logger = logging.getLogger(__name__)
@@ -20,7 +22,7 @@ class Okx(Exchange):
"""

_ft_has: Dict = {
"ohlcv_candle_limit": 100,
"ohlcv_candle_limit": 100,  # Warning, special case with data prior to X months
"mark_ohlcv_timeframe": "4h",
"funding_fee_timeframe": "8h",
}
@@ -37,6 +39,27 @@ class Okx(Exchange):

net_only = True

def ohlcv_candle_limit(
self, timeframe: str, candle_type: CandleType, since_ms: Optional[int] = None) -> int:
"""
Exchange ohlcv candle limit
OKX has the following behaviour:
* 300 candles for up-to-date data
* 100 candles for historic data
* 100 candles for additional candles (not futures or spot).
:param timeframe: Timeframe to check
:param candle_type: Candle-type
:param since_ms: Starting timestamp
:return: Candle limit as integer
"""
if (
candle_type in (CandleType.FUTURES, CandleType.SPOT) and
(not since_ms or since_ms > (date_minus_candles(timeframe, 300).timestamp() * 1000))
):
return 300

return super().ohlcv_candle_limit(timeframe, candle_type, since_ms)

@retrier
def additional_exchange_init(self) -> None:
"""
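The ohlcv_candle_limit() override above encodes OKX's split page sizes: 300 candles when the requested window starts inside the most recent 300 candles, 100 when paginating further back. A simplified standalone sketch of the decision, assuming 5-minute candles and millisecond timestamps:

from datetime import datetime, timedelta, timezone

def okx_candle_limit_sketch(since_ms, tf_minutes: int = 5, default_limit: int = 100) -> int:
    window_start = datetime.now(timezone.utc) - timedelta(minutes=tf_minutes * 300)
    if since_ms is None or since_ms > window_start.timestamp() * 1000:
        return 300            # recent data: full 300-candle pages
    return default_limit      # historic pagination: 100-candle pages

assert okx_candle_limit_sketch(None) == 300
old_ts = int(datetime(2020, 1, 1, tzinfo=timezone.utc).timestamp() * 1000)
assert okx_candle_limit_sketch(old_ts) == 100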
@@ -1,4 +1,3 @@

import collections
import json
import logging
@@ -27,10 +26,11 @@ class FreqaiDataDrawer:
This object remains persistent throughout live/dry, unlike FreqaiDataKitchen, which is
reinstantiated for each coin.
"""

def __init__(self, full_path: Path, config: dict, follow_mode: bool = False):

self.config = config
self.freqai_info = config.get('freqai', {})
self.freqai_info = config.get("freqai", {})
# dictionary holding all pair metadata necessary to load in from disk
self.pair_dict: Dict[str, Any] = {}
# dictionary holding all actively inferenced models in memory given a model filename
@@ -38,7 +38,6 @@ class FreqaiDataDrawer:
self.model_return_values: Dict[str, Any] = {}
self.pair_data_dict: Dict[str, Any] = {}
self.historic_data: Dict[str, Any] = {}
# self.populated_historic_data: Dict[str, Any] = {} ?
self.follower_dict: Dict[str, Any] = {}
self.full_path = full_path
self.follow_mode = follow_mode
@@ -47,7 +46,6 @@ class FreqaiDataDrawer:
self.load_drawer_from_disk()
self.training_queue: Dict[str, int] = {}
self.history_lock = threading.Lock()
# self.create_training_queue(pair_whitelist)

def load_drawer_from_disk(self):
"""
@@ -56,15 +54,17 @@ class FreqaiDataDrawer:
:returns:
exists: bool = whether or not the drawer was located
"""
exists = Path(self.full_path / str('pair_dictionary.json')).resolve().exists()
exists = Path(self.full_path / str("pair_dictionary.json")).resolve().exists()
if exists:
with open(self.full_path / str('pair_dictionary.json'), "r") as fp:
with open(self.full_path / str("pair_dictionary.json"), "r") as fp:
self.pair_dict = json.load(fp)
elif not self.follow_mode:
logger.info("Could not find existing datadrawer, starting from scratch")
else:
logger.warning(f'Follower could not find pair_dictionary at {self.full_path} '
'sending null values back to strategy')
logger.warning(
f"Follower could not find pair_dictionary at {self.full_path} "
"sending null values back to strategy"
)

return exists

@@ -72,36 +72,41 @@ class FreqaiDataDrawer:
"""
Save data drawer full of all pair model metadata in present model folder.
"""
with open(self.full_path / str('pair_dictionary.json'), "w") as fp:
with open(self.full_path / str("pair_dictionary.json"), "w") as fp:
json.dump(self.pair_dict, fp, default=self.np_encoder)

def save_follower_dict_to_disk(self):
"""
Save follower dictionary to disk (used by strategy for persistent prediction targets)
"""
follower_name = self.config.get('bot_name', 'follower1')
with open(self.full_path / str('follower_dictionary-' +
follower_name + '.json'), "w") as fp:
follower_name = self.config.get("bot_name", "follower1")
with open(
self.full_path / str("follower_dictionary-" + follower_name + ".json"), "w"
) as fp:
json.dump(self.follower_dict, fp, default=self.np_encoder)

def create_follower_dict(self):
"""
Create a dictionary for each follower to maintain unique persistent prediction targets
"""
follower_name = self.config.get('bot_name', 'follower1')
whitelist_pairs = self.config.get('exchange', {}).get('pair_whitelist')
follower_name = self.config.get("bot_name", "follower1")
whitelist_pairs = self.config.get("exchange", {}).get("pair_whitelist")

exists = Path(self.full_path / str('follower_dictionary-' +
follower_name + '.json')).resolve().exists()
exists = (
Path(self.full_path / str("follower_dictionary-" + follower_name + ".json"))
.resolve()
.exists()
)

if exists:
logger.info('Found an existing follower dictionary')
logger.info("Found an existing follower dictionary")

for pair in whitelist_pairs:
self.follower_dict[pair] = {}

with open(self.full_path / str('follower_dictionary-' +
follower_name + '.json'), "w") as fp:
with open(
self.full_path / str("follower_dictionary-" + follower_name + ".json"), "w"
) as fp:
json.dump(self.follower_dict, fp, default=self.np_encoder)

def np_encoder(self, object):
@@ -122,46 +127,48 @@ class FreqaiDataDrawer:
return_null_array: bool = Follower could not find pair metadata
"""
pair_in_dict = self.pair_dict.get(pair)
data_path_set = self.pair_dict.get(pair, {}).get('data_path', None)
data_path_set = self.pair_dict.get(pair, {}).get("data_path", None)
return_null_array = False

if pair_in_dict:
model_filename = self.pair_dict[pair]['model_filename']
trained_timestamp = self.pair_dict[pair]['trained_timestamp']
coin_first = self.pair_dict[pair]['first']
model_filename = self.pair_dict[pair]["model_filename"]
trained_timestamp = self.pair_dict[pair]["trained_timestamp"]
coin_first = self.pair_dict[pair]["first"]
elif not self.follow_mode:
self.pair_dict[pair] = {}
model_filename = self.pair_dict[pair]['model_filename'] = ''
coin_first = self.pair_dict[pair]['first'] = True
trained_timestamp = self.pair_dict[pair]['trained_timestamp'] = 0
self.pair_dict[pair]['priority'] = len(self.pair_dict)
model_filename = self.pair_dict[pair]["model_filename"] = ""
coin_first = self.pair_dict[pair]["first"] = True
trained_timestamp = self.pair_dict[pair]["trained_timestamp"] = 0
self.pair_dict[pair]["priority"] = len(self.pair_dict)

if not data_path_set and self.follow_mode:
logger.warning(f'Follower could not find current pair {pair} in '
f'pair_dictionary at path {self.full_path}, sending null values '
'back to strategy.')
logger.warning(
f"Follower could not find current pair {pair} in "
f"pair_dictionary at path {self.full_path}, sending null values "
"back to strategy."
)
return_null_array = True

return model_filename, trained_timestamp, coin_first, return_null_array

def set_pair_dict_info(self, metadata: dict) -> None:
pair_in_dict = self.pair_dict.get(metadata['pair'])
pair_in_dict = self.pair_dict.get(metadata["pair"])
if pair_in_dict:
return
else:
self.pair_dict[metadata['pair']] = {}
self.pair_dict[metadata['pair']]['model_filename'] = ''
self.pair_dict[metadata['pair']]['first'] = True
self.pair_dict[metadata['pair']]['trained_timestamp'] = 0
self.pair_dict[metadata['pair']]['priority'] = len(self.pair_dict)
self.pair_dict[metadata["pair"]] = {}
self.pair_dict[metadata["pair"]]["model_filename"] = ""
self.pair_dict[metadata["pair"]]["first"] = True
self.pair_dict[metadata["pair"]]["trained_timestamp"] = 0
self.pair_dict[metadata["pair"]]["priority"] = len(self.pair_dict)
return

def pair_to_end_of_training_queue(self, pair: str) -> None:
# march all pairs up in the queue
for p in self.pair_dict:
self.pair_dict[p]['priority'] -= 1
self.pair_dict[p]["priority"] -= 1
# send pair to end of queue
self.pair_dict[pair]['priority'] = len(self.pair_dict)
self.pair_dict[pair]["priority"] = len(self.pair_dict)

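pair_to_end_of_training_queue() above implements the training rotation as a plain priority counter in pair_dict: every pair moves up one slot, the finished pair drops to the back. A toy illustration of the same bookkeeping (pair names illustrative):

pair_dict = {"BTC/USDT": {"priority": 1}, "ETH/USDT": {"priority": 2}, "XRP/USDT": {"priority": 3}}

def to_end_of_queue(pair_dict: dict, pair: str) -> None:
    for p in pair_dict:                            # march all pairs up in the queue
        pair_dict[p]["priority"] -= 1
    pair_dict[pair]["priority"] = len(pair_dict)   # send pair to end of queue

to_end_of_queue(pair_dict, "BTC/USDT")
print({p: d["priority"] for p, d in pair_dict.items()})
# {'BTC/USDT': 3, 'ETH/USDT': 1, 'XRP/USDT': 2}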
def set_initial_return_values(self, pair: str, dk, pred_df, do_preds) -> None:
"""
@@ -172,16 +179,15 @@ class FreqaiDataDrawer:
self.model_return_values[pair] = pd.DataFrame()
for label in dk.label_list:
self.model_return_values[pair][label] = pred_df[label]
self.model_return_values[pair][f'{label}_mean'] = dk.data['labels_mean'][label]
self.model_return_values[pair][f'{label}_std'] = dk.data['labels_std'][label]
self.model_return_values[pair][f"{label}_mean"] = dk.data["labels_mean"][label]
self.model_return_values[pair][f"{label}_std"] = dk.data["labels_std"][label]

if self.freqai_info.get('feature_parameters', {}).get('DI_threshold', 0) > 0:
self.model_return_values[pair]['DI_values'] = dk.DI_values
if self.freqai_info.get("feature_parameters", {}).get("DI_threshold", 0) > 0:
self.model_return_values[pair]["DI_values"] = dk.DI_values

self.model_return_values[pair]['do_predict'] = do_preds
self.model_return_values[pair]["do_predict"] = do_preds

def append_model_predictions(self, pair: str, predictions, do_preds,
dk, len_df) -> None:
def append_model_predictions(self, pair: str, predictions, do_preds, dk, len_df) -> None:

# strat seems to feed us variable sized dataframes - and since we are trying to build our
# own return array in the same shape, we need to figure out how the size has changed
@@ -198,17 +204,18 @@ class FreqaiDataDrawer:

for label in dk.label_list:
df[label].iloc[-1] = predictions[label].iloc[-1]
df[f"{label}_mean"].iloc[-1] = dk.data['labels_mean'][label]
df[f"{label}_std"].iloc[-1] = dk.data['labels_std'][label]
df[f"{label}_mean"].iloc[-1] = dk.data["labels_mean"][label]
df[f"{label}_std"].iloc[-1] = dk.data["labels_std"][label]
# df['prediction'].iloc[-1] = predictions[-1]
df['do_predict'].iloc[-1] = do_preds[-1]
df["do_predict"].iloc[-1] = do_preds[-1]

if self.freqai_info.get('feature_parameters', {}).get('DI_threshold', 0) > 0:
df['DI_values'].iloc[-1] = dk.DI_values[-1]
if self.freqai_info.get("feature_parameters", {}).get("DI_threshold", 0) > 0:
df["DI_values"].iloc[-1] = dk.DI_values[-1]

if length_difference < 0:
prepend_df = pd.DataFrame(np.zeros((abs(length_difference) - 1, len(df.columns))),
columns=df.columns)
prepend_df = pd.DataFrame(
np.zeros((abs(length_difference) - 1, len(df.columns))), columns=df.columns
)
df = pd.concat([prepend_df, df], axis=0)

def attach_return_values_to_return_dataframe(self, pair: str, dataframe) -> DataFrame:
@@ -220,7 +227,7 @@ class FreqaiDataDrawer:
dataframe: DataFrame = strat dataframe with return values attached
"""
df = self.model_return_values[pair]
to_keep = [col for col in dataframe.columns if not col.startswith('&')]
to_keep = [col for col in dataframe.columns if not col.startswith("&")]
dataframe = pd.concat([dataframe[to_keep], df], axis=1)
return dataframe

@@ -237,10 +244,10 @@ class FreqaiDataDrawer:
dataframe[f"{label}_std"] = 0

# dataframe['prediction'] = 0
dataframe['do_predict'] = 0
dataframe["do_predict"] = 0

if self.freqai_info.get('feature_parameters', {}).get('DI_threshold', 0) > 0:
dataframe['DI_value'] = 0
if self.freqai_info.get("feature_parameters", {}).get("DI_threshold", 0) > 0:
dataframe["DI_value"] = 0

dk.return_dataframe = dataframe

@@ -261,29 +268,30 @@ class FreqaiDataDrawer:

if coin not in delete_dict:
delete_dict[coin] = {}
delete_dict[coin]['num_folders'] = 1
delete_dict[coin]['timestamps'] = {int(timestamp): dir}
delete_dict[coin]["num_folders"] = 1
delete_dict[coin]["timestamps"] = {int(timestamp): dir}
else:
delete_dict[coin]['num_folders'] += 1
delete_dict[coin]['timestamps'][int(timestamp)] = dir
delete_dict[coin]["num_folders"] += 1
delete_dict[coin]["timestamps"][int(timestamp)] = dir

for coin in delete_dict:
if delete_dict[coin]['num_folders'] > 2:
if delete_dict[coin]["num_folders"] > 2:
sorted_dict = collections.OrderedDict(
sorted(delete_dict[coin]['timestamps'].items()))
sorted(delete_dict[coin]["timestamps"].items())
)
num_delete = len(sorted_dict) - 2
deleted = 0
for k, v in sorted_dict.items():
if deleted >= num_delete:
break
logger.info(f'Freqai purging old model file {v}')
logger.info(f"Freqai purging old model file {v}")
shutil.rmtree(v)
deleted += 1

def update_follower_metadata(self):
# follower needs to load from disk to get any changes made by leader to pair_dict
self.load_drawer_from_disk()
if self.config.get('freqai', {}).get('purge_old_models', False):
if self.config.get("freqai", {}).get("purge_old_models", False):
self.purge_old_models()

# to be used if we want to send predictions directly to the follower instead of forcing

@@ -37,21 +37,28 @@ class FreqaiDataKitchen:
author: Robert Caulk, rob.caulk@gmail.com
"""

def __init__(self, config: Dict[str, Any], data_drawer: FreqaiDataDrawer, live: bool = False,
pair: str = ''):
def __init__(
self,
config: Dict[str, Any],
data_drawer: FreqaiDataDrawer,
live: bool = False,
pair: str = "",
):
self.data: Dict[Any, Any] = {}
self.data_dictionary: Dict[Any, Any] = {}
self.config = config
self.freqai_config = config["freqai"]
self.predictions: npt.ArrayLike = np.array([])
self.do_predict: npt.ArrayLike = np.array([])
self.target_mean: npt.ArrayLike = np.array([])
self.target_std: npt.ArrayLike = np.array([])
self.full_predictions: npt.ArrayLike = np.array([])
self.full_do_predict: npt.ArrayLike = np.array([])
self.full_DI_values: npt.ArrayLike = np.array([])
self.full_target_mean: npt.ArrayLike = np.array([])
self.full_target_std: npt.ArrayLike = np.array([])
# self.predictions: npt.ArrayLike = np.array([])
# self.do_predict: npt.ArrayLike = np.array([])
# self.target_mean: npt.ArrayLike = np.array([])
# self.target_std: npt.ArrayLike = np.array([])
# self.full_predictions: npt.ArrayLike = np.array([])
# self.full_do_predict: npt.ArrayLike = np.array([])
# self.full_DI_values: npt.ArrayLike = np.array([])
# self.full_target_mean: npt.ArrayLike = np.array([])
# self.full_target_std: npt.ArrayLike = np.array([])
self.full_df: DataFrame = DataFrame()
self.append_df: DataFrame = DataFrame()
self.data_path = Path()
self.label_list: List = []
self.model_filename: str = ""
@@ -60,9 +67,9 @@ class FreqaiDataKitchen:
self.svm_model: linear_model.SGDOneClassSVM = None
self.set_all_pairs()
if not self.live:
self.full_timerange = self.create_fulltimerange(self.config["timerange"],
self.freqai_config.get("train_period")
)
self.full_timerange = self.create_fulltimerange(
self.config["timerange"], self.freqai_config.get("train_period")
)

(self.training_timeranges, self.backtesting_timeranges) = self.split_timerange(
self.full_timerange,
@@ -72,24 +79,28 @@ class FreqaiDataKitchen:
# self.strat_dataframe: DataFrame = strat_dataframe
self.dd = data_drawer

def set_paths(self, pair: str, trained_timestamp: int = None,) -> None:
def set_paths(
self,
pair: str,
trained_timestamp: int = None,
) -> None:
"""
Set the paths to the data for the present coin/botloop
:params:
metadata: dict = strategy furnished pair metadata
trained_timestamp: int = timestamp of most recent training
"""
self.full_path = Path(self.config['user_data_dir'] /
"models" /
str(self.freqai_config.get('identifier')))
self.full_path = Path(
self.config["user_data_dir"] / "models" / str(self.freqai_config.get("identifier"))
)

self.data_path = Path(self.full_path / str("sub-train" + "-" +
pair.split("/")[0] +
str(trained_timestamp)))
self.data_path = Path(
self.full_path / str("sub-train" + "-" + pair.split("/")[0] + str(trained_timestamp))
)

return

def save_data(self, model: Any, coin: str = '', keras_model=False, label=None) -> None:
def save_data(self, model: Any, coin: str = "", keras_model=False, label=None) -> None:
"""
Saves all data associated with a model for a single sub-train time range
:params:
@@ -114,7 +125,7 @@ class FreqaiDataKitchen:
self.data["data_path"] = str(self.data_path)
self.data["model_filename"] = str(self.model_filename)
self.data["training_features_list"] = list(self.data_dictionary["train_features"].columns)
self.data['label_list'] = self.label_list
self.data["label_list"] = self.label_list
# store the metadata
with open(save_path / str(self.model_filename + "_metadata.json"), "w") as fp:
json.dump(self.data, fp, default=self.np_encoder)
@@ -124,14 +135,15 @@ class FreqaiDataKitchen:
save_path / str(self.model_filename + "_trained_df.pkl")
)

if self.freqai_config.get('feature_parameters', {}).get('principal_component_analysis'):
pk.dump(self.pca, open(self.data_path /
str(self.model_filename + "_pca_object.pkl"), "wb"))
if self.freqai_config.get("feature_parameters", {}).get("principal_component_analysis"):
pk.dump(
self.pca, open(self.data_path / str(self.model_filename + "_pca_object.pkl"), "wb")
)

# if self.live:
self.dd.model_dictionary[self.model_filename] = model
self.dd.pair_dict[coin]['model_filename'] = self.model_filename
self.dd.pair_dict[coin]['data_path'] = str(self.data_path)
self.dd.pair_dict[coin]["model_filename"] = self.model_filename
self.dd.pair_dict[coin]["data_path"] = str(self.data_path)
self.dd.save_drawer_to_disk()

# TODO add a helper function to let user save/load any data they are custom adding. We
@@ -149,29 +161,32 @@ class FreqaiDataKitchen:

return

def load_data(self, coin: str = '', keras_model=False) -> Any:
def load_data(self, coin: str = "", keras_model=False) -> Any:
"""
loads all data required to make a prediction on a sub-train time range
:returns:
:model: User trained model which can be inferenced for new predictions
"""

if not self.dd.pair_dict[coin]['model_filename']:
if not self.dd.pair_dict[coin]["model_filename"]:
return None

if self.live:
self.model_filename = self.dd.pair_dict[coin]['model_filename']
self.data_path = Path(self.dd.pair_dict[coin]['data_path'])
if self.freqai_config.get('follow_mode', False):
self.model_filename = self.dd.pair_dict[coin]["model_filename"]
self.data_path = Path(self.dd.pair_dict[coin]["data_path"])
if self.freqai_config.get("follow_mode", False):
# follower can be on a different system which is rsynced to the leader:
self.data_path = Path(self.config["user_data_dir"] /
"models" / self.data_path.parts[-2] /
self.data_path.parts[-1])
self.data_path = Path(
self.config["user_data_dir"]
/ "models"
/ self.data_path.parts[-2]
/ self.data_path.parts[-1]
)

with open(self.data_path / str(self.model_filename + "_metadata.json"), "r") as fp:
self.data = json.load(fp)
self.training_features_list = self.data["training_features_list"]
self.label_list = self.data['label_list']
self.label_list = self.data["label_list"]

self.data_dictionary["train_features"] = pd.read_pickle(
self.data_path / str(self.model_filename + "_trained_df.pkl")
@@ -200,17 +215,16 @@ class FreqaiDataKitchen:
model = load(self.data_path / str(self.model_filename + "_model.joblib"))
else:
from tensorflow import keras

model = keras.models.load_model(self.data_path / str(self.model_filename + "_model.h5"))

if Path(self.data_path / str(self.model_filename +
"_svm_model.joblib")).resolve().exists():
if Path(self.data_path / str(self.model_filename + "_svm_model.joblib")).resolve().exists():
self.svm_model = load(self.data_path / str(self.model_filename + "_svm_model.joblib"))

if not model:
raise OperationalException(
f"Unable to load model, ensure model exists at "
f"{self.data_path} "
)
f"Unable to load model, ensure model exists at " f"{self.data_path} "
)

if self.config["freqai"]["feature_parameters"]["principal_component_analysis"]:
self.pca = pk.load(
@@ -257,7 +271,7 @@ class FreqaiDataKitchen:
weights,
stratify=stratification,
# shuffle=False,
**self.config["freqai"]["data_split_parameters"]
**self.config["freqai"]["data_split_parameters"],
)

return self.build_data_dictionary(
@@ -309,14 +323,14 @@ class FreqaiDataKitchen:
(drop_index == 0) & (drop_index_labels == 0)
] # assuming the labels depend entirely on the dataframe here.
logger.info(
f'dropped {len(unfiltered_dataframe) - len(filtered_dataframe)} training points'
f' due to NaNs in populated dataset {len(unfiltered_dataframe)}.'
f"dropped {len(unfiltered_dataframe) - len(filtered_dataframe)} training points"
f" due to NaNs in populated dataset {len(unfiltered_dataframe)}."
)
if (1 - len(filtered_dataframe) / len(unfiltered_dataframe)) > 0.1 and self.live:
logger.warning(
f' {(1 - len(filtered_dataframe)/len(unfiltered_dataframe)) * 100:.2f} percent'
' of training data dropped due to NaNs, model may perform inconsistent'
'with expectations'
f" {(1 - len(filtered_dataframe)/len(unfiltered_dataframe)) * 100:.2f} percent"
" of training data dropped due to NaNs, model may perform inconsistently"
" with expectations"
)
self.data["filter_drop_index_training"] = drop_index

@@ -372,21 +386,27 @@ class FreqaiDataKitchen:
# standardize the data by training stats
train_max = data_dictionary["train_features"].max()
train_min = data_dictionary["train_features"].min()
data_dictionary["train_features"] = 2 * (
data_dictionary["train_features"] - train_min
) / (train_max - train_min) - 1
data_dictionary["test_features"] = 2 * (
data_dictionary["test_features"] - train_min
) / (train_max - train_min) - 1
data_dictionary["train_features"] = (
2 * (data_dictionary["train_features"] - train_min) / (train_max - train_min) - 1
)
data_dictionary["test_features"] = (
2 * (data_dictionary["test_features"] - train_min) / (train_max - train_min) - 1
)

train_labels_max = data_dictionary["train_labels"].max()
train_labels_min = data_dictionary["train_labels"].min()
data_dictionary["train_labels"] = 2 * (
data_dictionary["train_labels"] - train_labels_min
) / (train_labels_max - train_labels_min) - 1
data_dictionary["test_labels"] = 2 * (
data_dictionary["test_labels"] - train_labels_min
) / (train_labels_max - train_labels_min) - 1
data_dictionary["train_labels"] = (
2
* (data_dictionary["train_labels"] - train_labels_min)
/ (train_labels_max - train_labels_min)
- 1
)
data_dictionary["test_labels"] = (
2
* (data_dictionary["test_labels"] - train_labels_min)
/ (train_labels_max - train_labels_min)
- 1
)

for item in train_max.keys():
self.data[item + "_max"] = train_max[item]
@@ -406,8 +426,12 @@ class FreqaiDataKitchen:
"""

for item in df.keys():
df[item] = 2 * (df[item] - self.data[item + "_min"]) / (self.data[item + "_max"] -
self.data[item + '_min']) - 1
df[item] = (
2
* (df[item] - self.data[item + "_min"])
/ (self.data[item + "_max"] - self.data[item + "_min"])
- 1
)

return df

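The reflowed blocks in the two hunks above all apply the same map: features and labels are min-max scaled into [-1, 1] using the training-set extrema, and those extrema are stored in self.data so live data can be scaled (and predictions unscaled) identically. A self-contained sketch of the forward and inverse map on toy data:

import pandas as pd

train = pd.DataFrame({"feat": [1.0, 2.0, 3.0, 4.0]})
t_min, t_max = train.min(), train.max()

normed = 2 * (train - t_min) / (t_max - t_min) - 1       # forward map, range [-1, 1]
restored = (normed + 1) * (t_max - t_min) / 2 + t_min    # inverse map
assert (restored - train).abs().max().max() < 1e-12
print(normed["feat"].tolist())   # [-1.0, -0.333..., 0.333..., 1.0]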
@@ -429,8 +453,9 @@ class FreqaiDataKitchen:
full_timerange = TimeRange.parse_timerange(tr)
config_timerange = TimeRange.parse_timerange(self.config["timerange"])
if config_timerange.stopts == 0:
config_timerange.stopts = int(datetime.datetime.now(
tz=datetime.timezone.utc).timestamp())
config_timerange.stopts = int(
datetime.datetime.now(tz=datetime.timezone.utc).timestamp()
)
timerange_train = copy.deepcopy(full_timerange)
timerange_backtest = copy.deepcopy(full_timerange)

@@ -518,7 +543,7 @@ class FreqaiDataKitchen:

# keeping a copy of the non-transformed features so we can check for errors during
# model load from disk
self.data['training_features_list_raw'] = copy.deepcopy(self.training_features_list)
self.data["training_features_list_raw"] = copy.deepcopy(self.training_features_list)
self.training_features_list = self.data_dictionary["train_features"].columns

self.data_dictionary["test_features"] = pd.DataFrame(
@@ -530,7 +555,7 @@ class FreqaiDataKitchen:
self.data["n_kept_components"] = n_keep_components
self.pca = pca2

logger.info(f'PCA reduced total features from {n_components} to {n_keep_components}')
logger.info(f"PCA reduced total features from {n_components} to {n_keep_components}")

if not self.data_path.is_dir():
self.data_path.mkdir(parents=True, exist_ok=True)
@@ -557,10 +582,10 @@ class FreqaiDataKitchen:
for prediction confidence in the Dissimilarity Index
"""
logger.info("computing average mean distance for all training points")
tc = self.freqai_config.get('model_training_parameters', {}).get('thread_count', -1)
tc = self.freqai_config.get("model_training_parameters", {}).get("thread_count", -1)
pairwise = pairwise_distances(self.data_dictionary["train_features"], n_jobs=tc)
avg_mean_dist = pairwise.mean(axis=1).mean()
logger.info(f'avg_mean_dist {avg_mean_dist:.2f}')
logger.info(f"avg_mean_dist {avg_mean_dist:.2f}")

return avg_mean_dist

@@ -579,45 +604,49 @@ class FreqaiDataKitchen:

if (len(do_predict) - do_predict.sum()) > 0:
logger.info(
f'svm_remove_outliers() tossed {len(do_predict) - do_predict.sum()} predictions'
f"svm_remove_outliers() tossed {len(do_predict) - do_predict.sum()} predictions"
)
self.do_predict += do_predict
self.do_predict -= 1

else:
# use SGDOneClassSVM to increase speed?
nu = self.freqai_config.get('feature_parameters', {}).get('svm_nu', 0.2)
nu = self.freqai_config.get("feature_parameters", {}).get("svm_nu", 0.2)
self.svm_model = linear_model.SGDOneClassSVM(nu=nu).fit(
self.data_dictionary["train_features"]
)
self.data_dictionary["train_features"]
)
y_pred = self.svm_model.predict(self.data_dictionary["train_features"])
dropped_points = np.where(y_pred == -1, 0, y_pred)
# keep_index = np.where(y_pred == 1)
self.data_dictionary["train_features"] = self.data_dictionary[
"train_features"][(y_pred == 1)]
self.data_dictionary["train_labels"] = self.data_dictionary[
"train_labels"][(y_pred == 1)]
self.data_dictionary["train_weights"] = self.data_dictionary[
"train_weights"][(y_pred == 1)]
self.data_dictionary["train_features"] = self.data_dictionary["train_features"][
(y_pred == 1)
]
self.data_dictionary["train_labels"] = self.data_dictionary["train_labels"][
(y_pred == 1)
]
self.data_dictionary["train_weights"] = self.data_dictionary["train_weights"][
(y_pred == 1)
]

logger.info(
f'svm_remove_outliers() tossed {len(y_pred) - dropped_points.sum()}'
f' train points from {len(y_pred)}'
f"svm_remove_outliers() tossed {len(y_pred) - dropped_points.sum()}"
f" train points from {len(y_pred)}"
)

# same for test data
y_pred = self.svm_model.predict(self.data_dictionary["test_features"])
dropped_points = np.where(y_pred == -1, 0, y_pred)
self.data_dictionary["test_features"] = self.data_dictionary[
"test_features"][(y_pred == 1)]
self.data_dictionary["test_labels"] = self.data_dictionary[
"test_labels"][(y_pred == 1)]
self.data_dictionary["test_weights"] = self.data_dictionary[
"test_weights"][(y_pred == 1)]
self.data_dictionary["test_features"] = self.data_dictionary["test_features"][
(y_pred == 1)
]
self.data_dictionary["test_labels"] = self.data_dictionary["test_labels"][(y_pred == 1)]
self.data_dictionary["test_weights"] = self.data_dictionary["test_weights"][
(y_pred == 1)
]

logger.info(
f'svm_remove_outliers() tossed {len(y_pred) - dropped_points.sum()}'
f' test points from {len(y_pred)}'
f"svm_remove_outliers() tossed {len(y_pred) - dropped_points.sum()}"
f" test points from {len(y_pred)}"
)

return
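The outlier pass above keeps only rows the one-class SVM labels +1, applying the identical mask to features, labels, and weights so they stay aligned. A runnable sketch of that filter on toy data (sklearn's SGDOneClassSVM returns +1 for inliers and -1 for outliers):

import numpy as np
from sklearn import linear_model

rng = np.random.default_rng(42)
X = np.vstack([rng.normal(0, 1, (100, 3)), rng.normal(8, 1, (5, 3))])  # 5 planted outliers

svm = linear_model.SGDOneClassSVM(nu=0.2).fit(X)
y_pred = svm.predict(X)           # +1 inlier, -1 outlier
X_kept = X[y_pred == 1]
print(f"svm sketch tossed {len(X) - len(X_kept)} of {len(X)} points")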
@@ -631,8 +660,8 @@ class FreqaiDataKitchen:
features: list = the features to be used for training/prediction
"""
column_names = dataframe.columns
features = [c for c in column_names if '%' in c]
labels = [c for c in column_names if '&' in c]
features = [c for c in column_names if "%" in c]
labels = [c for c in column_names if "&" in c]
if not features:
raise OperationalException("Could not find any features!")

@@ -657,16 +686,15 @@ class FreqaiDataKitchen:
self.DI_values = distance.min(axis=0) / self.data["avg_mean_dist"]

do_predict = np.where(
self.DI_values
< self.freqai_config.get("feature_parameters", {}).get("DI_threshold"),
self.DI_values < self.freqai_config.get("feature_parameters", {}).get("DI_threshold"),
1,
0,
)

if (len(do_predict) - do_predict.sum()) > 0:
logger.info(
f'DI tossed {len(do_predict) - do_predict.sum():.2f} predictions for '
'being too far from training data'
f"DI tossed {len(do_predict) - do_predict.sum():.2f} predictions for "
"being too far from training data"
)

self.do_predict += do_predict
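The Dissimilarity Index computed above scales each incoming point's distance to its nearest training point by the average pairwise distance of the training set; predictions whose DI exceeds DI_threshold are masked out. A standalone sketch with an illustrative threshold:

import numpy as np
from sklearn.metrics import pairwise_distances

train = np.random.default_rng(0).normal(0, 1, (200, 4))
incoming = np.random.default_rng(1).normal(0, 1, (5, 4))

avg_mean_dist = pairwise_distances(train).mean(axis=1).mean()
distance = pairwise_distances(train, incoming)        # shape (200, 5)
DI_values = distance.min(axis=0) / avg_mean_dist      # nearest-neighbour distance, scaled

DI_threshold = 1.0  # illustrative; configured via feature_parameters.DI_threshold
do_predict = np.where(DI_values < DI_threshold, 1, 0)
print(DI_values.round(3), do_predict)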
@@ -690,31 +718,56 @@ class FreqaiDataKitchen:
Append backtest prediction from current backtest period to all previous periods
"""

ones = np.ones(len(predictions))
target_mean, target_std = ones * self.data["target_mean"], ones * self.data["target_std"]
# ones = np.ones(len(predictions))
# target_mean, target_std = ones * self.data["target_mean"], ones * self.data["target_std"]
self.append_df = DataFrame()
for label in self.label_list:
self.append_df[label] = predictions[label]
self.append_df[f"{label}_mean"] = self.data["labels_mean"][label]
self.append_df[f"{label}_std"] = self.data["labels_std"][label]

self.full_predictions = np.append(self.full_predictions, predictions)
self.full_do_predict = np.append(self.full_do_predict, do_predict)
if self.freqai_config.get('feature_parameters', {}).get('DI_threshold', 0) > 0:
self.full_DI_values = np.append(self.full_DI_values, self.DI_values)
self.full_target_mean = np.append(self.full_target_mean, target_mean)
self.full_target_std = np.append(self.full_target_std, target_std)
self.append_df["do_predict"] = do_predict
if self.freqai_config.get("feature_parameters", {}).get("DI_threshold", 0) > 0:
self.append_df["DI_values"] = self.DI_values

if self.full_df.empty:
self.full_df = self.append_df
else:
self.full_df = pd.concat([self.full_df, self.append_df], axis=0)

# self.full_predictions = np.append(self.full_predictions, predictions)
# self.full_do_predict = np.append(self.full_do_predict, do_predict)
# if self.freqai_config.get("feature_parameters", {}).get("DI_threshold", 0) > 0:
# self.full_DI_values = np.append(self.full_DI_values, self.DI_values)
# self.full_target_mean = np.append(self.full_target_mean, target_mean)
# self.full_target_std = np.append(self.full_target_std, target_std)

return

def fill_predictions(self, len_dataframe):
def fill_predictions(self, dataframe):
"""
Back fill values to before the backtesting range so that the dataframe matches size
when it goes back to the strategy. These rows are not included in the backtest.
"""

filler = np.zeros(len_dataframe - len(self.full_predictions)) # startup_candle_count
self.full_predictions = np.append(filler, self.full_predictions)
self.full_do_predict = np.append(filler, self.full_do_predict)
if self.freqai_config.get('feature_parameters', {}).get('DI_threshold', 0) > 0:
self.full_DI_values = np.append(filler, self.full_DI_values)
self.full_target_mean = np.append(filler, self.full_target_mean)
self.full_target_std = np.append(filler, self.full_target_std)
len_filler = len(dataframe) - len(self.full_df.index) # startup_candle_count
filler_df = pd.DataFrame(
np.zeros((len_filler, len(self.full_df.columns))), columns=self.full_df.columns
)

self.full_df = pd.concat([filler_df, self.full_df], axis=0, ignore_index=True)

to_keep = [col for col in dataframe.columns if not col.startswith("&")]
self.return_dataframe = pd.concat([dataframe[to_keep], self.full_df], axis=1)

self.append_df = DataFrame()
self.full_df = DataFrame()
# self.full_predictions = np.append(filler, self.full_predictions)
# self.full_do_predict = np.append(filler, self.full_do_predict)
# if self.freqai_config.get("feature_parameters", {}).get("DI_threshold", 0) > 0:
# self.full_DI_values = np.append(filler, self.full_DI_values)
# self.full_target_mean = np.append(filler, self.full_target_mean)
# self.full_target_std = np.append(filler, self.full_target_std)

return

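fill_predictions() now pads full_df with zero rows so the concatenated prediction columns line up row-for-row with the strategy dataframe; the padded rows cover the startup candles and are excluded from the backtest. A toy illustration of the padding (column names are illustrative):

import numpy as np
import pandas as pd

strategy_len = 10
full_df = pd.DataFrame({"&-s_close": [0.1, 0.2, 0.3], "do_predict": [1, 1, 0]})

len_filler = strategy_len - len(full_df.index)        # rows consumed by startup candles
filler_df = pd.DataFrame(np.zeros((len_filler, len(full_df.columns))), columns=full_df.columns)
padded = pd.concat([filler_df, full_df], axis=0, ignore_index=True)
assert len(padded) == strategy_len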
@@ -722,8 +775,9 @@ class FreqaiDataKitchen:
backtest_timerange = TimeRange.parse_timerange(backtest_tr)

if backtest_timerange.stopts == 0:
backtest_timerange.stopts = int(datetime.datetime.now(
tz=datetime.timezone.utc).timestamp())
backtest_timerange.stopts = int(
datetime.datetime.now(tz=datetime.timezone.utc).timestamp()
)

backtest_timerange.startts = backtest_timerange.startts - backtest_period * SECONDS_IN_DAY
start = datetime.datetime.utcfromtimestamp(backtest_timerange.startts)
@@ -731,9 +785,7 @@ class FreqaiDataKitchen:
full_timerange = start.strftime("%Y%m%d") + "-" + stop.strftime("%Y%m%d")

self.full_path = Path(
self.config["user_data_dir"]
/ "models"
/ str(self.freqai_config.get("identifier"))
self.config["user_data_dir"] / "models" / str(self.freqai_config.get("identifier"))
)

config_path = Path(self.config["config_files"][0])
@@ -758,61 +810,71 @@ class FreqaiDataKitchen:
"""
time = datetime.datetime.now(tz=datetime.timezone.utc).timestamp()
elapsed_time = (time - trained_timestamp) / 3600 # hours
max_time = self.freqai_config.get('expiration_hours', 0)
max_time = self.freqai_config.get("expiration_hours", 0)
if max_time > 0:
return elapsed_time > max_time
else:
return False

def check_if_new_training_required(self, trained_timestamp: int) -> Tuple[bool,
TimeRange, TimeRange]:
def check_if_new_training_required(
self, trained_timestamp: int
) -> Tuple[bool, TimeRange, TimeRange]:

time = datetime.datetime.now(tz=datetime.timezone.utc).timestamp()
trained_timerange = TimeRange()
data_load_timerange = TimeRange()

# find the max indicator length required
max_timeframe_chars = self.freqai_config.get('timeframes')[-1]
max_period = self.freqai_config.get('feature_parameters', {}).get(
'indicator_max_period', 50)
max_timeframe_chars = self.freqai_config.get("timeframes")[-1]
max_period = self.freqai_config.get("feature_parameters", {}).get(
"indicator_max_period", 50
)
additional_seconds = 0
if max_timeframe_chars[-1] == 'd':
if max_timeframe_chars[-1] == "d":
additional_seconds = max_period * SECONDS_IN_DAY * int(max_timeframe_chars[-2])
elif max_timeframe_chars[-1] == 'h':
elif max_timeframe_chars[-1] == "h":
additional_seconds = max_period * 3600 * int(max_timeframe_chars[-2])
elif max_timeframe_chars[-1] == 'm':
elif max_timeframe_chars[-1] == "m":
if len(max_timeframe_chars) == 2:
additional_seconds = max_period * 60 * int(max_timeframe_chars[-2])
elif len(max_timeframe_chars) == 3:
additional_seconds = max_period * 60 * int(float(max_timeframe_chars[0:2]))
else:
logger.warning('FreqAI could not detect max timeframe and therefore may not '
'download the proper amount of data for training')
logger.warning(
"FreqAI could not detect max timeframe and therefore may not "
"download the proper amount of data for training"
)

# logger.info(f'Extending data download by {additional_seconds/SECONDS_IN_DAY:.2f} days')

if trained_timestamp != 0:
elapsed_time = (time - trained_timestamp) / SECONDS_IN_DAY
retrain = elapsed_time > self.freqai_config.get('backtest_period')
retrain = elapsed_time > self.freqai_config.get("backtest_period")
if retrain:
trained_timerange.startts = int(time - self.freqai_config.get(
'train_period', 0) * SECONDS_IN_DAY)
trained_timerange.startts = int(
time - self.freqai_config.get("train_period", 0) * SECONDS_IN_DAY
)
trained_timerange.stopts = int(time)
# we want to load/populate indicators on more data than we plan to train on
# because most of the indicators have a rolling timeperiod, and are thus NaNs
# unless they have data further back in time before the start of the train period
data_load_timerange.startts = int(time - self.freqai_config.get(
'train_period', 0) * SECONDS_IN_DAY
- additional_seconds)
data_load_timerange.startts = int(
time
- self.freqai_config.get("train_period", 0) * SECONDS_IN_DAY
- additional_seconds
)
data_load_timerange.stopts = int(time)
else: # user passed no live_trained_timerange in config
trained_timerange.startts = int(time - self.freqai_config.get('train_period') *
SECONDS_IN_DAY)
trained_timerange.startts = int(
time - self.freqai_config.get("train_period") * SECONDS_IN_DAY
)
trained_timerange.stopts = int(time)

data_load_timerange.startts = int(time - self.freqai_config.get(
'train_period', 0) * SECONDS_IN_DAY
- additional_seconds)
data_load_timerange.startts = int(
time
- self.freqai_config.get("train_period", 0) * SECONDS_IN_DAY
- additional_seconds
)
data_load_timerange.stopts = int(time)
retrain = True

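The max_timeframe_chars branch above converts the largest configured timeframe plus the longest indicator period into extra seconds of history to download, so rolling indicators are not NaN at the start of the training window. A simplified, generalised sketch of that conversion (the original parses fixed character positions; this version is equivalent for the '1d'/'4h'/'5m'/'15m' shapes it handles):

SECONDS_IN_DAY = 86400

def additional_seconds_sketch(timeframe: str, max_period: int = 50) -> int:
    unit, magnitude = timeframe[-1], timeframe[:-1]
    scale = {"d": SECONDS_IN_DAY, "h": 3600, "m": 60}.get(unit, 0)
    return max_period * scale * int(magnitude) if scale else 0

assert additional_seconds_sketch("4h") == 50 * 3600 * 4    # 720000
assert additional_seconds_sketch("15m") == 50 * 60 * 15    # 45000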
@@ -822,9 +884,10 @@ class FreqaiDataKitchen:

coin, _ = pair.split("/")
# set the new data_path
self.data_path = Path(self.full_path / str("sub-train" + "-" +
pair.split("/")[0] +
str(int(trained_timerange.stopts))))
self.data_path = Path(
self.full_path
/ str("sub-train" + "-" + pair.split("/")[0] + str(int(trained_timerange.stopts)))
)

self.model_filename = "cb_" + coin.lower() + "_" + str(int(trained_timerange.stopts))

@@ -860,20 +923,24 @@ class FreqaiDataKitchen:
timerange: TimeRange = The full data timerange for populating the indicators
and training the model.
"""
exchange = ExchangeResolver.load_exchange(self.config['exchange']['name'],
self.config, validate=False, freqai=True)
exchange = ExchangeResolver.load_exchange(
self.config["exchange"]["name"], self.config, validate=False, freqai=True
)

new_pairs_days = int((timerange.stopts - timerange.startts) / SECONDS_IN_DAY)

refresh_backtest_ohlcv_data(
exchange, pairs=self.all_pairs,
timeframes=self.freqai_config.get('timeframes'),
datadir=self.config['datadir'], timerange=timerange,
new_pairs_days=new_pairs_days,
erase=False, data_format=self.config.get('dataformat_ohlcv', 'json'),
trading_mode=self.config.get('trading_mode', 'spot'),
prepend=self.config.get('prepend_data', False)
)
exchange,
pairs=self.all_pairs,
timeframes=self.freqai_config.get("timeframes"),
datadir=self.config["datadir"],
timerange=timerange,
new_pairs_days=new_pairs_days,
erase=False,
data_format=self.config.get("dataformat_ohlcv", "json"),
trading_mode=self.config.get("trading_mode", "spot"),
prepend=self.config.get("prepend_data", False),
)

def update_historic_data(self, strategy: IStrategy) -> None:
"""
@@ -888,34 +955,36 @@ class FreqaiDataKitchen:
history_data = self.dd.historic_data

for pair in self.all_pairs:
for tf in self.freqai_config.get('timeframes'):
for tf in self.freqai_config.get("timeframes"):

# check if newest candle is already appended
df_dp = strategy.dp.get_pair_dataframe(pair, tf)
if len(df_dp.index) == 0:
continue
if (
str(history_data[pair][tf].iloc[-1]['date']) ==
str(df_dp.iloc[-1:]['date'].iloc[-1])
):
if str(history_data[pair][tf].iloc[-1]["date"]) == str(
df_dp.iloc[-1:]["date"].iloc[-1]
):
continue

index = df_dp.loc[
df_dp['date'] ==
history_data[pair][tf].iloc[-1]['date']
].index[0] + 1
index = (
df_dp.loc[df_dp["date"] == history_data[pair][tf].iloc[-1]["date"]].index[0]
+ 1
)
history_data[pair][tf] = pd.concat(
[history_data[pair][tf],
strategy.dp.get_pair_dataframe(pair, tf).iloc[index:]],
ignore_index=True, axis=0
)
[
history_data[pair][tf],
strategy.dp.get_pair_dataframe(pair, tf).iloc[index:],
],
ignore_index=True,
axis=0,
)

# logger.info(f'Length of history data {len(history_data[pair][tf])}')

def set_all_pairs(self) -> None:

self.all_pairs = copy.deepcopy(self.freqai_config.get('corr_pairlist', []))
for pair in self.config.get('exchange', '').get('pair_whitelist'):
self.all_pairs = copy.deepcopy(self.freqai_config.get("corr_pairlist", []))
for pair in self.config.get("exchange", "").get("pair_whitelist"):
if pair not in self.all_pairs:
self.all_pairs.append(pair)

@@ -932,17 +1001,19 @@ class FreqaiDataKitchen:
for pair in self.all_pairs:
if pair not in history_data:
history_data[pair] = {}
for tf in self.freqai_config.get('timeframes'):
history_data[pair][tf] = load_pair_history(datadir=self.config['datadir'],
timeframe=tf,
pair=pair, timerange=timerange,
data_format=self.config.get(
'dataformat_ohlcv', 'json'),
candle_type=self.config.get(
'trading_mode', 'spot'))
for tf in self.freqai_config.get("timeframes"):
history_data[pair][tf] = load_pair_history(
datadir=self.config["datadir"],
timeframe=tf,
pair=pair,
timerange=timerange,
data_format=self.config.get("dataformat_ohlcv", "json"),
candle_type=self.config.get("trading_mode", "spot"),
)

def get_base_and_corr_dataframes(self, timerange: TimeRange,
pair: str) -> Tuple[Dict[Any, Any], Dict[Any, Any]]:
def get_base_and_corr_dataframes(
self, timerange: TimeRange, pair: str
) -> Tuple[Dict[Any, Any], Dict[Any, Any]]:
"""
Searches through our historic_data in memory and returns the dataframes relevant
to the present pair.
@@ -956,21 +1027,19 @@ class FreqaiDataKitchen:
corr_dataframes: Dict[Any, Any] = {}
base_dataframes: Dict[Any, Any] = {}
historic_data = self.dd.historic_data
pairs = self.freqai_config.get('corr_pairlist', [])
pairs = self.freqai_config.get("corr_pairlist", [])

for tf in self.freqai_config.get('timeframes'):
base_dataframes[tf] = self.slice_dataframe(
timerange,
historic_data[pair][tf]
)
for tf in self.freqai_config.get("timeframes"):
base_dataframes[tf] = self.slice_dataframe(timerange, historic_data[pair][tf])
if pairs:
for p in pairs:
if pair in p:
continue # don't repeat anything from whitelist
if p not in corr_dataframes:
corr_dataframes[p] = {}
corr_dataframes[p][tf] = self.slice_dataframe(timerange,
historic_data[p][tf])
corr_dataframes[p][tf] = self.slice_dataframe(
timerange, historic_data[p][tf]
)

return corr_dataframes, base_dataframes

@@ -1006,10 +1075,9 @@ class FreqaiDataKitchen:

# return corr_dataframes, base_dataframes

def use_strategy_to_populate_indicators(self, strategy: IStrategy,
corr_dataframes: dict,
base_dataframes: dict,
pair: str) -> DataFrame:
def use_strategy_to_populate_indicators(
self, strategy: IStrategy, corr_dataframes: dict, base_dataframes: dict, pair: str
) -> DataFrame:
"""
Use the user defined strategy for populating indicators during
retrain
@@ -1023,30 +1091,32 @@ class FreqaiDataKitchen:
:returns:
dataframe: DataFrame = dataframe containing populated indicators
"""
dataframe = base_dataframes[self.config['timeframe']].copy()
dataframe = base_dataframes[self.config["timeframe"]].copy()
pairs = self.freqai_config.get("corr_pairlist", [])

sgi = True
for tf in self.freqai_config.get("timeframes"):
dataframe = strategy.populate_any_indicators(
pair,
pair,
dataframe.copy(),
tf,
base_dataframes[tf],
coin=pair.split("/")[0] + "-"
)
pair,
pair,
dataframe.copy(),
tf,
base_dataframes[tf],
coin=pair.split("/")[0] + "-",
set_generalized_indicators=sgi,
)
sgi = False
if pairs:
for i in pairs:
if pair in i:
continue # don't repeat anything from whitelist
dataframe = strategy.populate_any_indicators(
pair,
i,
dataframe.copy(),
tf,
corr_dataframes[i][tf],
coin=i.split("/")[0] + "-"
)
pair,
i,
dataframe.copy(),
tf,
corr_dataframes[i][tf],
coin=i.split("/")[0] + "-",
)

return dataframe

@@ -1056,7 +1126,7 @@ class FreqaiDataKitchen:
"""
import scipy as spy

self.data['labels_mean'], self.data['labels_std'] = {}, {}
self.data["labels_mean"], self.data["labels_std"] = {}, {}
for label in self.label_list:
f = spy.stats.norm.fit(self.data_dictionary["train_labels"][label])
self.data["labels_mean"][label], self.data["labels_std"][label] = f[0], f[1]
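The label statistics at the end of the hunk come from scipy.stats.norm.fit, which returns the maximum-likelihood (mean, std) of the sample; these become the {label}_mean/{label}_std columns returned to the strategy. A minimal sketch using the explicit scipy.stats import (sample values are synthetic):

import numpy as np
from scipy.stats import norm

train_labels = np.random.default_rng(7).normal(loc=0.002, scale=0.01, size=500)
mean, std = norm.fit(train_labels)   # MLE estimates, as used for labels_mean/labels_std
print(f"labels_mean={mean:.4f} labels_std={std:.4f}")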
@@ -29,6 +29,7 @@ logger = logging.getLogger(__name__)
def threaded(fn):
def wrapper(*args, **kwargs):
threading.Thread(target=fn, args=args, kwargs=kwargs).start()

return wrapper

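The threaded decorator above fires the wrapped function on its own threading.Thread and returns immediately; note the wrapper returns None, so results must be communicated through shared state. A usage sketch:

import threading
import time

def threaded(fn):
    def wrapper(*args, **kwargs):
        threading.Thread(target=fn, args=args, kwargs=kwargs).start()
    return wrapper

@threaded
def slow_job(name):
    time.sleep(1)
    print(f"{name} finished on {threading.current_thread().name}")

slow_job("retrain")            # returns immediately; work continues in the background
print("main thread keeps scanning")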
@@ -46,7 +47,7 @@ class IFreqaiModel(ABC):
|
||||
self.config = config
|
||||
self.assert_config(self.config)
|
||||
self.freqai_info = config["freqai"]
|
||||
self.data_split_parameters = config.get('freqai', {}).get("data_split_parameters")
|
||||
self.data_split_parameters = config.get("freqai", {}).get("data_split_parameters")
|
||||
self.model_training_parameters = config.get("freqai", {}).get("model_training_parameters")
|
||||
self.feature_parameters = config.get("freqai", {}).get("feature_parameters")
|
||||
self.time_last_trained = None
|
||||
@@ -58,23 +59,21 @@ class IFreqaiModel(ABC):
|
||||
self.first = True
|
||||
self.update_historic_data = 0
|
||||
self.set_full_path()
|
||||
self.follow_mode = self.freqai_info.get('follow_mode', False)
|
||||
self.follow_mode = self.freqai_info.get("follow_mode", False)
|
||||
self.dd = FreqaiDataDrawer(Path(self.full_path), self.config, self.follow_mode)
|
||||
self.lock = threading.Lock()
|
||||
self.follow_mode = self.freqai_info.get('follow_mode', False)
|
||||
self.identifier = self.freqai_info.get('identifier', 'no_id_provided')
|
||||
self.follow_mode = self.freqai_info.get("follow_mode", False)
|
||||
self.identifier = self.freqai_info.get("identifier", "no_id_provided")
|
||||
self.scanning = False
|
||||
self.ready_to_scan = False
|
||||
self.first = True
|
||||
self.keras = self.freqai_info.get('keras', False)
|
||||
self.CONV_WIDTH = self.freqai_info.get('conv_width', 2)
|
||||
self.keras = self.freqai_info.get("keras", False)
|
||||
self.CONV_WIDTH = self.freqai_info.get("conv_width", 2)
|
||||
|
||||
def assert_config(self, config: Dict[str, Any]) -> None:
|
||||
|
||||
if not config.get('freqai', {}):
|
||||
raise OperationalException(
|
||||
"No freqai parameters found in configuration file."
|
||||
)
|
||||
if not config.get("freqai", {}):
|
||||
raise OperationalException("No freqai parameters found in configuration file.")
|
||||
|
||||
    def start(self, dataframe: DataFrame, metadata: dict, strategy: IStrategy) -> DataFrame:
        """
@@ -92,8 +91,7 @@ class IFreqaiModel(ABC):
        self.dd.set_pair_dict_info(metadata)

        if self.live:
            self.dk = FreqaiDataKitchen(self.config, self.dd,
                                        self.live, metadata["pair"])
            self.dk = FreqaiDataKitchen(self.config, self.dd, self.live, metadata["pair"])
            dk = self.start_live(dataframe, metadata, strategy, self.dk)

        # For backtesting, each pair enters and then gets trained for each window along the
@@ -103,7 +101,7 @@ class IFreqaiModel(ABC):
        # the concatenated results for the full backtesting period back to the strategy.
        elif not self.follow_mode:
            self.dk = FreqaiDataKitchen(self.config, self.dd, self.live, metadata["pair"])
            logger.info(f'Training {len(self.dk.training_timeranges)} timeranges')
            logger.info(f"Training {len(self.dk.training_timeranges)} timeranges")
            dk = self.start_backtesting(dataframe, metadata, self.dk)

        dataframe = self.remove_features_from_df(dk.return_dataframe)
@@ -120,14 +118,13 @@ class IFreqaiModel(ABC):
        """
        while 1:
            time.sleep(1)
            for pair in self.config.get('exchange', {}).get('pair_whitelist'):
            for pair in self.config.get("exchange", {}).get("pair_whitelist"):

                (_, trained_timestamp, _, _) = self.dd.get_pair_dict_info(pair)

                if self.dd.pair_dict[pair]['priority'] != 1:
                if self.dd.pair_dict[pair]["priority"] != 1:
                    continue
                dk = FreqaiDataKitchen(self.config, self.dd,
                                       self.live, pair)
                dk = FreqaiDataKitchen(self.config, self.dd, self.live, pair)

                # file_exists = False

@@ -138,17 +135,21 @@ class IFreqaiModel(ABC):
                #                                 model_filename=model_filename,
                #                                 scanning=True)

                (retrain,
                 new_trained_timerange,
                 data_load_timerange) = dk.check_if_new_training_required(trained_timestamp)
                (
                    retrain,
                    new_trained_timerange,
                    data_load_timerange,
                ) = dk.check_if_new_training_required(trained_timestamp)
                dk.set_paths(pair, new_trained_timerange.stopts)

                if retrain:  # or not file_exists:
                    self.train_model_in_series(new_trained_timerange, pair,
                                               strategy, dk, data_load_timerange)
                    self.train_model_in_series(
                        new_trained_timerange, pair, strategy, dk, data_load_timerange
                    )
    def start_backtesting(self, dataframe: DataFrame, metadata: dict,
                          dk: FreqaiDataKitchen) -> FreqaiDataKitchen:
    def start_backtesting(
        self, dataframe: DataFrame, metadata: dict, dk: FreqaiDataKitchen
    ) -> FreqaiDataKitchen:
        """
        The main broad execution for backtesting. For backtesting, each pair enters and then gets
        trained for each window along the sliding window defined by "train_period" (training window)
@@ -169,10 +170,8 @@ class IFreqaiModel(ABC):
        # tr_backtest is the backtesting time range e.g. the week directly
        # following tr_train. Both of these windows slide through the
        # entire backtest
        for tr_train, tr_backtest in zip(
            dk.training_timeranges, dk.backtesting_timeranges
        ):
            (_, _, _, _) = self.dd.get_pair_dict_info(metadata['pair'])
        for tr_train, tr_backtest in zip(dk.training_timeranges, dk.backtesting_timeranges):
            (_, _, _, _) = self.dd.get_pair_dict_info(metadata["pair"])
            gc.collect()
            dk.data = {}  # clean the pair specific data between training window sliding
            self.training_timerange = tr_train
@@ -181,40 +180,49 @@ class IFreqaiModel(ABC):
            dataframe_backtest = dk.slice_dataframe(tr_backtest, dataframe)

            trained_timestamp = tr_train  # TimeRange.parse_timerange(tr_train)
            tr_train_startts_str = datetime.datetime.utcfromtimestamp(
                tr_train.startts).strftime('%Y-%m-%d %H:%M:%S')
            tr_train_stopts_str = datetime.datetime.utcfromtimestamp(
                tr_train.stopts).strftime('%Y-%m-%d %H:%M:%S')
            tr_train_startts_str = datetime.datetime.utcfromtimestamp(tr_train.startts).strftime(
                "%Y-%m-%d %H:%M:%S"
            )
            tr_train_stopts_str = datetime.datetime.utcfromtimestamp(tr_train.stopts).strftime(
                "%Y-%m-%d %H:%M:%S"
            )
            logger.info("Training %s", metadata["pair"])
            logger.info(f'Training {tr_train_startts_str} to {tr_train_stopts_str}')
            logger.info(f"Training {tr_train_startts_str} to {tr_train_stopts_str}")

            dk.data_path = Path(dk.full_path /
                                str("sub-train" + "-" + metadata['pair'].split("/")[0] +
                                    str(int(trained_timestamp.stopts))))
            if not self.model_exists(metadata["pair"], dk,
                                     trained_timestamp=trained_timestamp.stopts):
                self.model = self.train(dataframe_train, metadata['pair'], dk)
                self.dd.pair_dict[metadata['pair']][
                    'trained_timestamp'] = trained_timestamp.stopts
                dk.set_new_model_names(metadata['pair'], trained_timestamp)
                dk.save_data(self.model, metadata['pair'], keras_model=self.keras)
            dk.data_path = Path(
                dk.full_path
                / str(
                    "sub-train"
                    + "-"
                    + metadata["pair"].split("/")[0]
                    + str(int(trained_timestamp.stopts))
                )
            )
            if not self.model_exists(
                metadata["pair"], dk, trained_timestamp=trained_timestamp.stopts
            ):
                dk.find_features(dataframe_train)
                self.model = self.train(dataframe_train, metadata["pair"], dk)
                self.dd.pair_dict[metadata["pair"]]["trained_timestamp"] = trained_timestamp.stopts
                dk.set_new_model_names(metadata["pair"], trained_timestamp)
                dk.save_data(self.model, metadata["pair"], keras_model=self.keras)
            else:
                self.model = dk.load_data(metadata['pair'], keras_model=self.keras)
                self.model = dk.load_data(metadata["pair"], keras_model=self.keras)

            self.check_if_feature_list_matches_strategy(dataframe_train, dk)

            preds, do_preds = self.predict(dataframe_backtest, dk)
            pred_df, do_preds = self.predict(dataframe_backtest, dk)

            dk.append_predictions(preds, do_preds, len(dataframe_backtest))
            print('predictions', len(dk.full_predictions),
                  'do_predict', len(dk.full_do_predict))
            dk.append_predictions(pred_df, do_preds, len(dataframe_backtest))
            # print("predictions", len(dk.full_predictions), "do_predict", len(dk.full_do_predict))

        dk.fill_predictions(len(dataframe))
        dk.fill_predictions(dataframe)

        return dk
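The zip over `dk.training_timeranges` and `dk.backtesting_timeranges` pairs each training
window with the test window that immediately follows it, then slides both. A schematic with
illustrative dates (the real ranges come from FreqaiDataKitchen):

    # train_period = 30 days, backtest_period = 7 days (illustrative numbers)
    training_timeranges    = ["Jan 01-Jan 30", "Jan 08-Feb 06", "Jan 15-Feb 13"]
    backtesting_timeranges = ["Jan 31-Feb 06", "Feb 07-Feb 13", "Feb 14-Feb 20"]

    for tr_train, tr_backtest in zip(training_timeranges, backtesting_timeranges):
        ...  # train on tr_train, predict on tr_backtest, slide both windows forward
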
    def start_live(self, dataframe: DataFrame, metadata: dict,
                   strategy: IStrategy, dk: FreqaiDataKitchen) -> FreqaiDataKitchen:
    def start_live(
        self, dataframe: DataFrame, metadata: dict, strategy: IStrategy, dk: FreqaiDataKitchen
    ) -> FreqaiDataKitchen:
        """
        The main broad execution for dry/live. This function will check if a retraining should be
        performed, and if so, retrain and reset the model.
@@ -232,14 +240,11 @@ class IFreqaiModel(ABC):
            self.dd.update_follower_metadata()

        # get the model metadata associated with the current pair
        (_,
         trained_timestamp,
         _,
         return_null_array) = self.dd.get_pair_dict_info(metadata['pair'])
        (_, trained_timestamp, _, return_null_array) = self.dd.get_pair_dict_info(metadata["pair"])

        # if the metadata doesn't exist, the follower returns null arrays to strategy
        if self.follow_mode and return_null_array:
            logger.info('Returning null array from follower to strategy')
            logger.info("Returning null array from follower to strategy")
            self.dd.return_null_values_to_strategy(dataframe, dk)
            return dk

@@ -253,16 +258,18 @@ class IFreqaiModel(ABC):
        # if not trainable, load existing data
        if not self.follow_mode:

            (_,
             new_trained_timerange,
             data_load_timerange) = dk.check_if_new_training_required(trained_timestamp)
            dk.set_paths(metadata['pair'], new_trained_timerange.stopts)
            (_, new_trained_timerange, data_load_timerange) = dk.check_if_new_training_required(
                trained_timestamp
            )
            dk.set_paths(metadata["pair"], new_trained_timerange.stopts)

            # download candle history if it is not already in memory
            if not self.dd.historic_data:
                logger.info('Downloading all training data for all pairs in whitelist and '
                            'corr_pairlist, this may take a while if you do not have the '
                            'data saved')
                logger.info(
                    "Downloading all training data for all pairs in whitelist and "
                    "corr_pairlist, this may take a while if you do not have the "
                    "data saved"
                )
                dk.download_all_data_for_training(data_load_timerange)
                dk.load_all_pair_histories(data_load_timerange)

@@ -271,53 +278,47 @@ class IFreqaiModel(ABC):
                self.start_scanning(strategy)

        elif self.follow_mode:
            dk.set_paths(metadata['pair'], trained_timestamp)
            logger.info('FreqAI instance set to follow_mode, finding existing pair'
                        f'using { self.identifier }')
            dk.set_paths(metadata["pair"], trained_timestamp)
            logger.info(
                "FreqAI instance set to follow_mode, finding existing pair "
                f"using { self.identifier }"
            )

        # load the model and associated data into the data kitchen
        self.model = dk.load_data(coin=metadata['pair'], keras_model=self.keras)
        self.model = dk.load_data(coin=metadata["pair"], keras_model=self.keras)

        if not self.model:
            logger.warning('No model ready, returning null values to strategy.')
            logger.warning("No model ready, returning null values to strategy.")
            self.dd.return_null_values_to_strategy(dataframe, dk)
            return dk

        # ensure user is feeding the correct indicators to the model
        self.check_if_feature_list_matches_strategy(dataframe, dk)

        self.build_strategy_return_arrays(dataframe, dk, metadata['pair'], trained_timestamp)
        self.build_strategy_return_arrays(dataframe, dk, metadata["pair"], trained_timestamp)

        return dk
    def build_strategy_return_arrays(self, dataframe: DataFrame,
                                     dk: FreqaiDataKitchen, pair: str,
                                     trained_timestamp: int) -> None:
    def build_strategy_return_arrays(
        self, dataframe: DataFrame, dk: FreqaiDataKitchen, pair: str, trained_timestamp: int
    ) -> None:

        # hold the historical predictions in memory so we are sending back
        # correct array to strategy

        if pair not in self.dd.model_return_values:
            pred_df, do_preds = self.predict(dataframe, dk)
            # mypy doesn't like the typing in else statement, so we need to explicitly add to
            # dataframe separately

            # for label in dk.label_list:
            #     dataframe[label] = pred_df[label]

            # dataframe['do_predict'] = do_preds

            # dk.append_predictions(preds, do_preds, len(dataframe))
            # dk.fill_predictions(len(dataframe))
            self.dd.set_initial_return_values(pair, dk, pred_df, do_preds)
            dk.return_dataframe = self.dd.attach_return_values_to_return_dataframe(pair, dataframe)
            return
        elif self.dk.check_if_model_expired(trained_timestamp):
            pred_df = DataFrame(np.zeros((2, len(dk.label_list))), columns=dk.label_list)
            do_preds, dk.DI_values = np.ones(2) * 2, np.zeros(2)
            logger.warning('Model expired, returning null values to strategy. Strategy '
                           'construction should take care to consider this event with '
                           'prediction == 0 and do_predict == 2')
            logger.warning(
                "Model expired, returning null values to strategy. Strategy "
                "construction should take care to consider this event with "
                "prediction == 0 and do_predict == 2"
            )
        else:
            # Only feed in the most recent candle for prediction in live scenario
            pred_df, do_preds = self.predict(dataframe.iloc[-self.CONV_WIDTH:], dk, first=False)
@@ -327,8 +328,9 @@ class IFreqaiModel(ABC):

        return
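The expired-model branch establishes a convention the consuming strategy must honor:
predictions are zeroed and do_predict is set to 2 rather than the usual 0/1 outlier flag.
A hedged strategy-side sketch (the label column name is illustrative, not from this diff):

    def populate_entry_trend(self, df, metadata):
        # do_predict == 1 -> model output is trustworthy
        # do_predict == 2 -> model expired: predictions are zeroed, do not trade on them
        usable = df["do_predict"] == 1
        df.loc[usable & (df["my_label"] > 0), "enter_long"] = 1  # "my_label" is illustrative
        return df
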
    def check_if_feature_list_matches_strategy(self, dataframe: DataFrame,
                                               dk: FreqaiDataKitchen) -> None:
    def check_if_feature_list_matches_strategy(
        self, dataframe: DataFrame, dk: FreqaiDataKitchen
    ) -> None:
        """
        Ensure user is passing the proper feature set if they are reusing an `identifier` pointing
        to a folder holding existing models.
@@ -337,16 +339,18 @@ class IFreqaiModel(ABC):
        dk: FreqaiDataKitchen = non-persistent data container/analyzer for current coin/bot loop
        """
        dk.find_features(dataframe)
        if 'training_features_list_raw' in dk.data:
            feature_list = dk.data['training_features_list_raw']
        if "training_features_list_raw" in dk.data:
            feature_list = dk.data["training_features_list_raw"]
        else:
            feature_list = dk.training_features_list
        if dk.training_features_list != feature_list:
            raise OperationalException("Trying to access pretrained model with `identifier` "
                                       "but found different features furnished by current strategy."
                                       "Change `identifer` to train from scratch, or ensure the"
                                       "strategy is furnishing the same features as the pretrained"
                                       "model")
            raise OperationalException(
                "Trying to access pretrained model with `identifier` "
                "but found different features furnished by current strategy. "
                "Change `identifier` to train from scratch, or ensure the "
                "strategy is furnishing the same features as the pretrained "
                "model"
            )
    def data_cleaning_train(self, dk: FreqaiDataKitchen) -> None:
        """
@@ -356,13 +360,13 @@ class IFreqaiModel(ABC):
        of how outlier data points are dropped from the dataframe used for training.
        """

        if self.freqai_info.get('feature_parameters', {}).get('principal_component_analysis'):
        if self.freqai_info.get("feature_parameters", {}).get("principal_component_analysis"):
            dk.principal_component_analysis()

        if self.freqai_info.get('feature_parameters', {}).get('use_SVM_to_remove_outliers'):
        if self.freqai_info.get("feature_parameters", {}).get("use_SVM_to_remove_outliers"):
            dk.use_SVM_to_remove_outliers(predict=False)

        if self.freqai_info.get('feature_parameters', {}).get('DI_threshold'):
        if self.freqai_info.get("feature_parameters", {}).get("DI_threshold"):
            dk.data["avg_mean_dist"] = dk.compute_distances()

        # if self.feature_parameters["determine_statistical_distributions"]:
@@ -381,13 +385,13 @@ class IFreqaiModel(ABC):
        of how the do_predict vector is modified. do_predict is ultimately passed back to strategy
        for buy signals.
        """
        if self.freqai_info.get('feature_parameters', {}).get('principal_component_analysis'):
        if self.freqai_info.get("feature_parameters", {}).get("principal_component_analysis"):
            dk.pca_transform(dataframe)

        if self.freqai_info.get('feature_parameters', {}).get('use_SVM_to_remove_outliers'):
        if self.freqai_info.get("feature_parameters", {}).get("use_SVM_to_remove_outliers"):
            dk.use_SVM_to_remove_outliers(predict=True)

        if self.freqai_info.get('feature_parameters', {}).get('DI_threshold'):
        if self.freqai_info.get("feature_parameters", {}).get("DI_threshold"):
            dk.check_if_pred_in_training_spaces()

        # if self.feature_parameters["determine_statistical_distributions"]:
@@ -395,8 +399,14 @@ class IFreqaiModel(ABC):
        #     if self.feature_parameters["remove_outliers"]:
        #         dk.remove_outliers(predict=True)  # creates dropped index
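Both cleaning hooks key off the same three `feature_parameters` flags, so enabling a step
for training automatically enables its counterpart at prediction time. A sketch of the
relevant config fragment (values are illustrative):

    "feature_parameters": {
        "principal_component_analysis": True,  # PCA at train, pca_transform at predict
        "use_SVM_to_remove_outliers": True,    # SVM drops outliers (train) / flags them (predict)
        "DI_threshold": 0.9,                   # dissimilarity-index gate; illustrative value
    }
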
    def model_exists(self, pair: str, dk: FreqaiDataKitchen, trained_timestamp: int = None,
                     model_filename: str = '', scanning: bool = False) -> bool:
    def model_exists(
        self,
        pair: str,
        dk: FreqaiDataKitchen,
        trained_timestamp: int = None,
        model_filename: str = "",
        scanning: bool = False,
    ) -> bool:
        """
        Given a pair and path, check if a model already exists
        :param pair: pair e.g. BTC/USD
@@ -416,25 +426,33 @@ class IFreqaiModel(ABC):
        return file_exists

    def set_full_path(self) -> None:
        self.full_path = Path(self.config['user_data_dir'] /
                              "models" /
                              str(self.freqai_info.get('identifier')))
        self.full_path = Path(
            self.config["user_data_dir"] / "models" / str(self.freqai_info.get("identifier"))
        )
        self.full_path.mkdir(parents=True, exist_ok=True)
        shutil.copy(self.config['config_files'][0], Path(self.full_path,
                                                         Path(self.config['config_files'][0]).name))
        shutil.copy(
            self.config["config_files"][0],
            Path(self.full_path, Path(self.config["config_files"][0]).name),
        )

    def remove_features_from_df(self, dataframe: DataFrame) -> DataFrame:
        """
        Remove the features from the dataframe before returning it to strategy. This keeps it
        compact for Frequi purposes.
        """
        to_keep = [col for col in dataframe.columns
                   if not col.startswith('%') or col.startswith('%%')]
        to_keep = [
            col for col in dataframe.columns if not col.startswith("%") or col.startswith("%%")
        ]
        return dataframe[to_keep]
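The `%` prefix marks engineered feature columns, while `%%` escapes that convention and keeps
the column; everything else (OHLCV, labels, signals) passes through untouched. A quick worked
example of the filter (column names illustrative):

    cols = ["close", "%-rsi-14", "%%-keep-me", "do_predict"]
    kept = [c for c in cols if not c.startswith("%") or c.startswith("%%")]
    # kept == ["close", "%%-keep-me", "do_predict"] -> "%-rsi-14" is dropped
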
    def train_model_in_series(self, new_trained_timerange: TimeRange, pair: str,
                              strategy: IStrategy, dk: FreqaiDataKitchen,
                              data_load_timerange: TimeRange):
    def train_model_in_series(
        self,
        new_trained_timerange: TimeRange,
        pair: str,
        strategy: IStrategy,
        dk: FreqaiDataKitchen,
        data_load_timerange: TimeRange,
    ):
        """
        Retrieve data and train model in single threaded mode (only used if model directory is
        empty upon startup for dry/live)
@@ -447,13 +465,13 @@ class IFreqaiModel(ABC):
        (larger than new_trained_timerange so that new_trained_timerange does not contain any NaNs)
        """

        corr_dataframes, base_dataframes = dk.get_base_and_corr_dataframes(data_load_timerange,
                                                                           pair)
        corr_dataframes, base_dataframes = dk.get_base_and_corr_dataframes(
            data_load_timerange, pair
        )

        unfiltered_dataframe = dk.use_strategy_to_populate_indicators(strategy,
                                                                      corr_dataframes,
                                                                      base_dataframes,
                                                                      pair)
        unfiltered_dataframe = dk.use_strategy_to_populate_indicators(
            strategy, corr_dataframes, base_dataframes, pair
        )

        unfiltered_dataframe = dk.slice_dataframe(new_trained_timerange, unfiltered_dataframe)

@@ -462,15 +480,15 @@ class IFreqaiModel(ABC):

        model = self.train(unfiltered_dataframe, pair, dk)

        self.dd.pair_dict[pair]['trained_timestamp'] = new_trained_timerange.stopts
        self.dd.pair_dict[pair]["trained_timestamp"] = new_trained_timerange.stopts
        dk.set_new_model_names(pair, new_trained_timerange)
        self.dd.pair_dict[pair]['first'] = False
        if self.dd.pair_dict[pair]['priority'] == 1 and self.scanning:
        self.dd.pair_dict[pair]["first"] = False
        if self.dd.pair_dict[pair]["priority"] == 1 and self.scanning:
            with self.lock:
                self.dd.pair_to_end_of_training_queue(pair)
        dk.save_data(model, coin=pair, keras_model=self.keras)

        if self.freqai_info.get('purge_old_models', False):
        if self.freqai_info.get("purge_old_models", False):
            self.dd.purge_old_models()
        # self.retrain = False

@@ -503,8 +521,9 @@ class IFreqaiModel(ABC):
        return

    @abstractmethod
    def predict(self, dataframe: DataFrame,
                dk: FreqaiDataKitchen, first: bool = True) -> Tuple[DataFrame, npt.ArrayLike]:
    def predict(
        self, dataframe: DataFrame, dk: FreqaiDataKitchen, first: bool = True
    ) -> Tuple[DataFrame, npt.ArrayLike]:
        """
        Filter the prediction features data and predict with it.
        :param:
@@ -45,8 +45,9 @@ class CatboostPredictionModel(IFreqaiModel):

        return dataframe["s"]

    def train(self, unfiltered_dataframe: DataFrame,
              pair: str, dk: FreqaiDataKitchen) -> Tuple[DataFrame, DataFrame]:
    def train(
        self, unfiltered_dataframe: DataFrame, pair: str, dk: FreqaiDataKitchen
    ) -> Tuple[DataFrame, DataFrame]:
        """
        Filter the training data and train a model to it. Train makes heavy use of the data
        kitchen for storing, saving, loading, and analyzing the data.
@@ -57,8 +58,7 @@ class CatboostPredictionModel(IFreqaiModel):
        :model: Trained model which can be used for inference (self.predict)
        """

        logger.info('--------------------Starting training '
                    f'{pair} --------------------')
        logger.info("--------------------Starting training " f"{pair} --------------------")

        # unfiltered_labels = self.make_labels(unfiltered_dataframe, dk)
        # filter the features requested by user in the configuration file and elegantly handle NaNs
@@ -78,13 +78,14 @@ class CatboostPredictionModel(IFreqaiModel):
        # optional additional data cleaning/analysis
        self.data_cleaning_train(dk)

        logger.info(f'Training model on {len(dk.data_dictionary["train_features"].columns)}'
                    ' features')
        logger.info(
            f'Training model on {len(dk.data_dictionary["train_features"].columns)}' " features"
        )
        logger.info(f'Training model on {len(data_dictionary["train_features"])} data points')

        model = self.fit(data_dictionary)

        logger.info(f'--------------------done training {pair}--------------------')
        logger.info(f"--------------------done training {pair}--------------------")

        return model

@@ -110,14 +111,17 @@ class CatboostPredictionModel(IFreqaiModel):

        model = CatBoostRegressor(
            allow_writing_files=False,
            verbose=100, early_stopping_rounds=400, **self.model_training_parameters
            verbose=100,
            early_stopping_rounds=400,
            **self.model_training_parameters,
        )
        model.fit(X=train_data, eval_set=test_data)

        return model

    def predict(self, unfiltered_dataframe: DataFrame,
                dk: FreqaiDataKitchen, first: bool = False) -> Tuple[DataFrame, DataFrame]:
    def predict(
        self, unfiltered_dataframe: DataFrame, dk: FreqaiDataKitchen, first: bool = False
    ) -> Tuple[DataFrame, DataFrame]:
        """
        Filter the prediction features data and predict with it.
        :param: unfiltered_dataframe: Full dataframe for the current backtest period.
@@ -141,8 +145,10 @@ class CatboostPredictionModel(IFreqaiModel):
        pred_df = DataFrame(predictions, columns=dk.label_list)

        for label in dk.label_list:
            pred_df[label] = ((pred_df[label] + 1) *
                              (dk.data["labels_max"][label] -
                               dk.data["labels_min"][label]) / 2) + dk.data["labels_min"][label]
            pred_df[label] = (
                (pred_df[label] + 1)
                * (dk.data["labels_max"][label] - dk.data["labels_min"][label])
                / 2
            ) + dk.data["labels_min"][label]

        return (pred_df, dk.do_predict)
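The denormalization loop above inverts the min-max scaling applied to labels at training
time: a prediction p in [-1, 1] maps back to real units via ((p + 1) * (max - min) / 2) + min.
A quick numeric check (values illustrative):

    labels_min, labels_max = -0.05, 0.05  # label was scaled from [-0.05, 0.05] to [-1, 1]
    p = 0.5                               # raw model output
    real = ((p + 1) * (labels_max - labels_min) / 2) + labels_min
    # real == 0.025; p == -1 maps to -0.05 and p == +1 maps to +0.05
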
@@ -28,8 +28,9 @@ class CatboostPredictionMultiModel(IFreqaiModel):

        return dataframe

    def train(self, unfiltered_dataframe: DataFrame,
              pair: str, dk: FreqaiDataKitchen) -> Tuple[DataFrame, DataFrame]:
    def train(
        self, unfiltered_dataframe: DataFrame, pair: str, dk: FreqaiDataKitchen
    ) -> Tuple[DataFrame, DataFrame]:
        """
        Filter the training data and train a model to it. Train makes heavy use of the data
        kitchen for storing, saving, loading, and analyzing the data.
@@ -40,8 +41,7 @@ class CatboostPredictionMultiModel(IFreqaiModel):
        :model: Trained model which can be used for inference (self.predict)
        """

        logger.info('--------------------Starting training '
                    f'{pair} --------------------')
        logger.info("--------------------Starting training " f"{pair} --------------------")

        # unfiltered_labels = self.make_labels(unfiltered_dataframe, dk)
        # filter the features requested by user in the configuration file and elegantly handle NaNs
@@ -61,13 +61,14 @@ class CatboostPredictionMultiModel(IFreqaiModel):
        # optional additional data cleaning/analysis
        self.data_cleaning_train(dk)

        logger.info(f'Training model on {len(dk.data_dictionary["train_features"].columns)}'
                    ' features')
        logger.info(
            f'Training model on {len(dk.data_dictionary["train_features"].columns)}' " features"
        )
        logger.info(f'Training model on {len(data_dictionary["train_features"])} data points')

        model = self.fit(data_dictionary)

        logger.info(f'--------------------done training {pair}--------------------')
        logger.info(f"--------------------done training {pair}--------------------")

        return model

@@ -80,22 +81,26 @@ class CatboostPredictionMultiModel(IFreqaiModel):
        """

        cbr = CatBoostRegressor(
            allow_writing_files=False, gpu_ram_part=0.5,
            verbose=100, early_stopping_rounds=400, **self.model_training_parameters
            allow_writing_files=False,
            gpu_ram_part=0.5,
            verbose=100,
            early_stopping_rounds=400,
            **self.model_training_parameters,
        )

        X = data_dictionary["train_features"]
        y = data_dictionary["train_labels"]
        # eval_set = (data_dictionary["test_features"], data_dictionary["test_labels"])
        sample_weight = data_dictionary['train_weights']
        sample_weight = data_dictionary["train_weights"]

        model = MultiOutputRegressor(estimator=cbr)
        model.fit(X=X, y=y, sample_weight=sample_weight)  # , eval_set=eval_set)

        return model

    def predict(self, unfiltered_dataframe: DataFrame,
                dk: FreqaiDataKitchen, first: bool = False) -> Tuple[DataFrame, DataFrame]:
    def predict(
        self, unfiltered_dataframe: DataFrame, dk: FreqaiDataKitchen, first: bool = False
    ) -> Tuple[DataFrame, DataFrame]:
        """
        Filter the prediction features data and predict with it.
        :param: unfiltered_dataframe: Full dataframe for the current backtest period.
@@ -119,8 +124,10 @@ class CatboostPredictionMultiModel(IFreqaiModel):
        pred_df = DataFrame(predictions, columns=dk.label_list)

        for label in dk.label_list:
            pred_df[label] = ((pred_df[label] + 1) *
                              (dk.data["labels_max"][label] -
                               dk.data["labels_min"][label]) / 2) + dk.data["labels_min"][label]
            pred_df[label] = (
                (pred_df[label] + 1)
                * (dk.data["labels_max"][label] - dk.data["labels_min"][label])
                / 2
            ) + dk.data["labels_min"][label]

        return (pred_df, dk.do_predict)
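`MultiOutputRegressor` here is the stock scikit-learn wrapper: it clones the CatBoost
estimator once per label column and fits each clone independently, which is also why
`eval_set` (a single-target CatBoost feature) is commented out. A minimal standalone sketch
of the same pattern (shapes illustrative):

    import numpy as np
    from catboost import CatBoostRegressor
    from sklearn.multioutput import MultiOutputRegressor

    X = np.random.rand(100, 8)   # 100 rows, 8 features
    y = np.random.rand(100, 3)   # 3 label columns -> 3 cloned regressors
    model = MultiOutputRegressor(estimator=CatBoostRegressor(verbose=0))
    model.fit(X, y)
    preds = model.predict(X)     # shape (100, 3), one column per label
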
@@ -27,8 +27,9 @@ class LightGBMPredictionModel(IFreqaiModel):

        return dataframe

    def train(self, unfiltered_dataframe: DataFrame,
              pair: str, dk: FreqaiDataKitchen) -> Tuple[DataFrame, DataFrame]:
    def train(
        self, unfiltered_dataframe: DataFrame, pair: str, dk: FreqaiDataKitchen
    ) -> Tuple[DataFrame, DataFrame]:
        """
        Filter the training data and train a model to it. Train makes heavy use of the data
        kitchen for storing, saving, loading, and analyzing the data.
@@ -39,8 +40,7 @@ class LightGBMPredictionModel(IFreqaiModel):
        :model: Trained model which can be used for inference (self.predict)
        """

        logger.info('--------------------Starting training '
                    f'{pair} --------------------')
        logger.info("--------------------Starting training " f"{pair} --------------------")

        # unfiltered_labels = self.make_labels(unfiltered_dataframe, dk)
        # filter the features requested by user in the configuration file and elegantly handle NaNs
@@ -60,13 +60,14 @@ class LightGBMPredictionModel(IFreqaiModel):
        # optional additional data cleaning/analysis
        self.data_cleaning_train(dk)

        logger.info(f'Training model on {len(dk.data_dictionary["train_features"].columns)}'
                    ' features')
        logger.info(
            f'Training model on {len(dk.data_dictionary["train_features"].columns)}' " features"
        )
        logger.info(f'Training model on {len(data_dictionary["train_features"])} data points')

        model = self.fit(data_dictionary)

        logger.info(f'--------------------done training {pair}--------------------')
        logger.info(f"--------------------done training {pair}--------------------")

        return model

@@ -84,13 +85,14 @@ class LightGBMPredictionModel(IFreqaiModel):
        X = data_dictionary["train_features"]
        y = data_dictionary["train_labels"]

        model = LGBMRegressor(seed=42, n_estimators=2000, verbosity=1, force_col_wise=True)
        model = LGBMRegressor(**self.model_training_parameters)
        model.fit(X=X, y=y, eval_set=eval_set)

        return model

    def predict(self, unfiltered_dataframe: DataFrame,
                dk: FreqaiDataKitchen) -> Tuple[DataFrame, DataFrame]:
    def predict(
        self, unfiltered_dataframe: DataFrame, dk: FreqaiDataKitchen
    ) -> Tuple[DataFrame, DataFrame]:
        """
        Filter the prediction features data and predict with it.
        :param: unfiltered_dataframe: Full dataframe for the current backtest period.
@@ -116,8 +118,10 @@ class LightGBMPredictionModel(IFreqaiModel):
        pred_df = DataFrame(predictions, columns=dk.label_list)

        for label in dk.label_list:
            pred_df[label] = ((pred_df[label] + 1) *
                              (dk.data["labels_max"][label] -
                               dk.data["labels_min"][label]) / 2) + dk.data["labels_min"][label]
            pred_df[label] = (
                (pred_df[label] + 1)
                * (dk.data["labels_max"][label] - dk.data["labels_min"][label])
                / 2
            ) + dk.data["labels_min"][label]

        return (pred_df, dk.do_predict)
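Swapping the hard-coded LGBMRegressor arguments for `**self.model_training_parameters` moves
hyperparameters into the `freqai` config block; the previous defaults would now be expressed
as (a sketch, keys are plain LightGBM constructor kwargs):

    "model_training_parameters": {
        "seed": 42,
        "n_estimators": 2000,
        "verbosity": 1,
        "force_col_wise": True,
    }
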
@@ -4,7 +4,7 @@ Freqtrade is the main module of this bot. It contains the class Freqtrade()
import copy
import logging
import traceback
from datetime import datetime, time, timezone
from datetime import datetime, time, timedelta, timezone
from math import isclose
from threading import Lock
from typing import Any, Dict, List, Optional, Tuple
@@ -67,14 +67,12 @@ class FreqtradeBot(LoggingMixin):

        self.exchange = ExchangeResolver.load_exchange(self.config['exchange']['name'], self.config)

        init_db(self.config.get('db_url', None), clean_open_orders=self.config['dry_run'])
        init_db(self.config['db_url'])

        self.wallets = Wallets(self.config, self.exchange)

        PairLocks.timeframe = self.config['timeframe']

        self.protections = ProtectionManager(self.config, self.strategy.protections)

        # RPC runs in separate threads, can start handling external commands just after
        # initialization, even before Freqtradebot has a chance to start its throttling,
        # so anything in the Freqtradebot instance should be ready (initialized), including
@@ -123,7 +121,9 @@ class FreqtradeBot(LoggingMixin):
                self._schedule.every().day.at(t).do(update)
        self.last_process = datetime(1970, 1, 1, tzinfo=timezone.utc)

        self.strategy.bot_start()
        self.strategy.ft_bot_start()
        # Initialize protections AFTER bot start - otherwise parameters are not loaded.
        self.protections = ProtectionManager(self.config, self.strategy.protections)

    def notify_status(self, msg: str) -> None:
        """
@@ -227,7 +227,7 @@ class FreqtradeBot(LoggingMixin):
        Notify the user when the bot is stopped (not reloaded)
        and there are still open trades active.
        """
        open_trades = Trade.get_trades([Trade.is_open.is_(True)]).all()
        open_trades = Trade.get_open_trades()

        if len(open_trades) != 0 and self.state != State.RELOAD_CONFIG:
            msg = {
@@ -299,7 +299,17 @@ class FreqtradeBot(LoggingMixin):
                fo = self.exchange.fetch_order_or_stoploss_order(order.order_id, order.ft_pair,
                                                                 order.ft_order_side == 'stoploss')

                self.update_trade_state(order.trade, order.order_id, fo)
                self.update_trade_state(order.trade, order.order_id, fo,
                                        stoploss_order=(order.ft_order_side == 'stoploss'))

            except InvalidOrderException as e:
                logger.warning(f"Error updating Order {order.order_id} due to {e}.")
                if order.order_date_utc - timedelta(days=5) < datetime.now(timezone.utc):
                    logger.warning(
                        "Order is older than 5 days. Assuming order was fully cancelled.")
                    fo = order.to_ccxt_object()
                    fo['status'] = 'canceled'
                    self.handle_timedout_order(fo, order.trade)

            except ExchangeError as e:

@@ -638,7 +648,7 @@ class FreqtradeBot(LoggingMixin):
        )
        order_obj = Order.parse_from_ccxt_object(order, pair, side)
        order_id = order['id']
        order_status = order.get('status', None)
        order_status = order.get('status')
        logger.info(f"Order #{order_id} was created for {pair} and status is {order_status}.")

        # we assume the order is executed at the price requested
@@ -780,7 +790,7 @@ class FreqtradeBot(LoggingMixin):
            current_rate=enter_limit_requested,
            proposed_leverage=1.0,
            max_leverage=max_leverage,
            side=trade_side,
            side=trade_side, entry_tag=entry_tag,
        ) if self.trading_mode != TradingMode.SPOT else 1.0
        # Cap leverage between 1.0 and max_leverage.
        leverage = min(max(leverage, 1.0), max_leverage)
@@ -949,6 +959,29 @@ class FreqtradeBot(LoggingMixin):
            logger.debug(f'Found no {exit_signal_type} signal for %s.', trade)
            return False

    def _check_and_execute_exit(self, trade: Trade, exit_rate: float,
                                enter: bool, exit_: bool, exit_tag: Optional[str]) -> bool:
        """
        Check and execute trade exit
        """
        exits: List[ExitCheckTuple] = self.strategy.should_exit(
            trade,
            exit_rate,
            datetime.now(timezone.utc),
            enter=enter,
            exit_=exit_,
            force_stoploss=self.edge.stoploss(trade.pair) if self.edge else 0
        )
        for should_exit in exits:
            if should_exit.exit_flag:
                exit_tag1 = exit_tag if should_exit.exit_type == ExitType.EXIT_SIGNAL else None
                logger.info(f'Exit for {trade.pair} detected. Reason: {should_exit.exit_type}'
                            f'{f" Tag: {exit_tag1}" if exit_tag1 is not None else ""}')
                exited = self.execute_trade_exit(trade, exit_rate, should_exit, exit_tag=exit_tag1)
                if exited:
                    return True
        return False
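The reworked `_check_and_execute_exit` reflects `should_exit` now returning a list of
`ExitCheckTuple`s rather than a single tuple: every flagged exit reason is tried in order and
iteration stops at the first one that actually closes the trade. A condensed sketch of the
control flow (names from this diff; wiring simplified):

    exits = strategy.should_exit(trade, rate, now, enter=enter, exit_=exit_)  # List[ExitCheckTuple]
    for should_exit in exits:
        if should_exit.exit_flag and execute_trade_exit(trade, rate, should_exit):
            break  # the first exit that actually goes through wins
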
    def create_stoploss_order(self, trade: Trade, stop_price: float) -> bool:
        """
        Abstracts creating stoploss orders from the logic.
@@ -1018,7 +1051,7 @@ class FreqtradeBot(LoggingMixin):
                # Lock pair for one candle to prevent immediate rebuys
                self.strategy.lock_pair(trade.pair, datetime.now(timezone.utc),
                                        reason='Auto lock')
                self._notify_exit(trade, "stoploss")
                self._notify_exit(trade, "stoploss", True)
            return True

        if trade.open_order_id or not trade.is_open:
@@ -1100,27 +1133,6 @@ class FreqtradeBot(LoggingMixin):
                logger.warning(f"Could not create trailing stoploss order "
                               f"for pair {trade.pair}.")

    def _check_and_execute_exit(self, trade: Trade, exit_rate: float,
                                enter: bool, exit_: bool, exit_tag: Optional[str]) -> bool:
        """
        Check and execute trade exit
        """
        should_exit: ExitCheckTuple = self.strategy.should_exit(
            trade,
            exit_rate,
            datetime.now(timezone.utc),
            enter=enter,
            exit_=exit_,
            force_stoploss=self.edge.stoploss(trade.pair) if self.edge else 0
        )

        if should_exit.exit_flag:
            logger.info(f'Exit for {trade.pair} detected. Reason: {should_exit.exit_type}'
                        f'Tag: {exit_tag if exit_tag is not None else "None"}')
            self.execute_trade_exit(trade, exit_rate, should_exit, exit_tag=exit_tag)
            return True
        return False

    def manage_open_orders(self) -> None:
        """
        Management of open orders on exchange. Unfilled orders might be cancelled if timeout
@@ -1201,15 +1213,15 @@ class FreqtradeBot(LoggingMixin):
            current_order_rate=order_obj.price, entry_tag=trade.enter_tag,
            side=trade.entry_side)

        full_cancel = False
        replacing = True
        cancel_reason = constants.CANCEL_REASON['REPLACE']
        if not adjusted_entry_price:
            full_cancel = True if trade.nr_of_successful_entries == 0 else False
            replacing = False
            cancel_reason = constants.CANCEL_REASON['USER_CANCEL']
        if order_obj.price != adjusted_entry_price:
            # cancel existing order if new price is supplied or None
            self.handle_cancel_enter(trade, order, cancel_reason,
                                     allow_full_cancel=full_cancel)
                                     replacing=replacing)
            if adjusted_entry_price:
                # place new order only if new price is supplied
                self.execute_entry(
@@ -1243,10 +1255,11 @@ class FreqtradeBot(LoggingMixin):

    def handle_cancel_enter(
            self, trade: Trade, order: Dict, reason: str,
            allow_full_cancel: Optional[bool] = True
            replacing: Optional[bool] = False
    ) -> bool:
        """
        Buy cancel - cancel order
        :param replacing: Replacing order - prevent trade deletion.
        :return: True if order was fully cancelled
        """
        was_trade_fully_canceled = False
@@ -1284,7 +1297,7 @@ class FreqtradeBot(LoggingMixin):
        if isclose(filled_amount, 0.0, abs_tol=constants.MATH_CLOSE_PREC):
            # if trade is not partially completed and it's the only order, just delete the trade
            open_order_count = len([order for order in trade.orders if order.status == 'open'])
            if open_order_count <= 1 and allow_full_cancel:
            if open_order_count <= 1 and trade.nr_of_successful_entries == 0 and not replacing:
                logger.info(f'{side} order fully cancelled. Removing {trade} from database.')
                trade.delete()
                was_trade_fully_canceled = True
@@ -1293,7 +1306,7 @@ class FreqtradeBot(LoggingMixin):
                # FIXME TODO: This could possibly reworked to not duplicate the code 15 lines below.
                self.update_trade_state(trade, trade.open_order_id, corder)
                trade.open_order_id = None
                logger.info(f'Partial {side} order timeout for {trade}.')
                logger.info(f'{side} Order timeout for {trade}.')
        else:
            # if trade is partially complete, edit the stake details for the trade
            # and close the order
@@ -1405,7 +1418,7 @@ class FreqtradeBot(LoggingMixin):
        :param trade: Trade instance
        :param limit: limit rate for the sell order
        :param exit_check: CheckTuple with signal and reason
        :return: True if it succeeds (supported) False (not supported)
        :return: True if it succeeds False
        """
        trade.funding_fees = self.exchange.get_funding_fees(
            pair=trade.pair,
@@ -1414,6 +1427,7 @@ class FreqtradeBot(LoggingMixin):
            open_date=trade.open_date_utc,
        )
        exit_type = 'exit'
        exit_reason = exit_tag or exit_check.exit_reason
        if exit_check.exit_type in (ExitType.STOP_LOSS, ExitType.TRAILING_STOP_LOSS):
            exit_type = 'stoploss'

@@ -1431,7 +1445,7 @@ class FreqtradeBot(LoggingMixin):
                pair=trade.pair, trade=trade,
                current_time=datetime.now(timezone.utc),
                proposed_rate=proposed_limit_rate, current_profit=current_profit,
                exit_tag=exit_check.exit_reason)
                exit_tag=exit_reason)

            limit = self.get_valid_price(custom_exit_price, proposed_limit_rate)

@@ -1448,10 +1462,10 @@ class FreqtradeBot(LoggingMixin):

        if not strategy_safe_wrapper(self.strategy.confirm_trade_exit, default_retval=True)(
                pair=trade.pair, trade=trade, order_type=order_type, amount=amount, rate=limit,
                time_in_force=time_in_force, exit_reason=exit_check.exit_reason,
                sell_reason=exit_check.exit_reason,  # sellreason -> compatibility
                time_in_force=time_in_force, exit_reason=exit_reason,
                sell_reason=exit_reason,  # sellreason -> compatibility
                current_time=datetime.now(timezone.utc)):
            logger.info(f"User requested abortion of exiting {trade.pair}")
            logger.info(f"User requested abortion of {trade.pair} exit.")
            return False

        try:
@@ -1478,7 +1492,7 @@ class FreqtradeBot(LoggingMixin):
        trade.open_order_id = order['id']
        trade.exit_order_status = ''
        trade.close_rate_requested = limit
        trade.exit_reason = exit_tag or exit_check.exit_reason
        trade.exit_reason = exit_reason

        # Lock pair for one candle to prevent immediate re-trading
        self.strategy.lock_pair(trade.pair, datetime.now(timezone.utc),
@@ -1528,7 +1542,7 @@ class FreqtradeBot(LoggingMixin):
            'open_date': trade.open_date,
            'close_date': trade.close_date or datetime.utcnow(),
            'stake_currency': self.config['stake_currency'],
            'fiat_currency': self.config.get('fiat_display_currency', None),
            'fiat_currency': self.config.get('fiat_display_currency'),
        }

        if 'fiat_display_currency' in self.config:
@@ -1639,7 +1653,7 @@ class FreqtradeBot(LoggingMixin):

        if order['status'] in constants.NON_OPEN_EXCHANGE_STATES:
            # If an entry order was closed, force update on stoploss on exchange
            if order.get('side', None) == trade.entry_side:
            if order.get('side') == trade.entry_side:
                trade = self.cancel_stoploss_on_exchange(trade)
                # TODO: Margin will need to use interest_rate as well.
                # interest_rate = self.exchange.get_interest_rate()
@@ -1662,7 +1676,7 @@ class FreqtradeBot(LoggingMixin):
        if send_msg and not stoploss_order and not trade.open_order_id:
            self._notify_exit(trade, '', True)
            self.handle_protections(trade.pair, trade.trade_direction)
        elif send_msg and not trade.open_order_id:
        elif send_msg and not trade.open_order_id and not stoploss_order:
            # Enter fill
            self._notify_enter(trade, order, fill=True)
@@ -87,7 +87,7 @@ class Backtesting:
        self.exchange = ExchangeResolver.load_exchange(self._exchange_name, self.config)
        self.dataprovider = DataProvider(self.config, self.exchange)

        if self.config.get('strategy_list', None):
        if self.config.get('strategy_list'):
            for strat in list(self.config['strategy_list']):
                stratconf = deepcopy(self.config)
                stratconf['strategy'] = strat
@@ -187,7 +187,9 @@ class Backtesting:
        # since a "perfect" stoploss-exit is assumed anyway
        # And the regular "stoploss" function would not apply to that case
        self.strategy.order_types['stoploss_on_exchange'] = False
        self.strategy.bot_start()

        self.strategy.ft_bot_start()
        strategy_safe_wrapper(self.strategy.bot_loop_start, supress_error=True)()

    def _load_protections(self, strategy: IStrategy):
        if self.config.get('enable_protections', False):
@@ -280,8 +282,12 @@ class Backtesting:
            if pair not in self.exchange._leverage_tiers:
                unavailable_pairs.append(pair)
                continue
            self.futures_data[pair] = funding_rates_dict[pair].merge(
                mark_rates_dict[pair], on='date', how="inner", suffixes=["_fund", "_mark"])

            self.futures_data[pair] = self.exchange.combine_funding_and_mark(
                funding_rates=funding_rates_dict[pair],
                mark_rates=mark_rates_dict[pair],
                futures_funding_rate=self.config.get('futures_funding_rate', None),
            )

        if unavailable_pairs:
            raise OperationalException(
@@ -302,6 +308,9 @@ class Backtesting:
        self.rejected_trades = 0
        self.timedout_entry_orders = 0
        self.timedout_exit_orders = 0
        self.canceled_trade_entries = 0
        self.canceled_entry_orders = 0
        self.replaced_entry_orders = 0
        self.dataprovider.clear_cache()
        if enable_protections:
            self._load_protections(self.strategy)
@@ -498,7 +507,8 @@ class Backtesting:
        stake_available = self.wallets.get_available_stake_amount()
        stake_amount = strategy_safe_wrapper(self.strategy.adjust_trade_position,
                                             default_retval=None)(
            trade=trade, current_time=row[DATE_IDX].to_pydatetime(), current_rate=row[OPEN_IDX],
            trade=trade,  # type: ignore[arg-type]
            current_time=row[DATE_IDX].to_pydatetime(), current_rate=row[OPEN_IDX],
            current_profit=current_profit, min_stake=min_stake,
            max_stake=min(max_stake, stake_available))

@@ -529,64 +539,76 @@ class Backtesting:
        if check_adjust_entry:
            trade = self._get_adjust_trade_entry_for_candle(trade, row)

        exit_candle_time: datetime = row[DATE_IDX].to_pydatetime()
        enter = row[SHORT_IDX] if trade.is_short else row[LONG_IDX]
        exit_sig = row[ESHORT_IDX] if trade.is_short else row[ELONG_IDX]
        exit_ = self.strategy.should_exit(
            trade, row[OPEN_IDX], exit_candle_time,  # type: ignore
        exits = self.strategy.should_exit(
            trade, row[OPEN_IDX], row[DATE_IDX].to_pydatetime(),  # type: ignore
            enter=enter, exit_=exit_sig,
            low=row[LOW_IDX], high=row[HIGH_IDX]
        )
        for exit_ in exits:
            t = self._get_exit_for_signal(trade, row, exit_)
            if t:
                return t
        return None

    def _get_exit_for_signal(self, trade: LocalTrade, row: Tuple,
                             exit_: ExitCheckTuple) -> Optional[LocalTrade]:

        exit_candle_time: datetime = row[DATE_IDX].to_pydatetime()
        if exit_.exit_flag:
            trade.close_date = exit_candle_time
            exit_reason = exit_.exit_reason

            trade_dur = int((trade.close_date_utc - trade.open_date_utc).total_seconds() // 60)
            try:
                closerate = self._get_close_rate(row, trade, exit_, trade_dur)
                close_rate = self._get_close_rate(row, trade, exit_, trade_dur)
            except ValueError:
                return None
            # call the custom exit price, with default value as previous closerate
            current_profit = trade.calc_profit_ratio(closerate)
            # call the custom exit price, with default value as previous close_rate
            current_profit = trade.calc_profit_ratio(close_rate)
            order_type = self.strategy.order_types['exit']
            if exit_.exit_type in (ExitType.EXIT_SIGNAL, ExitType.CUSTOM_EXIT):
                # Checks and adds an exit tag, after checking that the length of the
                # row has the length for an exit tag column
                if (
                    len(row) > EXIT_TAG_IDX
                    and row[EXIT_TAG_IDX] is not None
                    and len(row[EXIT_TAG_IDX]) > 0
                    and exit_.exit_type in (ExitType.EXIT_SIGNAL,)
                ):
                    exit_reason = row[EXIT_TAG_IDX]
                # Custom exit pricing only for exit-signals
                if order_type == 'limit':
                    closerate = strategy_safe_wrapper(self.strategy.custom_exit_price,
                                                      default_retval=closerate)(
                        pair=trade.pair, trade=trade,
                    close_rate = strategy_safe_wrapper(self.strategy.custom_exit_price,
                                                       default_retval=close_rate)(
                        pair=trade.pair,
                        trade=trade,  # type: ignore[arg-type]
                        current_time=exit_candle_time,
                        proposed_rate=closerate, current_profit=current_profit,
                        exit_tag=exit_.exit_reason)
                        proposed_rate=close_rate, current_profit=current_profit,
                        exit_tag=exit_reason)
                    # We can't place orders lower than current low.
                    # freqtrade does not support this in live, and the order would fill immediately
                    if trade.is_short:
                        closerate = min(closerate, row[HIGH_IDX])
                        close_rate = min(close_rate, row[HIGH_IDX])
                    else:
                        closerate = max(closerate, row[LOW_IDX])
                        close_rate = max(close_rate, row[LOW_IDX])
            # Confirm trade exit:
            time_in_force = self.strategy.order_time_in_force['exit']

            if not strategy_safe_wrapper(self.strategy.confirm_trade_exit, default_retval=True)(
                    pair=trade.pair, trade=trade, order_type='limit', amount=trade.amount,
                    rate=closerate,
                    pair=trade.pair,
                    trade=trade,  # type: ignore[arg-type]
                    order_type='limit',
                    amount=trade.amount,
                    rate=close_rate,
                    time_in_force=time_in_force,
                    sell_reason=exit_.exit_reason,  # deprecated
                    exit_reason=exit_.exit_reason,
                    sell_reason=exit_reason,  # deprecated
                    exit_reason=exit_reason,
                    current_time=exit_candle_time):
                return None

            trade.exit_reason = exit_.exit_reason

            # Checks and adds an exit tag, after checking that the length of the
            # row has the length for an exit tag column
            if (
                len(row) > EXIT_TAG_IDX
                and row[EXIT_TAG_IDX] is not None
                and len(row[EXIT_TAG_IDX]) > 0
                and exit_.exit_type in (ExitType.EXIT_SIGNAL,)
            ):
                trade.exit_reason = row[EXIT_TAG_IDX]
            trade.exit_reason = exit_reason

            self.order_id_counter += 1
            order = Order(
@@ -602,12 +624,12 @@ class Backtesting:
                side=trade.exit_side,
                order_type=order_type,
                status="open",
                price=closerate,
                average=closerate,
                price=close_rate,
                average=close_rate,
                amount=trade.amount,
                filled=0,
                remaining=trade.amount,
                cost=trade.amount * closerate,
                cost=trade.amount * close_rate,
            )
            trade.orders.append(order)
            return trade
@@ -654,7 +676,7 @@ class Backtesting:
        return self._get_exit_trade_entry_for_candle(trade, row)

    def get_valid_price_and_stake(
        self, pair: str, row: Tuple, propose_rate: float, stake_amount: Optional[float],
        self, pair: str, row: Tuple, propose_rate: float, stake_amount: float,
        direction: LongShort, current_time: datetime, entry_tag: Optional[str],
        trade: Optional[LocalTrade], order_type: str
    ) -> Tuple[float, float, float, float]:
@@ -688,7 +710,7 @@ class Backtesting:
            current_rate=row[OPEN_IDX],
            proposed_leverage=1.0,
            max_leverage=max_leverage,
            side=direction,
            side=direction, entry_tag=entry_tag,
        ) if self._can_short else 1.0
        # Cap leverage between 1.0 and max_leverage.
        leverage = min(max(leverage, 1.0), max_leverage)
@@ -728,8 +750,9 @@ class Backtesting:
        order_type = self.strategy.order_types['entry']
        pos_adjust = trade is not None and requested_rate is None

        stake_amount_ = stake_amount or (trade.stake_amount if trade else 0.0)
        propose_rate, stake_amount, leverage, min_stake_amount = self.get_valid_price_and_stake(
            pair, row, row[OPEN_IDX], stake_amount, direction, current_time, entry_tag, trade,
            pair, row, row[OPEN_IDX], stake_amount_, direction, current_time, entry_tag, trade,
            order_type
        )

@@ -878,27 +901,34 @@ class Backtesting:
            self.protections.stop_per_pair(pair, current_time, side)
            self.protections.global_stop(current_time, side)

    def manage_open_orders(self, trade: LocalTrade, current_time, row: Tuple) -> bool:
    def manage_open_orders(self, trade: LocalTrade, current_time: datetime, row: Tuple) -> bool:
        """
        Check if any open order needs to be cancelled or replaced.
        Returns True if the trade should be deleted.
        """
        for order in [o for o in trade.orders if o.ft_is_open]:
            if self.check_order_cancel(trade, order, current_time):
            oc = self.check_order_cancel(trade, order, current_time)
            if oc:
                # delete trade due to order timeout
                return True
            elif self.check_order_replace(trade, order, current_time, row):
            elif oc is None and self.check_order_replace(trade, order, current_time, row):
                # delete trade due to user request
                self.canceled_trade_entries += 1
                return True
        # default maintain trade
        return False

    def check_order_cancel(self, trade: LocalTrade, order: Order, current_time) -> bool:
    def check_order_cancel(
            self, trade: LocalTrade, order: Order, current_time: datetime) -> Optional[bool]:
        """
        Check if current analyzed order has to be canceled.
        Returns True if the trade should be Deleted (initial order was canceled).
        Returns True if the trade should be Deleted (initial order was canceled),
        False if it's Canceled, None if the order is still active.
        """
        timedout = self.strategy.ft_check_timed_out(trade, order, current_time)
        timedout = self.strategy.ft_check_timed_out(
            trade,  # type: ignore[arg-type]
            order, current_time)
        if timedout:
            if order.side == trade.entry_side:
                self.timedout_entry_orders += 1
@@ -908,12 +938,15 @@ class Backtesting:
                else:
                    # Close additional entry order
                    del trade.orders[trade.orders.index(order)]
                    trade.open_order_id = None
                return False
            if order.side == trade.exit_side:
                self.timedout_exit_orders += 1
                # Close exit order and retry exiting on next signal.
                del trade.orders[trade.orders.index(order)]

                return False
                trade.open_order_id = None
                return False
        return None
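`check_order_cancel` is now tri-state, and `manage_open_orders` branches on all three
outcomes: True deletes the trade, False means an order was cancelled without a replacement
attempt, and None means the order is still live and may be replaced. Schematically:

    oc = check_order_cancel(trade, order, now)
    if oc:            # True  -> initial order timed out, drop the trade
        delete_trade()
    elif oc is None:  # None  -> order still active, consider replacing it
        maybe_replace(order)
    # False -> a follow-up order was cancelled; keep the trade, nothing more to do
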
def check_order_replace(self, trade: LocalTrade, order: Order, current_time,
|
||||
row: Tuple) -> bool:
|
||||
@@ -927,7 +960,8 @@ class Backtesting:
|
||||
if order.side == trade.entry_side and current_time > order.order_date_utc:
|
||||
requested_rate = strategy_safe_wrapper(self.strategy.adjust_entry_price,
|
||||
default_retval=order.price)(
|
||||
trade=trade, order=order, pair=trade.pair, current_time=current_time,
|
||||
trade=trade, # type: ignore[arg-type]
order=order, pair=trade.pair, current_time=current_time,
proposed_rate=row[OPEN_IDX], current_order_rate=order.price,
entry_tag=trade.enter_tag, side=trade.trade_direction
) # default value is current order price
@@ -938,6 +972,8 @@ class Backtesting:
return False
else:
del trade.orders[trade.orders.index(order)]
trade.open_order_id = None
self.canceled_entry_orders += 1

# place new order if result was not None
if requested_rate:
@@ -945,6 +981,7 @@ class Backtesting:
requested_rate=requested_rate,
requested_stake=(order.remaining * order.price),
direction='short' if trade.is_short else 'long')
self.replaced_entry_orders += 1
else:
# assumption: there can't be multiple open entry orders at any given time
return (trade.nr_of_successful_entries == 0)
@@ -1024,6 +1061,7 @@ class Backtesting:
# Close trade
open_trade_count -= 1
open_trades[pair].remove(t)
LocalTrade.trades_open.remove(t)
self.wallets.update()

# 2. Process entries.
@@ -1047,6 +1085,8 @@ class Backtesting:
open_trade_count += 1
# logger.debug(f"{pair} - Emulate creation of new trade: {trade}.")
open_trades[pair].append(trade)
LocalTrade.add_bt_trade(trade)
self.wallets.update()

for trade in list(open_trades[pair]):
# 3. Process entry orders.
@@ -1054,7 +1094,6 @@ class Backtesting:
if order and self._get_order_filled(order.price, row):
order.close_bt_order(current_time, trade)
trade.open_order_id = None
LocalTrade.add_bt_trade(trade)
self.wallets.update()

# 4. Create exit orders (if any)
@@ -1064,6 +1103,7 @@ class Backtesting:
# 5. Process exit orders.
order = trade.select_order(trade.exit_side, is_open=True)
if order and self._get_order_filled(order.price, row):
order.close_bt_order(current_time, trade)
trade.open_order_id = None
trade.close_date = current_time
trade.close(order.price, show_msg=False)
@@ -1092,6 +1132,9 @@ class Backtesting:
'rejected_signals': self.rejected_trades,
'timedout_entry_orders': self.timedout_entry_orders,
'timedout_exit_orders': self.timedout_exit_orders,
'canceled_trade_entries': self.canceled_trade_entries,
'canceled_entry_orders': self.canceled_entry_orders,
'replaced_entry_orders': self.replaced_entry_orders,
'final_balance': self.wallets.get_total(self.strategy.config['stake_currency']),
}

@@ -1103,8 +1146,6 @@ class Backtesting:
backtest_start_time = datetime.now(timezone.utc)
self._set_strategy(strat)

strategy_safe_wrapper(self.strategy.bot_loop_start, supress_error=True)()

# Use max_open_trades in backtesting, except --disable-max-market-positions is set
if self.config.get('use_max_market_positions', True):
# Must come from strategy config, as the strategy may modify this setting.
@@ -1229,13 +1270,14 @@ class Backtesting:
self.results['strategy_comparison'].extend(results['strategy_comparison'])
else:
self.results = results

dt_appendix = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
if self.config.get('export', 'none') in ('trades', 'signals'):
store_backtest_stats(self.config['exportfilename'], self.results)
store_backtest_stats(self.config['exportfilename'], self.results, dt_appendix)

if (self.config.get('export', 'none') == 'signals' and
self.dataprovider.runmode == RunMode.BACKTEST):
store_backtest_signal_candles(self.config['exportfilename'], self.processed_dfs)
store_backtest_signal_candles(
self.config['exportfilename'], self.processed_dfs, dt_appendix)

# Results may be mixed up now. Sort them so they follow --strategy-list order.
if 'strategy_list' in self.config and len(self.results) > 0:
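Both export calls above now receive the same dt_appendix, so the trade-stats file and the signals file carry matching timestamps. A minimal standalone sketch of the effect (plain Python, not freqtrade code):

from datetime import datetime

dt_appendix = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
stats_file = f"backtest-result-{dt_appendix}.json"
signals_file = f"backtest-result-{dt_appendix}_signals.pkl"
# One shared suffix pairs the two files reliably; two separate now() calls
# could straddle a second boundary and produce mismatched names.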
@@ -44,7 +44,7 @@ class EdgeCli:

self.edge._timerange = TimeRange.parse_timerange(None if self.config.get(
'timerange') is None else str(self.config.get('timerange')))
self.strategy.bot_start()
self.strategy.ft_bot_start()

def start(self) -> None:
result = self.edge.calculate(self.config['exchange']['pair_whitelist'])
@@ -27,8 +27,7 @@ from freqtrade.misc import deep_merge_dicts, file_dump_json, plural
from freqtrade.optimize.backtesting import Backtesting
# Import IHyperOpt and IHyperOptLoss to allow unpickling classes from these modules
from freqtrade.optimize.hyperopt_auto import HyperOptAuto
from freqtrade.optimize.hyperopt_interface import IHyperOpt # noqa: F401
from freqtrade.optimize.hyperopt_loss_interface import IHyperOptLoss # noqa: F401
from freqtrade.optimize.hyperopt_loss_interface import IHyperOptLoss
from freqtrade.optimize.hyperopt_tools import HyperoptTools, hyperopt_serializer
from freqtrade.optimize.optimize_reports import generate_strategy_stats
from freqtrade.resolvers.hyperopt_resolver import HyperOptLossResolver
@@ -62,7 +61,6 @@ class Hyperopt:
hyperopt = Hyperopt(config)
hyperopt.start()
"""
custom_hyperopt: IHyperOpt

def __init__(self, config: Dict[str, Any]) -> None:
self.buy_space: List[Dimension] = []
@@ -77,6 +75,7 @@ class Hyperopt:

self.backtesting = Backtesting(self.config)
self.pairlist = self.backtesting.pairlists.whitelist
self.custom_hyperopt: HyperOptAuto

if not self.config.get('hyperopt'):
self.custom_hyperopt = HyperOptAuto(self.config)
@@ -88,7 +87,8 @@ class Hyperopt:
self.backtesting._set_strategy(self.backtesting.strategylist[0])
self.custom_hyperopt.strategy = self.backtesting.strategy

self.custom_hyperoptloss = HyperOptLossResolver.load_hyperoptloss(self.config)
self.custom_hyperoptloss: IHyperOptLoss = HyperOptLossResolver.load_hyperoptloss(
self.config)
self.calculate_loss = self.custom_hyperoptloss.hyperopt_loss_function
time_now = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
strategy = str(self.config['strategy'])
@@ -429,7 +429,7 @@ class Hyperopt:
return new_list
i = 0
asked_non_tried: List[List[Any]] = []
is_random: List[bool] = []
is_random_non_tried: List[bool] = []
while i < 5 and len(asked_non_tried) < n_points:
if i < 3:
self.opt.cache_ = {}
@@ -438,9 +438,9 @@ class Hyperopt:
else:
asked = unique_list(self.opt.space.rvs(n_samples=n_points * 5))
is_random = [True for _ in range(len(asked))]
is_random += [rand for x, rand in zip(asked, is_random)
if x not in self.opt.Xi
and x not in asked_non_tried]
is_random_non_tried += [rand for x, rand in zip(asked, is_random)
if x not in self.opt.Xi
and x not in asked_non_tried]
asked_non_tried += [x for x in asked
if x not in self.opt.Xi
and x not in asked_non_tried]
@@ -449,13 +449,13 @@ class Hyperopt:
if asked_non_tried:
return (
asked_non_tried[:min(len(asked_non_tried), n_points)],
is_random[:min(len(asked_non_tried), n_points)]
is_random_non_tried[:min(len(asked_non_tried), n_points)]
)
else:
return self.opt.ask(n_points=n_points), [False for _ in range(n_points)]

def start(self) -> None:
self.random_state = self._set_random_state(self.config.get('hyperopt_random_state', None))
self.random_state = self._set_random_state(self.config.get('hyperopt_random_state'))
logger.info(f"Using optimizer random state: {self.random_state}")
self.hyperopt_table_header = -1
# Initialize spaces ...
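The @@ -438 and @@ -449 hunks above fix a bookkeeping bug: the returned randomness flags were sliced from is_random, which had been filtered against a different snapshot of asked_non_tried than the points themselves. A simplified, self-contained sketch of the corrected alignment (toy data, not the optimizer loop):

tried = [[1]]                  # points already evaluated
new_points: list = []          # de-duplicated candidate points
new_flags: list = []           # whether each kept candidate came from random sampling
asked, flags = [[1], [2], [3]], [False, True, True]
# Filter the flags with the same predicate, and before extending new_points,
# so the two lists stay index-aligned:
new_flags += [f for x, f in zip(asked, flags) if x not in tried and x not in new_points]
new_points += [x for x in asked if x not in tried and x not in new_points]
assert new_points == [[2], [3]] and new_flags == [True, True]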
@@ -127,14 +127,14 @@ class HyperoptTools():
'only_profitable': config.get('hyperopt_list_profitable', False),
'filter_min_trades': config.get('hyperopt_list_min_trades', 0),
'filter_max_trades': config.get('hyperopt_list_max_trades', 0),
'filter_min_avg_time': config.get('hyperopt_list_min_avg_time', None),
'filter_max_avg_time': config.get('hyperopt_list_max_avg_time', None),
'filter_min_avg_profit': config.get('hyperopt_list_min_avg_profit', None),
'filter_max_avg_profit': config.get('hyperopt_list_max_avg_profit', None),
'filter_min_total_profit': config.get('hyperopt_list_min_total_profit', None),
'filter_max_total_profit': config.get('hyperopt_list_max_total_profit', None),
'filter_min_objective': config.get('hyperopt_list_min_objective', None),
'filter_max_objective': config.get('hyperopt_list_max_objective', None),
'filter_min_avg_time': config.get('hyperopt_list_min_avg_time'),
'filter_max_avg_time': config.get('hyperopt_list_max_avg_time'),
'filter_min_avg_profit': config.get('hyperopt_list_min_avg_profit'),
'filter_max_avg_profit': config.get('hyperopt_list_max_avg_profit'),
'filter_min_total_profit': config.get('hyperopt_list_min_total_profit'),
'filter_max_total_profit': config.get('hyperopt_list_max_total_profit'),
'filter_min_objective': config.get('hyperopt_list_min_objective'),
'filter_max_objective': config.get('hyperopt_list_max_objective'),
}
if not HyperoptTools._test_hyperopt_results_exist(results_file):
# No file found.
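The repeated edit above is a pure cleanup: dict.get() already returns None for a missing key, so spelling the default out was redundant. For example:

cfg: dict = {}
assert cfg.get('hyperopt_list_min_objective') is None
assert cfg.get('hyperopt_list_min_objective') == cfg.get('hyperopt_list_min_objective', None)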
@@ -4,7 +4,6 @@ from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Any, Dict, List, Union

from numpy import int64
from pandas import DataFrame, to_datetime
from tabulate import tabulate

@@ -18,21 +17,21 @@ from freqtrade.optimize.backtest_caching import get_backtest_metadata_filename
logger = logging.getLogger(__name__)


def store_backtest_stats(recordfilename: Path, stats: Dict[str, DataFrame]) -> None:
def store_backtest_stats(
recordfilename: Path, stats: Dict[str, DataFrame], dtappendix: str) -> None:
"""
Stores backtest results
:param recordfilename: Path object, which can either be a filename or a directory.
Filenames will be appended with a timestamp right before the suffix
while for directories, <directory>/backtest-result-<datetime>.json will be used as filename
:param stats: Dataframe containing the backtesting statistics
:param dtappendix: Datetime to use for the filename
"""
if recordfilename.is_dir():
filename = (recordfilename /
f'backtest-result-{datetime.now().strftime("%Y-%m-%d_%H-%M-%S")}.json')
filename = (recordfilename / f'backtest-result-{dtappendix}.json')
else:
filename = Path.joinpath(
recordfilename.parent,
f'{recordfilename.stem}-{datetime.now().strftime("%Y-%m-%d_%H-%M-%S")}'
recordfilename.parent, f'{recordfilename.stem}-{dtappendix}'
).with_suffix(recordfilename.suffix)

# Store metadata separately.
@@ -45,7 +44,8 @@ def store_backtest_stats(recordfilename: Path, stats: Dict[str, DataFrame]) -> N
file_dump_json(latest_filename, {'latest_backtest': str(filename.name)})


def store_backtest_signal_candles(recordfilename: Path, candles: Dict[str, Dict]) -> Path:
def store_backtest_signal_candles(
recordfilename: Path, candles: Dict[str, Dict], dtappendix: str) -> Path:
"""
Stores backtest trade signal candles
:param recordfilename: Path object, which can either be a filename or a directory.
@@ -53,14 +53,13 @@ def store_backtest_signal_candles(recordfilename: Path, candles: Dict[str, Dict]
while for directories, <directory>/backtest-result-<datetime>_signals.pkl will be used
as filename
:param candles: Dict containing the backtesting signal candles
:param dtappendix: Datetime to use for the filename
"""
if recordfilename.is_dir():
filename = (recordfilename /
f'backtest-result-{datetime.now().strftime("%Y-%m-%d_%H-%M-%S")}_signals.pkl')
filename = (recordfilename / f'backtest-result-{dtappendix}_signals.pkl')
else:
filename = Path.joinpath(
recordfilename.parent,
f'{recordfilename.stem}-{datetime.now().strftime("%Y-%m-%d_%H-%M-%S")}_signals.pkl'
recordfilename.parent, f'{recordfilename.stem}-{dtappendix}_signals.pkl'
)

file_dump_joblib(filename, candles)
@@ -417,9 +416,9 @@ def generate_strategy_stats(pairlist: List[str],
key=lambda x: x['profit_sum']) if len(pair_results) > 1 else None
worst_pair = min([pair for pair in pair_results if pair['key'] != 'TOTAL'],
key=lambda x: x['profit_sum']) if len(pair_results) > 1 else None
if not results.empty:
results['open_timestamp'] = results['open_date'].view(int64) // 1e6
results['close_timestamp'] = results['close_date'].view(int64) // 1e6
winning_profit = results.loc[results['profit_abs'] > 0, 'profit_abs'].sum()
losing_profit = results.loc[results['profit_abs'] < 0, 'profit_abs'].sum()
profit_factor = winning_profit / abs(losing_profit) if losing_profit else 0.0

backtest_days = (max_date - min_date).days or 1
strat_stats = {
@@ -447,6 +446,7 @@ def generate_strategy_stats(pairlist: List[str],
'profit_total_long_abs': results.loc[~results['is_short'], 'profit_abs'].sum(),
'profit_total_short_abs': results.loc[results['is_short'], 'profit_abs'].sum(),
'cagr': calculate_cagr(backtest_days, start_balance, content['final_balance']),
'profit_factor': profit_factor,
'backtest_start': min_date.strftime(DATETIME_PRINT_FORMAT),
'backtest_start_ts': int(min_date.timestamp() * 1000),
'backtest_end': max_date.strftime(DATETIME_PRINT_FORMAT),
@@ -468,6 +468,9 @@ def generate_strategy_stats(pairlist: List[str],
'rejected_signals': content['rejected_signals'],
'timedout_entry_orders': content['timedout_entry_orders'],
'timedout_exit_orders': content['timedout_exit_orders'],
'canceled_trade_entries': content['canceled_trade_entries'],
'canceled_entry_orders': content['canceled_entry_orders'],
'replaced_entry_orders': content['replaced_entry_orders'],
'max_open_trades': max_open_trades,
'max_open_trades_setting': (config['max_open_trades']
if config['max_open_trades'] != float('inf') else -1),
@@ -498,8 +501,10 @@ def generate_strategy_stats(pairlist: List[str],
(drawdown_abs, drawdown_start, drawdown_end, high_val, low_val,
max_drawdown) = calculate_max_drawdown(
results, value_col='profit_abs', starting_balance=start_balance)
# max_relative_drawdown = Underwater
(_, _, _, _, _, max_relative_drawdown) = calculate_max_drawdown(
results, value_col='profit_abs', starting_balance=start_balance, relative=True)

strat_stats.update({
'max_drawdown': max_drawdown_legacy, # Deprecated - do not use
'max_drawdown_account': max_drawdown,
@@ -753,6 +758,12 @@ def text_table_add_metrics(strat_results: Dict) -> str:
('Drawdown End', strat_results['drawdown_end']),
])

entry_adjustment_metrics = [
('Canceled Trade Entries', strat_results.get('canceled_trade_entries', 'N/A')),
('Canceled Entry Orders', strat_results.get('canceled_entry_orders', 'N/A')),
('Replaced Entry Orders', strat_results.get('replaced_entry_orders', 'N/A')),
] if strat_results.get('canceled_entry_orders', 0) > 0 else []

# Newly added fields should be ignored if they are missing in strat_results. hyperopt-show
# command stores these results and newer version of freqtrade must be able to handle old
# results with missing new fields.
@@ -772,6 +783,8 @@ def text_table_add_metrics(strat_results: Dict) -> str:
strat_results['stake_currency'])),
('Total profit %', f"{strat_results['profit_total']:.2%}"),
('CAGR %', f"{strat_results['cagr']:.2%}" if 'cagr' in strat_results else 'N/A'),
('Profit factor', f'{strat_results["profit_factor"]:.2f}' if 'profit_factor'
in strat_results else 'N/A'),
('Trades per day', strat_results['trades_per_day']),
('Avg. daily profit %',
f"{(strat_results['profit_total'] / strat_results['backtest_days']):.2%}"),
@@ -801,6 +814,7 @@ def text_table_add_metrics(strat_results: Dict) -> str:
('Entry/Exit Timeouts',
f"{strat_results.get('timedout_entry_orders', 'N/A')} / "
f"{strat_results.get('timedout_exit_orders', 'N/A')}"),
*entry_adjustment_metrics,
('', ''), # Empty line to improve readability

('Min balance', round_coin_value(strat_results['csum_min'],
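A worked example of the profit_factor metric introduced in this file: gross winning profit divided by the absolute gross losing profit (toy numbers):

winning_profit, losing_profit = 30.0, -20.0
profit_factor = winning_profit / abs(losing_profit) if losing_profit else 0.0
assert profit_factor == 1.5  # above 1.0, gross gains exceed gross losses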
@@ -1,5 +1,5 @@
# flake8: noqa: F401

from freqtrade.persistence.models import clean_dry_run_db, cleanup_db, init_db
from freqtrade.persistence.models import cleanup_db, init_db
from freqtrade.persistence.pairlock_middleware import PairLocks
from freqtrade.persistence.trade_model import LocalTrade, Order, Trade
@@ -201,16 +201,18 @@ def migrate_orders_table(engine, table_back_name: str, cols_order: List):

ft_fee_base = get_column_def(cols_order, 'ft_fee_base', 'null')
average = get_column_def(cols_order, 'average', 'null')
stop_price = get_column_def(cols_order, 'stop_price', 'null')

# sqlite does not support literals for booleans
with engine.begin() as connection:
connection.execute(text(f"""
insert into orders (id, ft_trade_id, ft_order_side, ft_pair, ft_is_open, order_id,
status, symbol, order_type, side, price, amount, filled, average, remaining, cost,
order_date, order_filled_date, order_update_date, ft_fee_base)
stop_price, order_date, order_filled_date, order_update_date, ft_fee_base)
select id, ft_trade_id, ft_order_side, ft_pair, ft_is_open, order_id,
status, symbol, order_type, side, price, amount, filled, {average} average, remaining,
cost, order_date, order_filled_date, order_update_date, {ft_fee_base} ft_fee_base
cost, {stop_price} stop_price, order_date, order_filled_date,
order_update_date, {ft_fee_base} ft_fee_base
from {table_back_name}
"""))
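For context: get_column_def lets the INSERT ... SELECT above work against backups that predate the new column, selecting the literal default where stop_price does not exist yet. A simplified sketch of that behaviour (the real helper lives alongside this migration code and inspects reflected column metadata):

def get_column_def(columns, column, default):
    # Simplified: use the column if the backup table has it, else the literal default.
    return column if column in columns else default

assert get_column_def(['price', 'amount'], 'stop_price', 'null') == 'null'
assert get_column_def(['stop_price'], 'stop_price', 'null') == 'stop_price'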
@@ -247,6 +249,35 @@ def set_sqlite_to_wal(engine):
connection.execute(text("PRAGMA journal_mode=wal"))


def fix_old_dry_orders(engine):
with engine.begin() as connection:
connection.execute(
text(
"""
update orders
set ft_is_open = 0
where ft_is_open = 1 and (ft_trade_id, order_id) not in (
select id, stoploss_order_id from trades where stoploss_order_id is not null
) and ft_order_side = 'stoploss'
and order_id like 'dry_%'
"""
)
)
connection.execute(
text(
"""
update orders
set ft_is_open = 0
where ft_is_open = 1
and (ft_trade_id, order_id) not in (
select id, open_order_id from trades where open_order_id is not null
) and ft_order_side != 'stoploss'
and order_id like 'dry_%'
"""
)
)


def check_migrate(engine, decl_base, previous_tables) -> None:
"""
Checks if migration is necessary and migrates if necessary
@@ -265,9 +296,8 @@ def check_migrate(engine, decl_base, previous_tables) -> None:

# Check if migration necessary
# Migrates both trades and orders table!
# if ('orders' not in previous_tables
# or not has_column(cols_orders, 'leverage')):
if not has_column(cols_trades, 'base_currency'):
if not has_column(cols_orders, 'stop_price'):
# if not has_column(cols_trades, 'base_currency'):
logger.info(f"Running database migration for trades - "
f"backup: {table_back_name}, {order_table_bak_name}")
migrate_trades_and_orders_table(
@@ -288,3 +318,4 @@ def check_migrate(engine, decl_base, previous_tables) -> None:
"start with a fresh database.")

set_sqlite_to_wal(engine)
fix_old_dry_orders(engine)
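fix_old_dry_orders leans on SQLite's row-value syntax ((a, b) NOT IN (SELECT ...), available since SQLite 3.15) to close stale dry-run orders that no trade still references. A self-contained demonstration of the pattern on toy tables (a cut-down schema, not freqtrade's full one):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
    create table trades (id integer, open_order_id text);
    create table orders (ft_trade_id integer, order_id text,
                         ft_is_open integer, ft_order_side text);
    insert into trades values (1, 'dry_1');
    -- dry_1 is still referenced by its trade; dry_2 is stale.
    insert into orders values (1, 'dry_1', 1, 'buy'), (1, 'dry_2', 1, 'buy');
""")
conn.execute("""
    update orders set ft_is_open = 0
     where ft_is_open = 1
       and (ft_trade_id, order_id) not in (
           select id, open_order_id from trades where open_order_id is not null)
       and ft_order_side != 'stoploss'
       and order_id like 'dry_%'
""")
print(conn.execute("select order_id, ft_is_open from orders").fetchall())
# [('dry_1', 1), ('dry_2', 0)] - only the unreferenced order gets closed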
@@ -21,14 +21,12 @@ logger = logging.getLogger(__name__)
_SQL_DOCS_URL = 'http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls'


def init_db(db_url: str, clean_open_orders: bool = False) -> None:
def init_db(db_url: str) -> None:
"""
Initializes this module with the given config,
registers all known command handlers
and starts polling for message updates
:param db_url: Database to use
:param clean_open_orders: Remove open orders from the database.
Useful for dry-run or if all orders have been reset on the exchange.
:return: None
"""
kwargs = {}
@@ -64,10 +62,6 @@ def init_db(db_url: str, clean_open_orders: bool = False) -> None:
_DECL_BASE.metadata.create_all(engine)
check_migrate(engine, decl_base=_DECL_BASE, previous_tables=previous_tables)

# Clean dry_run DB if the db is not in-memory
if clean_open_orders and db_url != 'sqlite://':
clean_dry_run_db()


def cleanup_db() -> None:
"""
@@ -75,15 +69,3 @@ def cleanup_db() -> None:
:return: None
"""
Trade.commit()


def clean_dry_run_db() -> None:
"""
Remove open_order_id from a Dry_run DB
:return: None
"""
for trade in Trade.query.filter(Trade.open_order_id.isnot(None)).all():
# Check we are updating only a dry_run order not a prod one
if 'dry_run' in trade.open_order_id:
trade.open_order_id = None
Trade.commit()
@@ -8,7 +8,7 @@ from typing import Any, Dict, List, Optional

from sqlalchemy import (Boolean, Column, DateTime, Enum, Float, ForeignKey, Integer, String,
UniqueConstraint, desc, func)
from sqlalchemy.orm import Query, relationship
from sqlalchemy.orm import Query, lazyload, relationship

from freqtrade.constants import DATETIME_PRINT_FORMAT, NON_OPEN_EXCHANGE_STATES, BuySell, LongShort
from freqtrade.enums import ExitType, TradingMode
@@ -57,6 +57,7 @@ class Order(_DECL_BASE):
filled = Column(Float, nullable=True)
remaining = Column(Float, nullable=True)
cost = Column(Float, nullable=True)
stop_price = Column(Float, nullable=True)
order_date = Column(DateTime, nullable=True, default=datetime.utcnow)
order_filled_date = Column(DateTime, nullable=True)
order_update_date = Column(DateTime, nullable=True)
@@ -74,7 +75,7 @@ class Order(_DECL_BASE):

@property
def safe_filled(self) -> float:
return self.filled or self.amount or 0.0
return self.filled if self.filled is not None else self.amount or 0.0

@property
def safe_fee_base(self) -> float:
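The safe_filled change above fixes a truthiness bug: with `or`, a legitimately zero-filled order fell through to amount and looked fully filled. A standalone illustration:

filled, amount = 0.0, 5.0
old = filled or amount or 0.0                          # 5.0, wrong for a zero-filled order
new = filled if filled is not None else amount or 0.0  # 0.0, correct
assert (old, new) == (5.0, 0.0)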
@@ -107,6 +108,7 @@ class Order(_DECL_BASE):
self.average = order.get('average', self.average)
self.remaining = order.get('remaining', self.remaining)
self.cost = order.get('cost', self.cost)
self.stop_price = order.get('stopPrice', self.stop_price)

if 'timestamp' in order and order['timestamp'] is not None:
self.order_date = datetime.fromtimestamp(order['timestamp'] / 1000, tz=timezone.utc)
@@ -118,35 +120,60 @@ class Order(_DECL_BASE):
self.order_filled_date = datetime.now(timezone.utc)
self.order_update_date = datetime.now(timezone.utc)

def to_json(self, entry_side: str) -> Dict[str, Any]:
def to_ccxt_object(self) -> Dict[str, Any]:
return {
'pair': self.ft_pair,
'order_id': self.order_id,
'status': self.status,
'id': self.order_id,
'symbol': self.ft_pair,
'price': self.price,
'average': self.average,
'amount': self.amount,
'average': round(self.average, 8) if self.average else 0,
'safe_price': self.safe_price,
'cost': self.cost if self.cost else 0,
'cost': self.cost,
'type': self.order_type,
'side': self.ft_order_side,
'filled': self.filled,
'remaining': self.remaining,
'stopPrice': self.stop_price,
'datetime': self.order_date_utc.strftime('%Y-%m-%dT%H:%M:%S.%f'),
'timestamp': int(self.order_date_utc.timestamp() * 1000),
'status': self.status,
'fee': None,
'info': {},
}

def to_json(self, entry_side: str, minified: bool = False) -> Dict[str, Any]:
resp = {
'amount': self.amount,
'safe_price': self.safe_price,
'ft_order_side': self.ft_order_side,
'is_open': self.ft_is_open,
'order_date': self.order_date.strftime(DATETIME_PRINT_FORMAT)
if self.order_date else None,
'order_timestamp': int(self.order_date.replace(
tzinfo=timezone.utc).timestamp() * 1000) if self.order_date else None,
'order_filled_date': self.order_filled_date.strftime(DATETIME_PRINT_FORMAT)
if self.order_filled_date else None,
'order_filled_timestamp': int(self.order_filled_date.replace(
tzinfo=timezone.utc).timestamp() * 1000) if self.order_filled_date else None,
'order_type': self.order_type,
'price': self.price,
'ft_is_entry': self.ft_order_side == entry_side,
'remaining': self.remaining,
}
if not minified:
resp.update({
'pair': self.ft_pair,
'order_id': self.order_id,
'status': self.status,
'average': round(self.average, 8) if self.average else 0,
'cost': self.cost if self.cost else 0,
'filled': self.filled,
'is_open': self.ft_is_open,
'order_date': self.order_date.strftime(DATETIME_PRINT_FORMAT)
if self.order_date else None,
'order_timestamp': int(self.order_date.replace(
tzinfo=timezone.utc).timestamp() * 1000) if self.order_date else None,
'order_filled_date': self.order_filled_date.strftime(DATETIME_PRINT_FORMAT)
if self.order_filled_date else None,
'order_type': self.order_type,
'price': self.price,
'remaining': self.remaining,
})
return resp

def close_bt_order(self, close_date: datetime, trade: 'LocalTrade'):
self.order_filled_date = close_date
self.filled = self.amount
self.remaining = 0
self.status = 'closed'
self.ft_is_open = False
if (self.ft_order_side == trade.entry_side
@@ -190,6 +217,14 @@ class Order(_DECL_BASE):
"""
return Order.query.filter(Order.ft_is_open.is_(True)).all()

@staticmethod
def order_by_id(order_id: str) -> Optional['Order']:
"""
Retrieve order based on order_id
:return: Order or None
"""
return Order.query.filter(Order.order_id == order_id).first()


class LocalTrade():
"""
@@ -366,9 +401,9 @@ class LocalTrade():
f'open_rate={self.open_rate:.8f}, open_since={open_since})'
)

def to_json(self) -> Dict[str, Any]:
filled_orders = self.select_filled_orders()
orders = [order.to_json(self.entry_side) for order in filled_orders]
def to_json(self, minified: bool = False) -> Dict[str, Any]:
filled_orders = self.select_filled_or_open_orders()
orders = [order.to_json(self.entry_side, minified) for order in filled_orders]

return {
'trade_id': self.id,
@@ -592,8 +627,8 @@ class LocalTrade():
"""
self.close_rate = rate
self.close_date = self.close_date or datetime.utcnow()
self.close_profit = self.calc_profit_ratio()
self.close_profit_abs = self.calc_profit()
self.close_profit = self.calc_profit_ratio(rate)
self.close_profit_abs = self.calc_profit(rate)
self.is_open = False
self.exit_order_status = 'closed'
self.open_order_id = None
@@ -661,10 +696,9 @@ class LocalTrade():
"""
self.open_trade_value = self._calc_open_trade_value()

def calculate_interest(self, interest_rate: Optional[float] = None) -> Decimal:
def calculate_interest(self) -> Decimal:
"""
:param interest_rate: interest_charge for borrowing this coin(optional).
If interest_rate is not set self.interest_rate will be used
Calculate interest for this trade. Only applicable for Margin trading.
"""
zero = Decimal(0.0)
# If nothing was borrowed
@@ -677,34 +711,26 @@ class LocalTrade():
total_seconds = Decimal((now - open_date).total_seconds())
hours = total_seconds / sec_per_hour or zero

rate = Decimal(interest_rate or self.interest_rate)
rate = Decimal(self.interest_rate)
borrowed = Decimal(self.borrowed)

return interest(exchange_name=self.exchange, borrowed=borrowed, rate=rate, hours=hours)

def _calc_base_close(self, amount: Decimal, rate: Optional[float] = None,
fee: Optional[float] = None) -> Decimal:
def _calc_base_close(self, amount: Decimal, rate: float, fee: float) -> Decimal:

close_trade = Decimal(amount) * Decimal(rate or self.close_rate) # type: ignore
fees = close_trade * Decimal(fee or self.fee_close)
close_trade = amount * Decimal(rate)
fees = close_trade * Decimal(fee)

if self.is_short:
return close_trade + fees
else:
return close_trade - fees

def calc_close_trade_value(self, rate: Optional[float] = None,
fee: Optional[float] = None,
interest_rate: Optional[float] = None) -> float:
def calc_close_trade_value(self, rate: float) -> float:
"""
Calculate the close_rate including fee
:param fee: fee to use on the close rate (optional).
If rate is not set self.fee will be used
:param rate: rate to compare with (optional).
If rate is not set self.close_rate will be used
:param interest_rate: interest_charge for borrowing this coin (optional).
If interest_rate is not set self.interest_rate will be used
:return: Price in BTC of the open trade
Calculate the Trade's close value including fees
:param rate: rate to compare with.
:return: value in stake currency of the open trade
"""
if rate is None and not self.close_rate:
return 0.0
@@ -713,49 +739,38 @@ class LocalTrade():
trading_mode = self.trading_mode or TradingMode.SPOT

if trading_mode == TradingMode.SPOT:
return float(self._calc_base_close(amount, rate, fee))
return float(self._calc_base_close(amount, rate, self.fee_close))

elif (trading_mode == TradingMode.MARGIN):

total_interest = self.calculate_interest(interest_rate)
total_interest = self.calculate_interest()

if self.is_short:
amount = amount + total_interest
return float(self._calc_base_close(amount, rate, fee))
return float(self._calc_base_close(amount, rate, self.fee_close))
else:
# Currency already owned for longs, no need to purchase
return float(self._calc_base_close(amount, rate, fee) - total_interest)
return float(self._calc_base_close(amount, rate, self.fee_close) - total_interest)

elif (trading_mode == TradingMode.FUTURES):
funding_fees = self.funding_fees or 0.0
# Positive funding_fees -> Trade has gained from fees.
# Negative funding_fees -> Trade had to pay the fees.
if self.is_short:
return float(self._calc_base_close(amount, rate, fee)) - funding_fees
return float(self._calc_base_close(amount, rate, self.fee_close)) - funding_fees
else:
return float(self._calc_base_close(amount, rate, fee)) + funding_fees
return float(self._calc_base_close(amount, rate, self.fee_close)) + funding_fees
else:
raise OperationalException(
f"{self.trading_mode.value} trading is not yet available using freqtrade")

def calc_profit(self, rate: Optional[float] = None,
fee: Optional[float] = None,
interest_rate: Optional[float] = None) -> float:
def calc_profit(self, rate: float) -> float:
"""
Calculate the absolute profit in stake currency between Close and Open trade
:param fee: fee to use on the close rate (optional).
If fee is not set self.fee will be used
:param rate: close rate to compare with (optional).
If rate is not set self.close_rate will be used
:param interest_rate: interest_charge for borrowing this coin (optional).
If interest_rate is not set self.interest_rate will be used
:return: profit in stake currency as float
:param rate: close rate to compare with.
:return: profit in stake currency as float
"""
close_trade_value = self.calc_close_trade_value(
rate=(rate or self.close_rate),
fee=(fee or self.fee_close),
interest_rate=(interest_rate or self.interest_rate)
)
close_trade_value = self.calc_close_trade_value(rate)

if self.is_short:
profit = self.open_trade_value - close_trade_value
@@ -763,23 +778,13 @@ class LocalTrade():
profit = close_trade_value - self.open_trade_value
return float(f"{profit:.8f}")

def calc_profit_ratio(self, rate: Optional[float] = None,
fee: Optional[float] = None,
interest_rate: Optional[float] = None) -> float:
def calc_profit_ratio(self, rate: float) -> float:
"""
Calculates the profit as ratio (including fee).
:param rate: rate to compare with (optional).
If rate is not set self.close_rate will be used
:param fee: fee to use on the close rate (optional).
:param interest_rate: interest_charge for borrowing this coin (optional).
If interest_rate is not set self.interest_rate will be used
:param rate: rate to compare with.
:return: profit ratio as float
"""
close_trade_value = self.calc_close_trade_value(
rate=(rate or self.close_rate),
fee=(fee or self.fee_close),
interest_rate=(interest_rate or self.interest_rate)
)
close_trade_value = self.calc_close_trade_value(rate)

short_close_zero = (self.is_short and close_trade_value == 0.0)
long_close_zero = (not self.is_short and self.open_trade_value == 0.0)
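With the optional rate/fee/interest_rate parameters removed, callers now always pass an explicit rate and the trade's own fee_close is used. A worked example of the underlying arithmetic for a plain long spot trade (simplified: no leverage, interest or funding fees):

open_rate, close_rate, amount, fee = 100.0, 110.0, 1.0, 0.001
open_trade_value = amount * open_rate * (1 + fee)    # 100.1  (buy cost plus entry fee)
close_trade_value = amount * close_rate * (1 - fee)  # 109.89 (sale proceeds minus exit fee)
profit_abs = close_trade_value - open_trade_value
profit_ratio = close_trade_value / open_trade_value - 1
print(round(profit_abs, 2), round(profit_ratio, 4))  # 9.79 0.0978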
@@ -796,14 +801,6 @@ class LocalTrade():
return float(f"{profit_ratio:.8f}")

def recalc_trade_from_orders(self):
# We need at least 2 entry orders for averaging amounts and rates.
# TODO: this condition could probably be removed
if len(self.select_filled_orders(self.entry_side)) < 2:
self.stake_amount = self.amount * self.open_rate / self.leverage

# Just in case, still recalc open trade value
self.recalc_open_trade_value()
return

total_amount = 0.0
total_stake = 0.0
@@ -815,8 +812,6 @@ class LocalTrade():

tmp_amount = o.safe_amount_after_fee
tmp_price = o.average or o.price
if o.filled is not None:
tmp_amount = o.filled
if tmp_amount > 0.0 and tmp_price is not None:
total_amount += tmp_amount
total_stake += tmp_price * tmp_amount
@@ -841,8 +836,8 @@ class LocalTrade():
return o
return None

def select_order(
self, order_side: str = None, is_open: Optional[bool] = None) -> Optional[Order]:
def select_order(self, order_side: Optional[str] = None,
is_open: Optional[bool] = None) -> Optional[Order]:
"""
Finds latest order for this orderside and status
:param order_side: ft_order_side of the order (either 'buy', 'sell' or 'stoploss')
@@ -870,6 +865,21 @@ class LocalTrade():
(o.filled or 0) > 0 and
o.status in NON_OPEN_EXCHANGE_STATES]

def select_filled_or_open_orders(self) -> List['Order']:
"""
Finds filled or open orders
:return: array of Order objects
"""
return [o for o in self.orders if
(
o.ft_is_open is False
and (o.filled or 0) > 0
and o.status in NON_OPEN_EXCHANGE_STATES
)
or (o.ft_is_open is True and o.status is not None)
]

@property
def nr_of_successful_entries(self) -> int:
"""
@@ -1108,7 +1118,7 @@ class Trade(_DECL_BASE, LocalTrade):
)

@staticmethod
def get_trades(trade_filter=None) -> Query:
def get_trades(trade_filter=None, include_orders: bool = True) -> Query:
"""
Helper function to query Trades using filters.
NOTE: Not supported in Backtesting.
@@ -1123,9 +1133,14 @@ class Trade(_DECL_BASE, LocalTrade):
if trade_filter is not None:
if not isinstance(trade_filter, list):
trade_filter = [trade_filter]
return Trade.query.filter(*trade_filter)
this_query = Trade.query.filter(*trade_filter)
else:
return Trade.query
this_query = Trade.query
if not include_orders:
# Don't load order relations
# Consider using noload or raiseload instead of lazyload
this_query = this_query.options(lazyload(Trade.orders))
return this_query

@staticmethod
def get_open_order_trades() -> List['Trade']:
@@ -1345,3 +1360,18 @@ class Trade(_DECL_BASE, LocalTrade):
.group_by(Trade.pair) \
.order_by(desc('profit_sum')).first()
return best_pair

@staticmethod
def get_trading_volume(start_date: datetime = datetime.fromtimestamp(0)) -> float:
"""
Get Trade volume based on Orders
NOTE: Not supported in Backtesting.
:returns: Trading volume (sum of closed order cost) as float
"""
|
||||
trading_volume = Order.query.with_entities(
|
||||
func.sum(Order.cost).label('volume')
|
||||
).filter(
|
||||
Order.order_filled_date >= start_date,
|
||||
Order.status == 'closed'
|
||||
).scalar()
|
||||
return trading_volume
|
||||
|
@@ -633,7 +633,7 @@ def load_and_plot_trades(config: Dict[str, Any]):

exchange = ExchangeResolver.load_exchange(config['exchange']['name'], config)
IStrategy.dp = DataProvider(config, exchange)
strategy.bot_start()
strategy.ft_bot_start()
strategy.bot_loop_start()
plot_elements = init_plotscript(config, list(exchange.markets), strategy.startup_candle_count)
timerange = plot_elements['timerange']
@@ -30,20 +30,21 @@ class AgeFilter(IPairList):
self._symbolsCheckFailed = PeriodicCache(maxsize=1000, ttl=86_400)

self._min_days_listed = pairlistconfig.get('min_days_listed', 10)
self._max_days_listed = pairlistconfig.get('max_days_listed', None)
self._max_days_listed = pairlistconfig.get('max_days_listed')

candle_limit = exchange.ohlcv_candle_limit('1d', self._config['candle_type_def'])
if self._min_days_listed < 1:
raise OperationalException("AgeFilter requires min_days_listed to be >= 1")
if self._min_days_listed > exchange.ohlcv_candle_limit('1d'):
if self._min_days_listed > candle_limit:
raise OperationalException("AgeFilter requires min_days_listed to not exceed "
"exchange max request size "
f"({exchange.ohlcv_candle_limit('1d')})")
f"({candle_limit})")
if self._max_days_listed and self._max_days_listed <= self._min_days_listed:
raise OperationalException("AgeFilter max_days_listed <= min_days_listed not permitted")
if self._max_days_listed and self._max_days_listed > exchange.ohlcv_candle_limit('1d'):
if self._max_days_listed and self._max_days_listed > candle_limit:
raise OperationalException("AgeFilter requires max_days_listed to not exceed "
"exchange max request size "
f"({exchange.ohlcv_candle_limit('1d')})")
f"({candle_limit})")

@property
def needstickers(self) -> bool:
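This refactor, repeated in VolatilityFilter, VolumePairList and RangeStabilityFilter below, fetches the candle limit once (now passing the configured candle type) instead of querying the exchange again for every comparison and error message. The validation shape, as a standalone sketch with hypothetical names:

def validate_lookback(min_days: int, max_days, candle_limit: int) -> None:
    # candle_limit is fetched once from the exchange and reused everywhere.
    if min_days < 1:
        raise ValueError("min_days_listed must be >= 1")
    if min_days > candle_limit:
        raise ValueError(f"min_days_listed exceeds exchange max request size ({candle_limit})")
    if max_days and max_days > candle_limit:
        raise ValueError(f"max_days_listed exceeds exchange max request size ({candle_limit})")

validate_lookback(10, 100, 1000)  # passes silently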
@@ -21,7 +21,7 @@ class PerformanceFilter(IPairList):
super().__init__(exchange, pairlistmanager, config, pairlistconfig, pairlist_pos)

self._minutes = pairlistconfig.get('minutes', 0)
self._min_profit = pairlistconfig.get('min_profit', None)
self._min_profit = pairlistconfig.get('min_profit')

@property
def needstickers(self) -> bool:
@@ -50,7 +50,7 @@ class SpreadFilter(IPairList):
:param ticker: ticker dict as returned from ccxt.fetch_tickers()
:return: True if the pair can stay, false if it should be removed
"""
if 'bid' in ticker and 'ask' in ticker and ticker['ask']:
if 'bid' in ticker and 'ask' in ticker and ticker['ask'] and ticker['bid']:
spread = 1 - ticker['bid'] / ticker['ask']
if spread > self._max_spread_ratio:
self.log_once(f"Removed {pair} from whitelist, because spread "
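The added `and ticker['bid']` guard keeps the filter from dividing with a missing bid: an illiquid pair reporting an ask but a None (or zero) bid previously reached the division and raised a TypeError, or yielded a meaningless 100 % spread. A standalone illustration (the skip-vs-remove policy for incomplete tickers is simplified here):

def spread_ok(ticker: dict, max_spread_ratio: float) -> bool:
    # Both sides must be present and non-zero before dividing.
    if 'bid' in ticker and 'ask' in ticker and ticker['ask'] and ticker['bid']:
        return (1 - ticker['bid'] / ticker['ask']) <= max_spread_ratio
    return False  # treat incomplete tickers as failing the filter

assert spread_ok({'bid': 99.5, 'ask': 100.0}, 0.01)
assert not spread_ok({'bid': None, 'ask': 100.0}, 0.01)  # previously a TypeError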
@@ -38,12 +38,12 @@ class VolatilityFilter(IPairList):

self._pair_cache: TTLCache = TTLCache(maxsize=1000, ttl=self._refresh_period)

candle_limit = exchange.ohlcv_candle_limit('1d', self._config['candle_type_def'])
if self._days < 1:
raise OperationalException("VolatilityFilter requires lookback_days to be >= 1")
if self._days > exchange.ohlcv_candle_limit('1d'):
if self._days > candle_limit:
raise OperationalException("VolatilityFilter requires lookback_days to not "
"exceed exchange max request size "
f"({exchange.ohlcv_candle_limit('1d')})")
f"exceed exchange max request size ({candle_limit})")

@property
def needstickers(self) -> bool:
@@ -84,12 +84,13 @@ class VolumePairList(IPairList):
raise OperationalException(
f'key {self._sort_key} not in {SORT_VALUES}')

candle_limit = exchange.ohlcv_candle_limit(
self._lookback_timeframe, self._config['candle_type_def'])
if self._lookback_period < 0:
raise OperationalException("VolumeFilter requires lookback_period to be >= 0")
if self._lookback_period > exchange.ohlcv_candle_limit(self._lookback_timeframe):
if self._lookback_period > candle_limit:
raise OperationalException("VolumeFilter requires lookback_period to not "
"exceed exchange max request size "
f"({exchange.ohlcv_candle_limit(self._lookback_timeframe)})")
f"exceed exchange max request size ({candle_limit})")

@property
def needstickers(self) -> bool:
@@ -27,18 +27,18 @@ class RangeStabilityFilter(IPairList):

self._days = pairlistconfig.get('lookback_days', 10)
self._min_rate_of_change = pairlistconfig.get('min_rate_of_change', 0.01)
self._max_rate_of_change = pairlistconfig.get('max_rate_of_change', None)
self._max_rate_of_change = pairlistconfig.get('max_rate_of_change')
self._refresh_period = pairlistconfig.get('refresh_period', 1440)
self._def_candletype = self._config['candle_type_def']

self._pair_cache: TTLCache = TTLCache(maxsize=1000, ttl=self._refresh_period)

candle_limit = exchange.ohlcv_candle_limit('1d', self._config['candle_type_def'])
if self._days < 1:
raise OperationalException("RangeStabilityFilter requires lookback_days to be >= 1")
if self._days > exchange.ohlcv_candle_limit('1d'):
if self._days > candle_limit:
raise OperationalException("RangeStabilityFilter requires lookback_days to not "
"exceed exchange max request size "
f"({exchange.ohlcv_candle_limit('1d')})")
f"exceed exchange max request size ({candle_limit})")

@property
def needstickers(self) -> bool:
@@ -28,7 +28,7 @@ class PairListManager(LoggingMixin):
self._blacklist = self._config['exchange'].get('pair_blacklist', [])
self._pairlist_handlers: List[IPairList] = []
self._tickers_needed = False
for pairlist_handler_config in self._config.get('pairlists', None):
for pairlist_handler_config in self._config.get('pairlists', []):
pairlist_handler = PairListResolver.load_pairlist(
pairlist_handler_config['method'],
exchange=exchange,
@@ -47,26 +47,7 @@ class StrategyResolver(IResolver):
strategy: IStrategy = StrategyResolver._load_strategy(
strategy_name, config=config,
extra_dir=config.get('strategy_path'))

if strategy._ft_params_from_file:
# Set parameters from Hyperopt results file
params = strategy._ft_params_from_file
strategy.minimal_roi = params.get('roi', getattr(strategy, 'minimal_roi', {}))

strategy.stoploss = params.get('stoploss', {}).get(
'stoploss', getattr(strategy, 'stoploss', -0.1))
trailing = params.get('trailing', {})
strategy.trailing_stop = trailing.get(
'trailing_stop', getattr(strategy, 'trailing_stop', False))
strategy.trailing_stop_positive = trailing.get(
'trailing_stop_positive', getattr(strategy, 'trailing_stop_positive', None))
strategy.trailing_stop_positive_offset = trailing.get(
'trailing_stop_positive_offset',
getattr(strategy, 'trailing_stop_positive_offset', 0))
strategy.trailing_only_offset_is_reached = trailing.get(
'trailing_only_offset_is_reached',
getattr(strategy, 'trailing_only_offset_is_reached', 0.0))

strategy.ft_load_params_from_file()
# Set attributes
# Check if we need to override configuration
# (Attribute name, default, subkey)
@@ -1,6 +1,7 @@
import asyncio
import logging
from copy import deepcopy
from datetime import datetime
from typing import Any, Dict, List

from fastapi import APIRouter, BackgroundTasks, Depends
@@ -102,7 +103,10 @@ async def api_start_backtest(bt_settings: BacktestRequest, background_tasks: Bac
min_date=min_date, max_date=max_date)

if btconfig.get('export', 'none') == 'trades':
store_backtest_stats(btconfig['exportfilename'], ApiServer._bt.results)
store_backtest_stats(
btconfig['exportfilename'], ApiServer._bt.results,
datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
)

logger.info("Backtest finished.")

@@ -104,6 +104,10 @@ class Profit(BaseModel):
best_pair_profit_ratio: float
winning_trades: int
losing_trades: int
profit_factor: float
max_drawdown: float
max_drawdown_abs: float
trading_volume: Optional[float]


class SellReason(BaseModel):
@@ -120,6 +124,8 @@ class Stats(BaseModel):
class DailyRecord(BaseModel):
date: date
abs_profit: float
rel_profit: float
starting_balance: float
fiat_value: float
trade_count: int

@@ -166,7 +172,7 @@ class ShowConfig(BaseModel):
trailing_stop_positive: Optional[float]
trailing_stop_positive_offset: Optional[float]
trailing_only_offset_is_reached: Optional[bool]
unfilledtimeout: UnfilledTimeout
unfilledtimeout: Optional[UnfilledTimeout] # Empty in webserver mode
order_types: Optional[OrderTypes]
use_custom_stoploss: Optional[bool]
timeframe: Optional[str]
@@ -256,6 +262,7 @@ class TradeSchema(BaseModel):

leverage: Optional[float]
interest_rate: Optional[float]
liquidation_price: Optional[float]
funding_fees: Optional[float]
trading_mode: Optional[TradingMode]

@@ -276,6 +283,7 @@ class OpenTradeSchema(TradeSchema):
class TradeResponse(BaseModel):
trades: List[TradeSchema]
trades_count: int
offset: int
total_trades: int


@@ -36,7 +36,8 @@ logger = logging.getLogger(__name__)
# versions 2.xx -> futures/short branch
# 2.14: Add entry/exit orders to trade response
# 2.15: Add backtest history endpoints
API_VERSION = 2.15
# 2.16: Additional daily metrics
API_VERSION = 2.16

# Public API, requires no auth.
router_public = APIRouter()
@@ -86,8 +87,8 @@ def stats(rpc: RPC = Depends(get_rpc)):

@router.get('/daily', response_model=Daily, tags=['info'])
def daily(timescale: int = 7, rpc: RPC = Depends(get_rpc), config=Depends(get_config)):
return rpc._rpc_daily_profit(timescale, config['stake_currency'],
config.get('fiat_display_currency', ''))
return rpc._rpc_timeunit_profit(timescale, config['stake_currency'],
config.get('fiat_display_currency', ''))


@router.get('/status', response_model=List[OpenTradeSchema], tags=['info'])
@@ -281,7 +282,7 @@ def get_strategy(strategy: str, config=Depends(get_config)):
def list_available_pairs(timeframe: Optional[str] = None, stake_currency: Optional[str] = None,
candletype: Optional[CandleType] = None, config=Depends(get_config)):

dh = get_datahandler(config['datadir'], config.get('dataformat_ohlcv', None))
dh = get_datahandler(config['datadir'], config.get('dataformat_ohlcv'))
trading_mode: TradingMode = config.get('trading_mode', TradingMode.SPOT)
pair_interval = dh.ohlcv_get_available_data(config['datadir'], trading_mode)
59
freqtrade/rpc/discord.py
Normal file
@@ -0,0 +1,59 @@
import logging
from typing import Any, Dict

from freqtrade.enums.rpcmessagetype import RPCMessageType
from freqtrade.rpc import RPC
from freqtrade.rpc.webhook import Webhook


logger = logging.getLogger(__name__)


class Discord(Webhook):
def __init__(self, rpc: 'RPC', config: Dict[str, Any]):
# super().__init__(rpc, config)
self.rpc = rpc
self.config = config
self.strategy = config.get('strategy', '')
self.timeframe = config.get('timeframe', '')

self._url = self.config['discord']['webhook_url']
self._format = 'json'
self._retries = 1
self._retry_delay = 0.1

def cleanup(self) -> None:
"""
Cleanup pending module resources.
This will do nothing for webhooks, they will simply not be called anymore
"""
pass

def send_msg(self, msg) -> None:
logger.info(f"Sending discord message: {msg}")

if msg['type'].value in self.config['discord']:

msg['strategy'] = self.strategy
msg['timeframe'] = self.timeframe
fields = self.config['discord'].get(msg['type'].value)
color = 0x0000FF
if msg['type'] in (RPCMessageType.EXIT, RPCMessageType.EXIT_FILL):
profit_ratio = msg.get('profit_ratio')
color = (0x00FF00 if profit_ratio > 0 else 0xFF0000)

embeds = [{
'title': f"Trade: {msg['pair']} {msg['type'].value}",
'color': color,
'fields': [],

}]
for f in fields:
for k, v in f.items():
v = v.format(**msg)
embeds[0]['fields'].append( # type: ignore
{'name': k, 'value': v, 'inline': True})

# Send the message to discord channel
payload = {'embeds': embeds}
self._send_msg(payload)
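Judging from the class above, the new Discord channel is driven by a `discord` config section holding the webhook URL plus, per RPC message type, a list of single-entry dicts whose values are str.format templates filled from the message. A hypothetical configuration sketch (field names and placeholders are illustrative, not taken from documentation):

discord_config = {
    "discord": {
        "webhook_url": "https://discord.com/api/webhooks/<id>/<token>",
        # One embed field per dict; values are format strings filled from msg.
        "exit_fill": [
            {"Trade ID": "{trade_id}"},
            {"Pair": "{pair}"},
            {"Profit": "{profit_amount} {stake_currency} ({profit_ratio:.2%})"},
        ],
    }
}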
@@ -18,6 +18,7 @@ from freqtrade import __version__
from freqtrade.configuration.timerange import TimeRange
from freqtrade.constants import CANCEL_REASON, DATETIME_PRINT_FORMAT
from freqtrade.data.history import load_data
from freqtrade.data.metrics import calculate_max_drawdown
from freqtrade.enums import (CandleType, ExitCheckTuple, ExitType, SignalDirection, State,
TradingMode)
from freqtrade.exceptions import ExchangeError, PricingError
@@ -96,7 +97,7 @@ class RPC:
"""
self._freqtrade = freqtrade
self._config: Dict[str, Any] = freqtrade.config
if self._config.get('fiat_display_currency', None):
if self._config.get('fiat_display_currency'):
self._fiat_converter = CryptoToFiatConverter()

@staticmethod
@@ -283,33 +284,57 @@ class RPC:
columns.append('# Entries')
return trades_list, columns, fiat_profit_sum

def _rpc_daily_profit(
def _rpc_timeunit_profit(
self, timescale: int,
stake_currency: str, fiat_display_currency: str) -> Dict[str, Any]:
today = datetime.now(timezone.utc).date()
profit_days: Dict[date, Dict] = {}
stake_currency: str, fiat_display_currency: str,
timeunit: str = 'days') -> Dict[str, Any]:
"""
:param timeunit: Valid entries are 'days', 'weeks', 'months'
"""
start_date = datetime.now(timezone.utc).date()
if timeunit == 'weeks':
# weekly
start_date = start_date - timedelta(days=start_date.weekday()) # Monday
if timeunit == 'months':
start_date = start_date.replace(day=1)

def time_offset(step: int):
if timeunit == 'months':
return relativedelta(months=step)
return timedelta(**{timeunit: step})

if not (isinstance(timescale, int) and timescale > 0):
raise RPCException('timescale must be an integer greater than 0')

profit_units: Dict[date, Dict] = {}
daily_stake = self._freqtrade.wallets.get_total_stake_amount()

for day in range(0, timescale):
profitday = today - timedelta(days=day)
trades = Trade.get_trades(trade_filter=[
profitday = start_date - time_offset(day)
# Only query for necessary columns for performance reasons.
trades = Trade.query.session.query(Trade.close_profit_abs).filter(
Trade.is_open.is_(False),
Trade.close_date >= profitday,
Trade.close_date < (profitday + timedelta(days=1))
]).order_by(Trade.close_date).all()
Trade.close_date < (profitday + time_offset(1))
).order_by(Trade.close_date).all()

curdayprofit = sum(
trade.close_profit_abs for trade in trades if trade.close_profit_abs is not None)
profit_days[profitday] = {
# Calculate this periods starting balance
daily_stake = daily_stake - curdayprofit
profit_units[profitday] = {
'amount': curdayprofit,
'trades': len(trades)
'daily_stake': daily_stake,
'rel_profit': round(curdayprofit / daily_stake, 8) if daily_stake > 0 else 0,
'trades': len(trades),
}

data = [
{
'date': key,
'date': f"{key.year}-{key.month:02d}" if timeunit == 'months' else key,
'abs_profit': value["amount"],
'starting_balance': value["daily_stake"],
'rel_profit': value["rel_profit"],
'fiat_value': self._fiat_converter.convert_amount(
value['amount'],
stake_currency,
@@ -317,92 +342,7 @@ class RPC:
) if self._fiat_converter else 0,
'trade_count': value["trades"],
}
for key, value in profit_days.items()
]
return {
'stake_currency': stake_currency,
'fiat_display_currency': fiat_display_currency,
'data': data
}

def _rpc_weekly_profit(
self, timescale: int,
stake_currency: str, fiat_display_currency: str) -> Dict[str, Any]:
today = datetime.now(timezone.utc).date()
first_iso_day_of_week = today - timedelta(days=today.weekday()) # Monday
profit_weeks: Dict[date, Dict] = {}

if not (isinstance(timescale, int) and timescale > 0):
raise RPCException('timescale must be an integer greater than 0')

for week in range(0, timescale):
profitweek = first_iso_day_of_week - timedelta(weeks=week)
trades = Trade.get_trades(trade_filter=[
Trade.is_open.is_(False),
Trade.close_date >= profitweek,
Trade.close_date < (profitweek + timedelta(weeks=1))
]).order_by(Trade.close_date).all()
curweekprofit = sum(
trade.close_profit_abs for trade in trades if trade.close_profit_abs is not None)
profit_weeks[profitweek] = {
'amount': curweekprofit,
'trades': len(trades)
}

data = [
{
'date': key,
'abs_profit': value["amount"],
'fiat_value': self._fiat_converter.convert_amount(
value['amount'],
stake_currency,
fiat_display_currency
) if self._fiat_converter else 0,
'trade_count': value["trades"],
}
for key, value in profit_weeks.items()
]
return {
'stake_currency': stake_currency,
'fiat_display_currency': fiat_display_currency,
'data': data
}

def _rpc_monthly_profit(
self, timescale: int,
stake_currency: str, fiat_display_currency: str) -> Dict[str, Any]:
first_day_of_month = datetime.now(timezone.utc).date().replace(day=1)
profit_months: Dict[date, Dict] = {}

if not (isinstance(timescale, int) and timescale > 0):
raise RPCException('timescale must be an integer greater than 0')

for month in range(0, timescale):
profitmonth = first_day_of_month - relativedelta(months=month)
trades = Trade.get_trades(trade_filter=[
Trade.is_open.is_(False),
Trade.close_date >= profitmonth,
Trade.close_date < (profitmonth + relativedelta(months=1))
]).order_by(Trade.close_date).all()
curmonthprofit = sum(
trade.close_profit_abs for trade in trades if trade.close_profit_abs is not None)
profit_months[profitmonth] = {
'amount': curmonthprofit,
'trades': len(trades)
}

data = [
{
'date': f"{key.year}-{key.month:02d}",
'abs_profit': value["amount"],
'fiat_value': self._fiat_converter.convert_amount(
value['amount'],
stake_currency,
fiat_display_currency
) if self._fiat_converter else 0,
'trade_count': value["trades"],
}
for key, value in profit_months.items()
for key, value in profit_units.items()
]
return {
'stake_currency': stake_currency,
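The three near-identical daily/weekly/monthly implementations collapse into one _rpc_timeunit_profit because only the window arithmetic differed; months need relativedelta since their length varies. A standalone check of the time_offset idea (written with timeunit as an explicit argument rather than a closure):

from datetime import date, timedelta
from dateutil.relativedelta import relativedelta

def time_offset(timeunit: str, step: int):
    if timeunit == 'months':
        return relativedelta(months=step)
    return timedelta(**{timeunit: step})

print(date(2022, 3, 31) - time_offset('months', 1))  # 2022-02-28 (clamped to month end)
print(date(2022, 3, 31) - time_offset('weeks', 1))   # 2022-03-24
print(date(2022, 3, 31) - time_offset('days', 1))    # 2022-03-30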
@@ -425,6 +365,7 @@ class RPC:
        return {
            "trades": output,
            "trades_count": len(output),
            "offset": offset,
            "total_trades": Trade.get_trades([Trade.is_open.is_(False)]).count(),
        }

@@ -439,7 +380,7 @@ class RPC:
                return 'losses'
            else:
                return 'draws'
        trades: List[Trade] = Trade.get_trades([Trade.is_open.is_(False)])
        trades: List[Trade] = Trade.get_trades([Trade.is_open.is_(False)], include_orders=False)
        # Sell reason
        exit_reasons = {}
        for trade in trades:
@@ -467,7 +408,8 @@ class RPC:
        """ Returns cumulative profit statistics """
        trade_filter = ((Trade.is_open.is_(False) & (Trade.close_date >= start_date)) |
                        Trade.is_open.is_(True))
        trades: List[Trade] = Trade.get_trades(trade_filter).order_by(Trade.id).all()
        trades: List[Trade] = Trade.get_trades(
            trade_filter, include_orders=False).order_by(Trade.id).all()

        profit_all_coin = []
        profit_all_ratio = []
@@ -476,6 +418,8 @@ class RPC:
        durations = []
        winning_trades = 0
        losing_trades = 0
        winning_profit = 0.0
        losing_profit = 0.0

        for trade in trades:
            current_rate: float = 0.0
@@ -491,8 +435,10 @@ class RPC:
                profit_closed_ratio.append(profit_ratio)
                if trade.close_profit >= 0:
                    winning_trades += 1
                    winning_profit += trade.close_profit_abs
                else:
                    losing_trades += 1
                    losing_profit += trade.close_profit_abs
            else:
                # Get current rate
                try:
@@ -508,6 +454,7 @@ class RPC:
            profit_all_ratio.append(profit_ratio)

        best_pair = Trade.get_best_pair(start_date)
        trading_volume = Trade.get_trading_volume(start_date)

        # Prepare data to display
        profit_closed_coin_sum = round(sum(profit_closed_coin), 8)
@@ -531,6 +478,21 @@ class RPC:
            profit_closed_ratio_fromstart = profit_closed_coin_sum / starting_balance
            profit_all_ratio_fromstart = profit_all_coin_sum / starting_balance

        profit_factor = winning_profit / abs(losing_profit) if losing_profit else float('inf')

        trades_df = DataFrame([{'close_date': trade.close_date.strftime(DATETIME_PRINT_FORMAT),
                                'profit_abs': trade.close_profit_abs}
                               for trade in trades if not trade.is_open])
        max_drawdown_abs = 0.0
        max_drawdown = 0.0
        if len(trades_df) > 0:
            try:
                (max_drawdown_abs, _, _, _, _, max_drawdown) = calculate_max_drawdown(
                    trades_df, value_col='profit_abs', starting_balance=starting_balance)
            except ValueError:
                # ValueError if no losing trade.
                pass

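Editor's note (illustration, not part of the commit): the profit factor introduced above is gross winnings divided by absolute gross losses, falling back to infinity when nothing was lost. A worked example with made-up numbers:

# Editor's note -- made-up numbers, only to illustrate the formula above.
winning_profit = 120.0   # sum of close_profit_abs over winning trades
losing_profit = -40.0    # sum of close_profit_abs over losing trades

profit_factor = winning_profit / abs(losing_profit) if losing_profit else float('inf')
assert profit_factor == 3.0  # 3 units won for every unit lost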
        profit_all_fiat = self._fiat_converter.convert_amount(
            profit_all_coin_sum,
            stake_currency,
@@ -569,11 +531,15 @@ class RPC:
            'best_pair_profit_ratio': best_pair[1] if best_pair else 0,
            'winning_trades': winning_trades,
            'losing_trades': losing_trades,
            'profit_factor': profit_factor,
            'max_drawdown': max_drawdown,
            'max_drawdown_abs': max_drawdown_abs,
            'trading_volume': trading_volume,
        }

    def _rpc_balance(self, stake_currency: str, fiat_display_currency: str) -> Dict:
        """ Returns current account balance per crypto """
        currencies = []
        currencies: List[Dict] = []
        total = 0.0
        try:
            tickers = self._freqtrade.exchange.get_tickers(cached=True)
@@ -600,7 +566,7 @@ class RPC:
            else:
                try:
                    pair = self._freqtrade.exchange.get_valid_pair_combination(coin, stake_currency)
                    rate = tickers.get(pair, {}).get('last', None)
                    rate = tickers.get(pair, {}).get('last')
                    if rate:
                        if pair.startswith(stake_currency) and not pair.endswith(stake_currency):
                            rate = 1.0 / rate
@@ -608,13 +574,12 @@ class RPC:
                except (ExchangeError):
                    logger.warning(f" Could not get rate for pair {coin}.")
                    continue
            total = total + (est_stake or 0)
            total = total + est_stake
            currencies.append({
                'currency': coin,
                # TODO: The below can be simplified if we don't assign None to values.
                'free': balance.free if balance.free is not None else 0,
                'balance': balance.total if balance.total is not None else 0,
                'used': balance.used if balance.used is not None else 0,
                'free': balance.free,
                'balance': balance.total,
                'used': balance.used,
                'est_stake': est_stake or 0,
                'stake': stake_currency,
                'side': 'long',
@@ -644,7 +609,6 @@ class RPC:
            total, stake_currency, fiat_display_currency) if self._fiat_converter else 0

        trade_count = len(Trade.get_trades_proxy())
        starting_capital_ratio = 0.0
        starting_capital_ratio = (total / starting_capital) - 1 if starting_capital else 0.0
        starting_cap_fiat_ratio = (value / starting_cap_fiat) - 1 if starting_cap_fiat else 0.0

@@ -932,7 +896,7 @@ class RPC:
        else:
            errors[pair] = {
                'error_msg': f"Pair {pair} is not in the current blacklist."
            }
        resp = self._rpc_blacklist()
        resp['errors'] = errors
        return resp
@@ -27,6 +27,12 @@ class RPCManager:
            from freqtrade.rpc.telegram import Telegram
            self.registered_modules.append(Telegram(self._rpc, config))

        # Enable discord
        if config.get('discord', {}).get('enabled', False):
            logger.info('Enabling rpc.discord ...')
            from freqtrade.rpc.discord import Discord
            self.registered_modules.append(Discord(self._rpc, config))

        # Enable Webhook
        if config.get('webhook', {}).get('enabled', False):
            logger.info('Enabling rpc.webhook ...')
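Editor's note: with this hunk, Discord notifications are switched on purely via configuration, mirroring the existing webhook toggle. A hypothetical config fragment matching the `config.get(...)` lookups above (only the 'enabled' keys are taken from the diff; the other keys are assumptions):

# Editor's note -- hypothetical configuration fragment.
config = {
    "discord": {"enabled": True, "webhook_url": "https://discord.com/api/webhooks/..."},
    "webhook": {"enabled": True, "url": "https://example.com/freqtrade-hook"},
}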
@@ -6,6 +6,7 @@ This module manages Telegram communication
import json
import logging
import re
from dataclasses import dataclass
from datetime import date, datetime, timedelta
from functools import partial
from html import escape
@@ -37,6 +38,15 @@ logger.debug('Included module rpc.telegram ...')
MAX_TELEGRAM_MESSAGE_LENGTH = 4096


@dataclass
class TimeunitMappings:
    header: str
    message: str
    message2: str
    callback: str
    default: int


def authorized_only(command_handler: Callable[..., None]) -> Callable[..., Any]:
    """
    Decorator to check if the message comes from the correct chat_id
@@ -225,6 +235,14 @@ class Telegram(RPCHandler):
        # This can take up to `timeout` from the call to `start_polling`.
        self._updater.stop()

    def _exchange_from_msg(self, msg: Dict[str, Any]) -> str:
        """
        Extracts the exchange name from the given message.
        :param msg: The message to extract the exchange name from.
        :return: The exchange name.
        """
        return f"{msg['exchange']}{' (dry)' if self._config['dry_run'] else ''}"

    def _format_entry_msg(self, msg: Dict[str, Any]) -> str:
        if self._rpc._fiat_converter:
            msg['stake_amount_fiat'] = self._rpc._fiat_converter.convert_amount(
@@ -237,11 +255,11 @@ class Telegram(RPCHandler):
        entry_side = ({'enter': 'Long', 'entered': 'Longed'} if msg['direction'] == 'Long'
                      else {'enter': 'Short', 'entered': 'Shorted'})
        message = (
            f"{emoji} *{msg['exchange']}:*"
            f"{emoji} *{self._exchange_from_msg(msg)}:*"
            f" {entry_side['entered'] if is_fill else entry_side['enter']} {msg['pair']}"
            f" (#{msg['trade_id']})\n"
        )
        message += f"*Enter Tag:* `{msg['enter_tag']}`\n" if msg.get('enter_tag', None) else ""
        message += f"*Enter Tag:* `{msg['enter_tag']}`\n" if msg.get('enter_tag') else ""
        message += f"*Amount:* `{msg['amount']:.8f}`\n"
        if msg.get('leverage') and msg.get('leverage', 1.0) != 1.0:
            message += f"*Leverage:* `{msg['leverage']}`\n"
@@ -254,7 +272,7 @@ class Telegram(RPCHandler):

        message += f"*Total:* `({round_coin_value(msg['stake_amount'], msg['stake_currency'])}"

        if msg.get('fiat_currency', None):
        if msg.get('fiat_currency'):
            message += f", {round_coin_value(msg['stake_amount_fiat'], msg['fiat_currency'])}"

        message += ")`"
@@ -270,7 +288,7 @@ class Telegram(RPCHandler):
        msg['enter_tag'] = msg['enter_tag'] if "enter_tag" in msg.keys() else None
        msg['emoji'] = self._get_sell_emoji(msg)
        msg['leverage_text'] = (f"*Leverage:* `{msg['leverage']:.1f}`\n"
                                if msg.get('leverage', None) and msg.get('leverage', 1.0) != 1.0
                                if msg.get('leverage') and msg.get('leverage', 1.0) != 1.0
                                else "")

        # Check if all sell properties are available.
@@ -286,7 +304,7 @@ class Telegram(RPCHandler):
            msg['profit_extra'] = ''
        is_fill = msg['type'] == RPCMessageType.EXIT_FILL
        message = (
            f"{msg['emoji']} *{msg['exchange']}:* "
            f"{msg['emoji']} *{self._exchange_from_msg(msg)}:* "
            f"{'Exited' if is_fill else 'Exiting'} {msg['pair']} (#{msg['trade_id']})\n"
            f"*{'Profit' if is_fill else 'Unrealized Profit'}:* "
            f"`{msg['profit_ratio']:.2%}{msg['profit_extra']}`\n"
@@ -316,33 +334,33 @@ class Telegram(RPCHandler):

        elif msg_type in (RPCMessageType.ENTRY_CANCEL, RPCMessageType.EXIT_CANCEL):
            msg['message_side'] = 'enter' if msg_type in [RPCMessageType.ENTRY_CANCEL] else 'exit'
            message = ("\N{WARNING SIGN} *{exchange}:* "
                       "Cancelling {message_side} Order for {pair} (#{trade_id}). "
                       "Reason: {reason}.".format(**msg))
            message = (f"\N{WARNING SIGN} *{self._exchange_from_msg(msg)}:* "
                       f"Cancelling {msg['message_side']} Order for {msg['pair']} "
                       f"(#{msg['trade_id']}). Reason: {msg['reason']}.")

        elif msg_type == RPCMessageType.PROTECTION_TRIGGER:
            message = (
                "*Protection* triggered due to {reason}. "
                "`{pair}` will be locked until `{lock_end_time}`."
            ).format(**msg)
                f"*Protection* triggered due to {msg['reason']}. "
                f"`{msg['pair']}` will be locked until `{msg['lock_end_time']}`."
            )

        elif msg_type == RPCMessageType.PROTECTION_TRIGGER_GLOBAL:
            message = (
                "*Protection* triggered due to {reason}. "
                "*All pairs* will be locked until `{lock_end_time}`."
            ).format(**msg)
                f"*Protection* triggered due to {msg['reason']}. "
                f"*All pairs* will be locked until `{msg['lock_end_time']}`."
            )

        elif msg_type == RPCMessageType.STATUS:
            message = '*Status:* `{status}`'.format(**msg)
            message = f"*Status:* `{msg['status']}`"

        elif msg_type == RPCMessageType.WARNING:
            message = '\N{WARNING SIGN} *Warning:* `{status}`'.format(**msg)
            message = f"\N{WARNING SIGN} *Warning:* `{msg['status']}`"

        elif msg_type == RPCMessageType.STARTUP:
            message = '{status}'.format(**msg)
            message = f"{msg['status']}"

        else:
            raise NotImplementedError('Unknown message type: {}'.format(msg_type))
            raise NotImplementedError(f"Unknown message type: {msg_type}")
        return message

    def send_msg(self, msg: Dict[str, Any]) -> None:
@@ -396,7 +414,7 @@ class Telegram(RPCHandler):
        first_avg = filled_orders[0]["safe_price"]

        for x, order in enumerate(filled_orders):
            if not order['ft_is_entry']:
            if not order['ft_is_entry'] or order['is_open'] is True:
                continue
            cur_entry_datetime = arrow.get(order["order_filled_date"])
            cur_entry_amount = order["amount"]
@@ -563,6 +581,60 @@ class Telegram(RPCHandler):
        except RPCException as e:
            self._send_msg(str(e))

    @authorized_only
    def _timeunit_stats(self, update: Update, context: CallbackContext, unit: str) -> None:
        """
        Handler for /daily <n>, /weekly <n> and /monthly <n>
        Returns a profit summary (in stake currency) over the last n time units.
        :param update: message update
        :return: None
        """

        vals = {
            'days': TimeunitMappings('Day', 'Daily', 'days', 'update_daily', 7),
            'weeks': TimeunitMappings('Monday', 'Weekly', 'weeks (starting from Monday)',
                                      'update_weekly', 8),
            'months': TimeunitMappings('Month', 'Monthly', 'months', 'update_monthly', 6),
        }
        val = vals[unit]

        stake_cur = self._config['stake_currency']
        fiat_disp_cur = self._config.get('fiat_display_currency', '')
        try:
            timescale = int(context.args[0]) if context.args else val.default
        except (TypeError, ValueError, IndexError):
            timescale = val.default
        try:
            stats = self._rpc._rpc_timeunit_profit(
                timescale,
                stake_cur,
                fiat_disp_cur,
                unit
            )
            stats_tab = tabulate(
                [[f"{period['date']} ({period['trade_count']})",
                  f"{round_coin_value(period['abs_profit'], stats['stake_currency'])}",
                  f"{period['fiat_value']:.2f} {stats['fiat_display_currency']}",
                  f"{period['rel_profit']:.2%}",
                  ] for period in stats['data']],
                headers=[
                    f"{val.header} (count)",
                    f'{stake_cur}',
                    f'{fiat_disp_cur}',
                    'Profit %',
                ],
                tablefmt='simple')
            message = (
                f'<b>{val.message} Profit over the last {timescale} {val.message2}</b>:\n'
                f'<pre>{stats_tab}</pre>'
            )
            self._send_msg(message, parse_mode=ParseMode.HTML, reload_able=True,
                           callback_path=val.callback, query=update.callback_query)
        except RPCException as e:
            self._send_msg(str(e))

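Editor's note: `_timeunit_stats` collapses the three near-identical /daily, /weekly and /monthly handlers into one, with every per-unit difference carried by the `TimeunitMappings` dataclass. A small sketch of the lookup it performs:

# Editor's note -- sketch of the dataclass-driven dispatch shown above.
val = vals['weeks']          # one of 'days' / 'weeks' / 'months'
print(val.header)            # 'Monday'  (table header for the period column)
print(val.callback)          # 'update_weekly' (telegram refresh callback)
print(val.default)           # 8 periods shown when no argument is given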
    @authorized_only
    def _daily(self, update: Update, context: CallbackContext) -> None:
        """
@@ -572,35 +644,7 @@ class Telegram(RPCHandler):
        :param update: message update
        :return: None
        """
        stake_cur = self._config['stake_currency']
        fiat_disp_cur = self._config.get('fiat_display_currency', '')
        try:
            timescale = int(context.args[0]) if context.args else 7
        except (TypeError, ValueError, IndexError):
            timescale = 7
        try:
            stats = self._rpc._rpc_daily_profit(
                timescale,
                stake_cur,
                fiat_disp_cur
            )
            stats_tab = tabulate(
                [[day['date'],
                  f"{round_coin_value(day['abs_profit'], stats['stake_currency'])}",
                  f"{day['fiat_value']:.3f} {stats['fiat_display_currency']}",
                  f"{day['trade_count']} trades"] for day in stats['data']],
                headers=[
                    'Day',
                    f'Profit {stake_cur}',
                    f'Profit {fiat_disp_cur}',
                    'Trades',
                ],
                tablefmt='simple')
            message = f'<b>Daily Profit over the last {timescale} days</b>:\n<pre>{stats_tab}</pre>'
            self._send_msg(message, parse_mode=ParseMode.HTML, reload_able=True,
                           callback_path="update_daily", query=update.callback_query)
        except RPCException as e:
            self._send_msg(str(e))
        self._timeunit_stats(update, context, 'days')

    @authorized_only
    def _weekly(self, update: Update, context: CallbackContext) -> None:
@@ -611,36 +655,7 @@ class Telegram(RPCHandler):
        :param update: message update
        :return: None
        """
        stake_cur = self._config['stake_currency']
        fiat_disp_cur = self._config.get('fiat_display_currency', '')
        try:
            timescale = int(context.args[0]) if context.args else 8
        except (TypeError, ValueError, IndexError):
            timescale = 8
        try:
            stats = self._rpc._rpc_weekly_profit(
                timescale,
                stake_cur,
                fiat_disp_cur
            )
            stats_tab = tabulate(
                [[week['date'],
                  f"{round_coin_value(week['abs_profit'], stats['stake_currency'])}",
                  f"{week['fiat_value']:.3f} {stats['fiat_display_currency']}",
                  f"{week['trade_count']} trades"] for week in stats['data']],
                headers=[
                    'Monday',
                    f'Profit {stake_cur}',
                    f'Profit {fiat_disp_cur}',
                    'Trades',
                ],
                tablefmt='simple')
            message = f'<b>Weekly Profit over the last {timescale} weeks ' \
                      f'(starting from Monday)</b>:\n<pre>{stats_tab}</pre> '
            self._send_msg(message, parse_mode=ParseMode.HTML, reload_able=True,
                           callback_path="update_weekly", query=update.callback_query)
        except RPCException as e:
            self._send_msg(str(e))
        self._timeunit_stats(update, context, 'weeks')

    @authorized_only
    def _monthly(self, update: Update, context: CallbackContext) -> None:
@@ -651,36 +666,7 @@ class Telegram(RPCHandler):
        :param update: message update
        :return: None
        """
        stake_cur = self._config['stake_currency']
        fiat_disp_cur = self._config.get('fiat_display_currency', '')
        try:
            timescale = int(context.args[0]) if context.args else 6
        except (TypeError, ValueError, IndexError):
            timescale = 6
        try:
            stats = self._rpc._rpc_monthly_profit(
                timescale,
                stake_cur,
                fiat_disp_cur
            )
            stats_tab = tabulate(
                [[month['date'],
                  f"{round_coin_value(month['abs_profit'], stats['stake_currency'])}",
                  f"{month['fiat_value']:.3f} {stats['fiat_display_currency']}",
                  f"{month['trade_count']} trades"] for month in stats['data']],
                headers=[
                    'Month',
                    f'Profit {stake_cur}',
                    f'Profit {fiat_disp_cur}',
                    'Trades',
                ],
                tablefmt='simple')
            message = f'<b>Monthly Profit over the last {timescale} months' \
                      f'</b>:\n<pre>{stats_tab}</pre> '
            self._send_msg(message, parse_mode=ParseMode.HTML, reload_able=True,
                           callback_path="update_monthly", query=update.callback_query)
        except RPCException as e:
            self._send_msg(str(e))
        self._timeunit_stats(update, context, 'months')

    @authorized_only
    def _profit(self, update: Update, context: CallbackContext) -> None:
@@ -744,12 +730,18 @@ class Telegram(RPCHandler):
            f"*Total Trade Count:* `{trade_count}`\n"
            f"*{'First Trade opened' if not timescale else 'Showing Profit since'}:* "
            f"`{first_trade_date}`\n"
            f"*Latest Trade opened:* `{latest_trade_date}\n`"
            f"*Latest Trade opened:* `{latest_trade_date}`\n"
            f"*Win / Loss:* `{stats['winning_trades']} / {stats['losing_trades']}`"
        )
        if stats['closed_trade_count'] > 0:
            markdown_msg += (f"\n*Avg. Duration:* `{avg_duration}`\n"
                             f"*Best Performing:* `{best_pair}: {best_pair_profit_ratio:.2%}`")
            markdown_msg += (
                f"\n*Avg. Duration:* `{avg_duration}`\n"
                f"*Best Performing:* `{best_pair}: {best_pair_profit_ratio:.2%}`\n"
                f"*Trading volume:* `{round_coin_value(stats['trading_volume'], stake_cur)}`\n"
                f"*Profit factor:* `{stats['profit_factor']:.2f}`\n"
                f"*Max Drawdown:* `{stats['max_drawdown']:.2%} "
                f"({round_coin_value(stats['max_drawdown_abs'], stake_cur)})`"
            )
        self._send_msg(markdown_msg, reload_able=True, callback_path="update_profit",
                       query=update.callback_query)

@@ -785,7 +777,7 @@ class Telegram(RPCHandler):
                headers=['Exit Reason', 'Exits', 'Wins', 'Losses']
            )
            if len(exit_reasons_tabulate) > 25:
                self._send_msg(exit_reasons_msg, ParseMode.MARKDOWN)
                self._send_msg(f"```\n{exit_reasons_msg}```", ParseMode.MARKDOWN)
                exit_reasons_msg = ''

        durations = stats['durations']
@@ -889,7 +881,7 @@ class Telegram(RPCHandler):
        :return: None
        """
        msg = self._rpc._rpc_start()
        self._send_msg('Status: `{status}`'.format(**msg))
        self._send_msg(f"Status: `{msg['status']}`")

    @authorized_only
    def _stop(self, update: Update, context: CallbackContext) -> None:
@@ -901,7 +893,7 @@ class Telegram(RPCHandler):
        :return: None
        """
        msg = self._rpc._rpc_stop()
        self._send_msg('Status: `{status}`'.format(**msg))
        self._send_msg(f"Status: `{msg['status']}`")

    @authorized_only
    def _reload_config(self, update: Update, context: CallbackContext) -> None:
@@ -913,7 +905,7 @@ class Telegram(RPCHandler):
        :return: None
        """
        msg = self._rpc._rpc_reload_config()
        self._send_msg('Status: `{status}`'.format(**msg))
        self._send_msg(f"Status: `{msg['status']}`")

    @authorized_only
    def _stopbuy(self, update: Update, context: CallbackContext) -> None:
@@ -925,7 +917,7 @@ class Telegram(RPCHandler):
        :return: None
        """
        msg = self._rpc._rpc_stopbuy()
        self._send_msg('Status: `{status}`'.format(**msg))
        self._send_msg(f"Status: `{msg['status']}`")

    @authorized_only
    def _force_exit(self, update: Update, context: CallbackContext) -> None:
@@ -1087,9 +1079,9 @@ class Telegram(RPCHandler):
            trade_id = int(context.args[0])
            msg = self._rpc._rpc_delete(trade_id)
            self._send_msg((
                '`{result_msg}`\n'
                f"`{msg['result_msg']}`\n"
                'Please make sure to take care of this asset on the exchange manually.'
            ).format(**msg))
            ))

        except RPCException as e:
            self._send_msg(str(e))
@@ -1410,14 +1402,14 @@ class Telegram(RPCHandler):
            "Optionally takes a rate at which to sell "
            "(only applies to limit orders).` \n")
        message = (
            "_BotControl_\n"
            "_Bot Control_\n"
            "------------\n"
            "*/start:* `Starts the trader`\n"
            "*/stop:* `Stops the trader`\n"
            "*/stopbuy:* `Stops buying, but handles open trades gracefully` \n"
            "*/forceexit <trade_id>|all:* `Instantly exits the given trade or all trades, "
            "regardless of profit`\n"
            "*/fe <trade_id>|all:* `Alias to /forceexit`"
            "*/fx <trade_id>|all:* `Alias to /forceexit`\n"
            f"{force_enter_text if self._config.get('force_entry_enable', False) else ''}"
            "*/delete <trade_id>:* `Instantly delete the given trade in the database`\n"
            "*/whitelist:* `Show current whitelist` \n"
@@ -45,21 +45,21 @@ class Webhook(RPCHandler):
        try:
            whconfig = self._config['webhook']
            if msg['type'] in [RPCMessageType.ENTRY]:
                valuedict = whconfig.get('webhookentry', None)
                valuedict = whconfig.get('webhookentry')
            elif msg['type'] in [RPCMessageType.ENTRY_CANCEL]:
                valuedict = whconfig.get('webhookentrycancel', None)
                valuedict = whconfig.get('webhookentrycancel')
            elif msg['type'] in [RPCMessageType.ENTRY_FILL]:
                valuedict = whconfig.get('webhookentryfill', None)
                valuedict = whconfig.get('webhookentryfill')
            elif msg['type'] == RPCMessageType.EXIT:
                valuedict = whconfig.get('webhookexit', None)
                valuedict = whconfig.get('webhookexit')
            elif msg['type'] == RPCMessageType.EXIT_FILL:
                valuedict = whconfig.get('webhookexitfill', None)
                valuedict = whconfig.get('webhookexitfill')
            elif msg['type'] == RPCMessageType.EXIT_CANCEL:
                valuedict = whconfig.get('webhookexitcancel', None)
                valuedict = whconfig.get('webhookexitcancel')
            elif msg['type'] in (RPCMessageType.STATUS,
                                 RPCMessageType.STARTUP,
                                 RPCMessageType.WARNING):
                valuedict = whconfig.get('webhookstatus', None)
                valuedict = whconfig.get('webhookstatus')
            else:
                raise NotImplementedError('Unknown message type: {}'.format(msg['type']))
            if not valuedict:
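Editor's note: the `.get()` cleanup above is behavior-neutral, since `dict.get` already defaults to `None`; each message type still resolves to its own template section under the 'webhook' config key. A hypothetical section matching those lookups (template strings are illustrative):

# Editor's note -- hypothetical 'webhook' config section; key names mirror
# the whconfig.get(...) calls above, values are payload templates.
webhook_config = {
    "enabled": True,
    "url": "https://example.com/freqtrade",
    "webhookentry": {"value1": "Entering {pair}"},
    "webhookexit": {"value1": "Exiting {pair} ({exit_reason})"},
    "webhookstatus": {"value1": "Status: {status}"},
}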
@@ -1,9 +1,9 @@
# flake8: noqa: F401
from freqtrade.exchange import (timeframe_to_minutes, timeframe_to_msecs, timeframe_to_next_date,
                                timeframe_to_prev_date, timeframe_to_seconds)
from freqtrade.strategy.hyper import (BooleanParameter, CategoricalParameter, DecimalParameter,
                                      IntParameter, RealParameter)
from freqtrade.strategy.informative_decorator import informative
from freqtrade.strategy.interface import IStrategy
from freqtrade.strategy.parameters import (BooleanParameter, CategoricalParameter, DecimalParameter,
                                           IntParameter, RealParameter)
from freqtrade.strategy.strategy_helper import (merge_informative_pair, stoploss_from_absolute,
                                                stoploss_from_open)
@@ -3,295 +3,18 @@ IHyperStrategy interface, hyperoptable Parameter class.
This module defines a base class for auto-hyperoptable strategies.
"""
import logging
from abc import ABC, abstractmethod
from contextlib import suppress
from pathlib import Path
from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple, Union
from typing import Any, Dict, Iterator, List, Tuple, Type, Union

from freqtrade.exceptions import OperationalException
from freqtrade.misc import deep_merge_dicts, json_load
from freqtrade.optimize.hyperopt_tools import HyperoptTools


with suppress(ImportError):
    from skopt.space import Integer, Real, Categorical
    from freqtrade.optimize.space import SKDecimal

from freqtrade.enums import RunMode
from freqtrade.exceptions import OperationalException
from freqtrade.strategy.parameters import BaseParameter


logger = logging.getLogger(__name__)


class BaseParameter(ABC):
    """
    Defines a parameter that can be optimized by hyperopt.
    """
    category: Optional[str]
    default: Any
    value: Any
    in_space: bool = False
    name: str

    def __init__(self, *, default: Any, space: Optional[str] = None,
                 optimize: bool = True, load: bool = True, **kwargs):
        """
        Initialize hyperopt-optimizable parameter.
        :param space: A parameter category. Can be 'buy' or 'sell'. This parameter is optional if
            parameter field name is prefixed with 'buy_' or 'sell_'.
        :param optimize: Include parameter in hyperopt optimizations.
        :param load: Load parameter value from {space}_params.
        :param kwargs: Extra parameters to skopt.space.(Integer|Real|Categorical).
        """
        if 'name' in kwargs:
            raise OperationalException(
                'Name is determined by parameter field name and can not be specified manually.')
        self.category = space
        self._space_params = kwargs
        self.value = default
        self.optimize = optimize
        self.load = load

    def __repr__(self):
        return f'{self.__class__.__name__}({self.value})'

    @abstractmethod
    def get_space(self, name: str) -> Union['Integer', 'Real', 'SKDecimal', 'Categorical']:
        """
        Get-space - will be used by Hyperopt to get the hyperopt Space
        """


class NumericParameter(BaseParameter):
    """ Internal parameter used for Numeric purposes """
    float_or_int = Union[int, float]
    default: float_or_int
    value: float_or_int

    def __init__(self, low: Union[float_or_int, Sequence[float_or_int]],
                 high: Optional[float_or_int] = None, *, default: float_or_int,
                 space: Optional[str] = None, optimize: bool = True, load: bool = True, **kwargs):
        """
        Initialize hyperopt-optimizable numeric parameter.
        Cannot be instantiated, but provides the validation for other numeric parameters
        :param low: Lower end (inclusive) of optimization space or [low, high].
        :param high: Upper end (inclusive) of optimization space.
            Must be None if entire range is passed as first parameter.
        :param default: A default value.
        :param space: A parameter category. Can be 'buy' or 'sell'. This parameter is optional if
            parameter fieldname is prefixed with 'buy_' or 'sell_'.
        :param optimize: Include parameter in hyperopt optimizations.
        :param load: Load parameter value from {space}_params.
        :param kwargs: Extra parameters to skopt.space.*.
        """
        if high is not None and isinstance(low, Sequence):
            raise OperationalException(f'{self.__class__.__name__} space invalid.')
        if high is None or isinstance(low, Sequence):
            if not isinstance(low, Sequence) or len(low) != 2:
                raise OperationalException(f'{self.__class__.__name__} space must be [low, high]')
            self.low, self.high = low
        else:
            self.low = low
            self.high = high

        super().__init__(default=default, space=space, optimize=optimize,
                         load=load, **kwargs)


class IntParameter(NumericParameter):
    default: int
    value: int

    def __init__(self, low: Union[int, Sequence[int]], high: Optional[int] = None, *, default: int,
                 space: Optional[str] = None, optimize: bool = True, load: bool = True, **kwargs):
        """
        Initialize hyperopt-optimizable integer parameter.
        :param low: Lower end (inclusive) of optimization space or [low, high].
        :param high: Upper end (inclusive) of optimization space.
            Must be None if entire range is passed as first parameter.
        :param default: A default value.
        :param space: A parameter category. Can be 'buy' or 'sell'. This parameter is optional if
            parameter fieldname is prefixed with 'buy_' or 'sell_'.
        :param optimize: Include parameter in hyperopt optimizations.
        :param load: Load parameter value from {space}_params.
        :param kwargs: Extra parameters to skopt.space.Integer.
        """

        super().__init__(low=low, high=high, default=default, space=space, optimize=optimize,
                         load=load, **kwargs)

    def get_space(self, name: str) -> 'Integer':
        """
        Create skopt optimization space.
        :param name: A name of parameter field.
        """
        return Integer(low=self.low, high=self.high, name=name, **self._space_params)

    @property
    def range(self):
        """
        Get each value in this space as list.
        Returns a List from low to high (inclusive) in Hyperopt mode.
        Returns a List with 1 item (`value`) in "non-hyperopt" mode, to avoid
        calculating hundreds of indicators.
        """
        if self.in_space and self.optimize:
            # Scikit-optimize ranges are "inclusive", while python's "range" is exclusive
            return range(self.low, self.high + 1)
        else:
            return range(self.value, self.value + 1)


class RealParameter(NumericParameter):
    default: float
    value: float

    def __init__(self, low: Union[float, Sequence[float]], high: Optional[float] = None, *,
                 default: float, space: Optional[str] = None, optimize: bool = True,
                 load: bool = True, **kwargs):
        """
        Initialize hyperopt-optimizable floating point parameter with unlimited precision.
        :param low: Lower end (inclusive) of optimization space or [low, high].
        :param high: Upper end (inclusive) of optimization space.
            Must be None if entire range is passed as first parameter.
        :param default: A default value.
        :param space: A parameter category. Can be 'buy' or 'sell'. This parameter is optional if
            parameter fieldname is prefixed with 'buy_' or 'sell_'.
        :param optimize: Include parameter in hyperopt optimizations.
        :param load: Load parameter value from {space}_params.
        :param kwargs: Extra parameters to skopt.space.Real.
        """
        super().__init__(low=low, high=high, default=default, space=space, optimize=optimize,
                         load=load, **kwargs)

    def get_space(self, name: str) -> 'Real':
        """
        Create skopt optimization space.
        :param name: A name of parameter field.
        """
        return Real(low=self.low, high=self.high, name=name, **self._space_params)


class DecimalParameter(NumericParameter):
    default: float
    value: float

    def __init__(self, low: Union[float, Sequence[float]], high: Optional[float] = None, *,
                 default: float, decimals: int = 3, space: Optional[str] = None,
                 optimize: bool = True, load: bool = True, **kwargs):
        """
        Initialize hyperopt-optimizable decimal parameter with a limited precision.
        :param low: Lower end (inclusive) of optimization space or [low, high].
        :param high: Upper end (inclusive) of optimization space.
            Must be None if entire range is passed as first parameter.
        :param default: A default value.
        :param decimals: A number of decimals after floating point to be included in testing.
        :param space: A parameter category. Can be 'buy' or 'sell'. This parameter is optional if
            parameter fieldname is prefixed with 'buy_' or 'sell_'.
        :param optimize: Include parameter in hyperopt optimizations.
        :param load: Load parameter value from {space}_params.
        :param kwargs: Extra parameters to skopt.space.Integer.
        """
        self._decimals = decimals
        default = round(default, self._decimals)

        super().__init__(low=low, high=high, default=default, space=space, optimize=optimize,
                         load=load, **kwargs)

    def get_space(self, name: str) -> 'SKDecimal':
        """
        Create skopt optimization space.
        :param name: A name of parameter field.
        """
        return SKDecimal(low=self.low, high=self.high, decimals=self._decimals, name=name,
                         **self._space_params)

    @property
    def range(self):
        """
        Get each value in this space as list.
        Returns a List from low to high (inclusive) in Hyperopt mode.
        Returns a List with 1 item (`value`) in "non-hyperopt" mode, to avoid
        calculating hundreds of indicators.
        """
        if self.in_space and self.optimize:
            low = int(self.low * pow(10, self._decimals))
            high = int(self.high * pow(10, self._decimals)) + 1
            return [round(n * pow(0.1, self._decimals), self._decimals) for n in range(low, high)]
        else:
            return [self.value]


class CategoricalParameter(BaseParameter):
    default: Any
    value: Any
    opt_range: Sequence[Any]

    def __init__(self, categories: Sequence[Any], *, default: Optional[Any] = None,
                 space: Optional[str] = None, optimize: bool = True, load: bool = True, **kwargs):
        """
        Initialize hyperopt-optimizable parameter.
        :param categories: Optimization space, [a, b, ...].
        :param default: A default value. If not specified, first item from specified space will be
            used.
        :param space: A parameter category. Can be 'buy' or 'sell'. This parameter is optional if
            parameter field name is prefixed with 'buy_' or 'sell_'.
        :param optimize: Include parameter in hyperopt optimizations.
        :param load: Load parameter value from {space}_params.
        :param kwargs: Extra parameters to skopt.space.Categorical.
        """
        if len(categories) < 2:
            raise OperationalException(
                'CategoricalParameter space must be [a, b, ...] (at least two parameters)')
        self.opt_range = categories
        super().__init__(default=default, space=space, optimize=optimize,
                         load=load, **kwargs)

    def get_space(self, name: str) -> 'Categorical':
        """
        Create skopt optimization space.
        :param name: A name of parameter field.
        """
        return Categorical(self.opt_range, name=name, **self._space_params)

    @property
    def range(self):
        """
        Get each value in this space as list.
        Returns a List of categories in Hyperopt mode.
        Returns a List with 1 item (`value`) in "non-hyperopt" mode, to avoid
        calculating hundreds of indicators.
        """
        if self.in_space and self.optimize:
            return self.opt_range
        else:
            return [self.value]


class BooleanParameter(CategoricalParameter):

    def __init__(self, *, default: Optional[Any] = None,
                 space: Optional[str] = None, optimize: bool = True, load: bool = True, **kwargs):
        """
        Initialize hyperopt-optimizable Boolean Parameter.
        It's a shortcut to `CategoricalParameter([True, False])`.
        :param default: A default value. If not specified, first item from specified space will be
            used.
        :param space: A parameter category. Can be 'buy' or 'sell'. This parameter is optional if
            parameter field name is prefixed with 'buy_' or 'sell_'.
        :param optimize: Include parameter in hyperopt optimizations.
        :param load: Load parameter value from {space}_params.
        :param kwargs: Extra parameters to skopt.space.Categorical.
        """

        categories = [True, False]
        super().__init__(categories=categories, default=default, space=space, optimize=optimize,
                         load=load, **kwargs)


class HyperStrategyMixin:
    """
    A helper base class which allows HyperOptAuto class to reuse implementations of buy/sell
@@ -307,7 +30,10 @@ class HyperStrategyMixin:
        self.ft_sell_params: List[BaseParameter] = []
        self.ft_protection_params: List[BaseParameter] = []

        self._load_hyper_params(config.get('runmode') == RunMode.HYPEROPT)
        params = self.load_params_from_file()
        params = params.get('params', {})
        self._ft_params_from_file = params
        # Init/loading of parameters is done as part of ft_bot_start().

    def enumerate_parameters(self, category: str = None) -> Iterator[Tuple[str, BaseParameter]]:
        """
@@ -327,28 +53,13 @@ class HyperStrategyMixin:
        for par in params:
            yield par.name, par

    @classmethod
    def detect_parameters(cls, category: str) -> Iterator[Tuple[str, BaseParameter]]:
        """ Detect all parameters for 'category' """
        for attr_name in dir(cls):
            if not attr_name.startswith('__'):  # Ignore internals, not strictly necessary.
                attr = getattr(cls, attr_name)
                if issubclass(attr.__class__, BaseParameter):
                    if (attr_name.startswith(category + '_')
                            and attr.category is not None and attr.category != category):
                        raise OperationalException(
                            f'Inconclusive parameter name {attr_name}, category: {attr.category}.')
                    if (category == attr.category or
                            (attr_name.startswith(category + '_') and attr.category is None)):
                        yield attr_name, attr

    @classmethod
    def detect_all_parameters(cls) -> Dict:
        """ Detect all parameters and return them as a list"""
        params: Dict = {
            'buy': list(cls.detect_parameters('buy')),
            'sell': list(cls.detect_parameters('sell')),
            'protection': list(cls.detect_parameters('protection')),
        params: Dict[str, Any] = {
            'buy': list(detect_parameters(cls, 'buy')),
            'sell': list(detect_parameters(cls, 'sell')),
            'protection': list(detect_parameters(cls, 'protection')),
        }
        params.update({
            'count': len(params['buy'] + params['sell'] + params['protection'])
@@ -356,21 +67,49 @@ class HyperStrategyMixin:

        return params

    def _load_hyper_params(self, hyperopt: bool = False) -> None:
    def ft_load_params_from_file(self) -> None:
        """
        Load Parameters from parameter file
        Should/must run before config values are loaded in strategy_resolver.
        """
        if self._ft_params_from_file:
            # Set parameters from Hyperopt results file
            params = self._ft_params_from_file
            self.minimal_roi = params.get('roi', getattr(self, 'minimal_roi', {}))

            self.stoploss = params.get('stoploss', {}).get(
                'stoploss', getattr(self, 'stoploss', -0.1))
            trailing = params.get('trailing', {})
            self.trailing_stop = trailing.get(
                'trailing_stop', getattr(self, 'trailing_stop', False))
            self.trailing_stop_positive = trailing.get(
                'trailing_stop_positive', getattr(self, 'trailing_stop_positive', None))
            self.trailing_stop_positive_offset = trailing.get(
                'trailing_stop_positive_offset',
                getattr(self, 'trailing_stop_positive_offset', 0))
            self.trailing_only_offset_is_reached = trailing.get(
                'trailing_only_offset_is_reached',
                getattr(self, 'trailing_only_offset_is_reached', 0.0))

    def ft_load_hyper_params(self, hyperopt: bool = False) -> None:
        """
        Load Hyperoptable parameters
        Prevalence:
        * Parameters from parameter file
        * Parameters defined in parameters objects (buy_params, sell_params, ...)
        * Parameter defaults
        """
        params = self.load_params_from_file()
        params = params.get('params', {})
        self._ft_params_from_file = params
        buy_params = deep_merge_dicts(params.get('buy', {}), getattr(self, 'buy_params', {}))
        sell_params = deep_merge_dicts(params.get('sell', {}), getattr(self, 'sell_params', {}))
        protection_params = deep_merge_dicts(params.get('protection', {}),

        buy_params = deep_merge_dicts(self._ft_params_from_file.get('buy', {}),
                                      getattr(self, 'buy_params', {}))
        sell_params = deep_merge_dicts(self._ft_params_from_file.get('sell', {}),
                                       getattr(self, 'sell_params', {}))
        protection_params = deep_merge_dicts(self._ft_params_from_file.get('protection', {}),
                                             getattr(self, 'protection_params', {}))

        self._load_params(buy_params, 'buy', hyperopt)
        self._load_params(sell_params, 'sell', hyperopt)
        self._load_params(protection_params, 'protection', hyperopt)
        self._ft_load_params(buy_params, 'buy', hyperopt)
        self._ft_load_params(sell_params, 'sell', hyperopt)
        self._ft_load_params(protection_params, 'protection', hyperopt)

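Editor's note: the precedence documented in the docstring above (parameter file > parameter objects > defaults) comes from `deep_merge_dicts`, whose first argument wins on conflicting keys. A minimal demonstration, assuming that merge semantic:

# Editor's note -- illustrating the precedence rule from the docstring above;
# assumes freqtrade.misc.deep_merge_dicts merges arg1 into arg2, arg1 winning.
from freqtrade.misc import deep_merge_dicts

file_params = {'buy_rsi': 25}                   # from the strategy's .json file
class_params = {'buy_rsi': 30, 'buy_ema': 9}    # buy_params on the class

merged = deep_merge_dicts(file_params, class_params)
assert merged == {'buy_rsi': 25, 'buy_ema': 9}  # file value takes priority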
    def load_params_from_file(self) -> Dict:
        filename_str = getattr(self, '__file__', '')
@@ -393,7 +132,7 @@ class HyperStrategyMixin:

        return {}

    def _load_params(self, params: Dict, space: str, hyperopt: bool = False) -> None:
    def _ft_load_params(self, params: Dict, space: str, hyperopt: bool = False) -> None:
        """
        Set optimizable parameter values.
        :param params: Dictionary with new parameter values.
@@ -402,7 +141,7 @@ class HyperStrategyMixin:
            logger.info(f"No params for {space} found, using default values.")
        param_container: List[BaseParameter] = getattr(self, f"ft_{space}_params")

        for attr_name, attr in self.detect_parameters(space):
        for attr_name, attr in detect_parameters(self, space):
            attr.name = attr_name
            attr.in_space = hyperopt and HyperoptTools.has_space(self.config, space)
            if not attr.category:
@@ -424,7 +163,7 @@ class HyperStrategyMixin:
        """
        Returns list of Parameters that are not part of the current optimize job
        """
        params = {
        params: Dict[str, Dict] = {
            'buy': {},
            'sell': {},
            'protection': {},
@@ -433,3 +172,25 @@ class HyperStrategyMixin:
            if not p.optimize or not p.in_space:
                params[p.category][name] = p.value
        return params


def detect_parameters(
    obj: Union[HyperStrategyMixin, Type[HyperStrategyMixin]],
    category: str
) -> Iterator[Tuple[str, BaseParameter]]:
    """
    Detect all parameters for 'category' for "obj"
    :param obj: Strategy object or class
    :param category: category - usually 'buy', 'sell', 'protection', ...
    """
    for attr_name in dir(obj):
        if not attr_name.startswith('__'):  # Ignore internals, not strictly necessary.
            attr = getattr(obj, attr_name)
            if issubclass(attr.__class__, BaseParameter):
                if (attr_name.startswith(category + '_')
                        and attr.category is not None and attr.category != category):
                    raise OperationalException(
                        f'Inconclusive parameter name {attr_name}, category: {attr.category}.')
                if (category == attr.category or
                        (attr_name.startswith(category + '_') and attr.category is None)):
                    yield attr_name, attr
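Editor's note: `detect_parameters` is now a module-level function that accepts either a class or an instance (hence the `Union[HyperStrategyMixin, Type[HyperStrategyMixin]]` signature). A usage sketch with a hypothetical strategy class:

# Editor's note -- usage sketch; MyStrategy is hypothetical.
from freqtrade.strategy.parameters import IntParameter

class MyStrategy(HyperStrategyMixin):
    buy_rsi = IntParameter(10, 40, default=30, space='buy')

for name, param in detect_parameters(MyStrategy, 'buy'):
    print(name, param)   # -> buy_rsi IntParameter(30)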
@@ -14,9 +14,10 @@ from freqtrade.constants import ListPairsWithTimeframes
from freqtrade.data.dataprovider import DataProvider
from freqtrade.enums import (CandleType, ExitCheckTuple, ExitType, SignalDirection, SignalTagType,
                             SignalType, TradingMode)
from freqtrade.enums.runmode import RunMode
from freqtrade.exceptions import OperationalException, StrategyError
from freqtrade.exchange import timeframe_to_minutes, timeframe_to_next_date, timeframe_to_seconds
from freqtrade.persistence import LocalTrade, Order, PairLocks, Trade
from freqtrade.persistence import Order, PairLocks, Trade
from freqtrade.strategy.hyper import HyperStrategyMixin
from freqtrade.strategy.informative_decorator import (InformativeData, PopulateIndicators,
                                                      _create_and_merge_informative_pair,
@@ -82,7 +83,7 @@ class IStrategy(ABC, HyperStrategyMixin):
    }

    # run "populate_indicators" only for new candle
    process_only_new_candles: bool = False
    process_only_new_candles: bool = True

    use_exit_signal: bool
    exit_profit_only: bool
@@ -144,6 +145,15 @@ class IStrategy(ABC, HyperStrategyMixin):
                informative_data.candle_type = config['candle_type_def']
            self._ft_informative.append((informative_data, cls_method))

    def ft_bot_start(self, **kwargs) -> None:
        """
        Strategy init - runs after dataprovider has been added.
        Must call bot_start()
        """
        strategy_safe_wrapper(self.bot_start)()

        self.ft_load_hyper_params(self.config.get('runmode') == RunMode.HYPEROPT)

    @abstractmethod
    def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        """
@@ -277,8 +287,9 @@ class IStrategy(ABC, HyperStrategyMixin):

        :param pair: Pair that's about to be bought/shorted.
        :param order_type: Order type (as configured in order_types). usually limit or market.
        :param amount: Amount in target (quote) currency that's going to be traded.
        :param amount: Amount in target (base) currency that's going to be traded.
        :param rate: Rate that's going to be used when using limit orders
                     or current rate for market orders.
        :param time_in_force: Time in force. Defaults to GTC (Good-til-cancelled).
        :param current_time: datetime object, containing the current datetime
        :param entry_tag: Optional entry_tag (buy_tag) if provided with the buy signal.
@@ -304,8 +315,9 @@ class IStrategy(ABC, HyperStrategyMixin):
        :param pair: Pair for trade that's about to be exited.
        :param trade: trade object.
        :param order_type: Order type (as configured in order_types). usually limit or market.
        :param amount: Amount in quote currency.
        :param amount: Amount in base currency.
        :param rate: Rate that's going to be used when using limit orders
                     or current rate for market orders.
        :param time_in_force: Time in force. Defaults to GTC (Good-til-cancelled).
        :param exit_reason: Exit reason.
            Can be any of ['roi', 'stop_loss', 'stoploss_on_exchange', 'trailing_stop_loss',
@@ -429,7 +441,7 @@ class IStrategy(ABC, HyperStrategyMixin):
        return self.custom_sell(pair, trade, current_time, current_rate, current_profit, **kwargs)

    def custom_stake_amount(self, pair: str, current_time: datetime, current_rate: float,
                            proposed_stake: float, min_stake: float, max_stake: float,
                            proposed_stake: float, min_stake: Optional[float], max_stake: float,
                            entry_tag: Optional[str], side: str, **kwargs) -> float:
        """
        Customize stake size for each new trade.
@@ -447,8 +459,9 @@ class IStrategy(ABC, HyperStrategyMixin):
        return proposed_stake

    def adjust_trade_position(self, trade: Trade, current_time: datetime,
                              current_rate: float, current_profit: float, min_stake: float,
                              max_stake: float, **kwargs) -> Optional[float]:
                              current_rate: float, current_profit: float,
                              min_stake: Optional[float], max_stake: float,
                              **kwargs) -> Optional[float]:
        """
        Custom trade adjustment logic, returning the stake amount that a trade should be increased.
        This means extra buy orders with additional fees.
@@ -498,8 +511,8 @@ class IStrategy(ABC, HyperStrategyMixin):
        return current_order_rate

    def leverage(self, pair: str, current_time: datetime, current_rate: float,
                 proposed_leverage: float, max_leverage: float, side: str,
                 **kwargs) -> float:
                 proposed_leverage: float, max_leverage: float, entry_tag: Optional[str],
                 side: str, **kwargs) -> float:
        """
        Customize leverage for each new trade. This method is only called in futures mode.

@@ -508,6 +521,7 @@ class IStrategy(ABC, HyperStrategyMixin):
        :param current_rate: Rate, calculated based on pricing settings in exit_pricing.
        :param proposed_leverage: A leverage proposed by the bot.
        :param max_leverage: Max leverage allowed on this pair
        :param entry_tag: Optional entry_tag (buy_tag) if provided with the buy signal.
        :param side: 'long' or 'short' - indicating the direction of the proposed trade
        :return: A leverage amount, which is between 1.0 and max_leverage.
        """
@@ -533,7 +547,8 @@ class IStrategy(ABC, HyperStrategyMixin):
        return None

    def populate_any_indicators(self, basepair: str, pair: str, df: DataFrame, tf: str,
                                informative: DataFrame = None, coin: str = "") -> DataFrame:
                                informative: DataFrame = None, coin: str = "",
                                set_generalized_indicators: bool = False) -> DataFrame:
        """
        Function designed to automatically generate, name and merge features
        from user indicated timeframes in the configuration file. User can add
@@ -894,16 +909,16 @@ class IStrategy(ABC, HyperStrategyMixin):
    def should_exit(self, trade: Trade, rate: float, current_time: datetime, *,
                    enter: bool, exit_: bool,
                    low: float = None, high: float = None,
                    force_stoploss: float = 0) -> ExitCheckTuple:
                    force_stoploss: float = 0) -> List[ExitCheckTuple]:
        """
        This function evaluates if one of the conditions required to trigger an exit order
        has been reached, which can either be a stop-loss, ROI or exit-signal.
        :param low: Only used during backtesting to simulate (long)stoploss/(short)ROI
        :param high: Only used during backtesting, to simulate (short)stoploss/(long)ROI
        :param force_stoploss: Externally provided stoploss
        :return: True if trade should be exited, False otherwise
        :return: List of exit reasons - or empty list.
        """

        exits: List[ExitCheckTuple] = []
        current_rate = rate
        current_profit = trade.calc_profit_ratio(current_rate)

@@ -933,19 +948,20 @@ class IStrategy(ABC, HyperStrategyMixin):
        if exit_ and not enter:
            exit_signal = ExitType.EXIT_SIGNAL
        else:
            custom_reason = strategy_safe_wrapper(self.custom_exit, default_retval=False)(
            reason_cust = strategy_safe_wrapper(self.custom_exit, default_retval=False)(
                pair=trade.pair, trade=trade, current_time=current_time,
                current_rate=current_rate, current_profit=current_profit)
            if custom_reason:
            if reason_cust:
                exit_signal = ExitType.CUSTOM_EXIT
                if isinstance(custom_reason, str):
                    if len(custom_reason) > CUSTOM_EXIT_MAX_LENGTH:
                if isinstance(reason_cust, str):
                    custom_reason = reason_cust
                    if len(reason_cust) > CUSTOM_EXIT_MAX_LENGTH:
                        logger.warning(f'Custom exit reason returned from '
                                       f'custom_exit is too long and was trimmed '
                                       f'to {CUSTOM_EXIT_MAX_LENGTH} characters.')
                        custom_reason = custom_reason[:CUSTOM_EXIT_MAX_LENGTH]
                        custom_reason = reason_cust[:CUSTOM_EXIT_MAX_LENGTH]
                else:
                    custom_reason = None
                    custom_reason = ''
        if (
                exit_signal == ExitType.CUSTOM_EXIT
                or (exit_signal == ExitType.EXIT_SIGNAL
@@ -954,24 +970,29 @@ class IStrategy(ABC, HyperStrategyMixin):
            logger.debug(f"{trade.pair} - Sell signal received. "
                         f"exit_type=ExitType.{exit_signal.name}" +
                         (f", custom_reason={custom_reason}" if custom_reason else ""))
            return ExitCheckTuple(exit_type=exit_signal, exit_reason=custom_reason)
            exits.append(ExitCheckTuple(exit_type=exit_signal, exit_reason=custom_reason))

        # Sequence:
        # Exit-signal
        # ROI (if not stoploss)
        # Stoploss
        if roi_reached and stoplossflag.exit_type != ExitType.STOP_LOSS:
            logger.debug(f"{trade.pair} - Required profit reached. exit_type=ExitType.ROI")
            return ExitCheckTuple(exit_type=ExitType.ROI)
        # ROI
        # Trailing stoploss

        if stoplossflag.exit_flag:
            if stoplossflag.exit_type == ExitType.STOP_LOSS:

                logger.debug(f"{trade.pair} - Stoploss hit. exit_type={stoplossflag.exit_type}")
                return stoplossflag
                exits.append(stoplossflag)

        # This one is noisy, commented out...
        # logger.debug(f"{trade.pair} - No exit signal.")
        return ExitCheckTuple(exit_type=ExitType.NONE)
        if roi_reached:
            logger.debug(f"{trade.pair} - Required profit reached. exit_type=ExitType.ROI")
            exits.append(ExitCheckTuple(exit_type=ExitType.ROI))

        if stoplossflag.exit_type == ExitType.TRAILING_STOP_LOSS:

            logger.debug(f"{trade.pair} - Trailing stoploss hit.")
            exits.append(stoplossflag)

        return exits
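Editor's note: `should_exit` now collects every applicable exit in priority order (exit signal, stoploss, ROI, trailing stop) instead of returning the first match, and an empty list replaces the old `ExitType.NONE` sentinel. A caller sketch:

# Editor's note -- caller sketch for the new list-returning should_exit.
exits = strategy.should_exit(trade, rate, current_time, enter=False, exit_=True)
for exit_check in exits:         # empty list == "keep the trade open"
    print(exit_check.exit_type, exit_check.exit_reason)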
    def stop_loss_reached(self, current_rate: float, trade: Trade,
                          current_time: datetime, current_profit: float,
@@ -1086,7 +1107,7 @@ class IStrategy(ABC, HyperStrategyMixin):
        else:
            return current_profit > roi

    def ft_check_timed_out(self, trade: LocalTrade, order: Order,
    def ft_check_timed_out(self, trade: Trade, order: Order,
                           current_time: datetime) -> bool:
        """
        FT Internal method.

289
freqtrade/strategy/parameters.py
Normal file
@@ -0,0 +1,289 @@
"""
|
||||
IHyperStrategy interface, hyperoptable Parameter class.
|
||||
This module defines a base class for auto-hyperoptable strategies.
|
||||
"""
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from contextlib import suppress
|
||||
from typing import Any, Optional, Sequence, Union
|
||||
|
||||
|
||||
with suppress(ImportError):
|
||||
from skopt.space import Integer, Real, Categorical
|
||||
from freqtrade.optimize.space import SKDecimal
|
||||
|
||||
from freqtrade.exceptions import OperationalException
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BaseParameter(ABC):
|
||||
"""
|
||||
Defines a parameter that can be optimized by hyperopt.
|
||||
"""
|
||||
category: Optional[str]
|
||||
default: Any
|
||||
value: Any
|
||||
in_space: bool = False
|
||||
name: str
|
||||
|
||||
def __init__(self, *, default: Any, space: Optional[str] = None,
|
||||
optimize: bool = True, load: bool = True, **kwargs):
|
||||
"""
|
||||
Initialize hyperopt-optimizable parameter.
|
||||
:param space: A parameter category. Can be 'buy' or 'sell'. This parameter is optional if
|
||||
parameter field
|
||||
name is prefixed with 'buy_' or 'sell_'.
|
||||
:param optimize: Include parameter in hyperopt optimizations.
|
||||
:param load: Load parameter value from {space}_params.
|
||||
:param kwargs: Extra parameters to skopt.space.(Integer|Real|Categorical).
|
||||
"""
|
||||
if 'name' in kwargs:
|
||||
raise OperationalException(
|
||||
'Name is determined by parameter field name and can not be specified manually.')
|
||||
self.category = space
|
||||
self._space_params = kwargs
|
||||
self.value = default
|
||||
self.optimize = optimize
|
||||
self.load = load
|
||||
|
||||
def __repr__(self):
|
||||
return f'{self.__class__.__name__}({self.value})'
|
||||
|
||||
@abstractmethod
|
||||
def get_space(self, name: str) -> Union['Integer', 'Real', 'SKDecimal', 'Categorical']:
|
||||
"""
|
||||
Get-space - will be used by Hyperopt to get the hyperopt Space
|
||||
"""
|
||||
|
||||
|
||||
class NumericParameter(BaseParameter):
    """ Internal parameter used for Numeric purposes """
    float_or_int = Union[int, float]
    default: float_or_int
    value: float_or_int

    def __init__(self, low: Union[float_or_int, Sequence[float_or_int]],
                 high: Optional[float_or_int] = None, *, default: float_or_int,
                 space: Optional[str] = None, optimize: bool = True, load: bool = True, **kwargs):
        """
        Initialize hyperopt-optimizable numeric parameter.
        Cannot be instantiated directly, but provides the validation for other numeric parameters.
        :param low: Lower end (inclusive) of optimization space or [low, high].
        :param high: Upper end (inclusive) of optimization space.
                     Must be None if the entire range is passed as the first parameter.
        :param default: A default value.
        :param space: A parameter category. Can be 'buy' or 'sell'. This parameter is optional if
                      the parameter fieldname is prefixed with 'buy_' or 'sell_'.
        :param optimize: Include parameter in hyperopt optimizations.
        :param load: Load parameter value from {space}_params.
        :param kwargs: Extra parameters to skopt.space.*.
        """
        if high is not None and isinstance(low, Sequence):
            raise OperationalException(f'{self.__class__.__name__} space invalid.')
        if high is None or isinstance(low, Sequence):
            if not isinstance(low, Sequence) or len(low) != 2:
                raise OperationalException(f'{self.__class__.__name__} space must be [low, high]')
            self.low, self.high = low
        else:
            self.low = low
            self.high = high

        super().__init__(default=default, space=space, optimize=optimize,
                         load=load, **kwargs)


class IntParameter(NumericParameter):
    default: int
    value: int
    low: int
    high: int

    def __init__(self, low: Union[int, Sequence[int]], high: Optional[int] = None, *, default: int,
                 space: Optional[str] = None, optimize: bool = True, load: bool = True, **kwargs):
        """
        Initialize hyperopt-optimizable integer parameter.
        :param low: Lower end (inclusive) of optimization space or [low, high].
        :param high: Upper end (inclusive) of optimization space.
                     Must be None if the entire range is passed as the first parameter.
        :param default: A default value.
        :param space: A parameter category. Can be 'buy' or 'sell'. This parameter is optional if
                      the parameter fieldname is prefixed with 'buy_' or 'sell_'.
        :param optimize: Include parameter in hyperopt optimizations.
        :param load: Load parameter value from {space}_params.
        :param kwargs: Extra parameters to skopt.space.Integer.
        """

        super().__init__(low=low, high=high, default=default, space=space, optimize=optimize,
                         load=load, **kwargs)

    def get_space(self, name: str) -> 'Integer':
        """
        Create skopt optimization space.
        :param name: A name of parameter field.
        """
        return Integer(low=self.low, high=self.high, name=name, **self._space_params)

    @property
    def range(self):
        """
        Get each value in this space as list.
        Returns a List from low to high (inclusive) in Hyperopt mode.
        Returns a List with 1 item (`value`) in "non-hyperopt" mode, to avoid
        calculating hundreds of indicators.
        """
        if self.in_space and self.optimize:
            # Scikit-optimize ranges are "inclusive", while python's "range" is exclusive
            return range(self.low, self.high + 1)
        else:
            return range(self.value, self.value + 1)


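The parameter classes above are meant to be declared as strategy class attributes. A minimal usage sketch (not part of this commit; names such as `buy_rsi_length` are invented) of how `IntParameter.range` keeps live runs cheap while still exploring the full range under hyperopt:

    import talib.abstract as ta
    from pandas import DataFrame

    from freqtrade.strategy import IStrategy, IntParameter


    class ExampleStrategy(IStrategy):
        # The 'buy' space could be passed explicitly; here it is inferred from the 'buy_' prefix.
        # The bounds may equally be given as one sequence: IntParameter([10, 25], default=14).
        buy_rsi_length = IntParameter(10, 25, default=14)

        def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
            # Hyperopt mode: iterates 10..25 inclusive. Live/backtest: a single value.
            for val in self.buy_rsi_length.range:
                dataframe[f'rsi_{val}'] = ta.RSI(dataframe, timeperiod=val)
            return dataframe
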
class RealParameter(NumericParameter):
    default: float
    value: float

    def __init__(self, low: Union[float, Sequence[float]], high: Optional[float] = None, *,
                 default: float, space: Optional[str] = None, optimize: bool = True,
                 load: bool = True, **kwargs):
        """
        Initialize hyperopt-optimizable floating-point parameter with unlimited precision.
        :param low: Lower end (inclusive) of optimization space or [low, high].
        :param high: Upper end (inclusive) of optimization space.
                     Must be None if the entire range is passed as the first parameter.
        :param default: A default value.
        :param space: A parameter category. Can be 'buy' or 'sell'. This parameter is optional if
                      the parameter fieldname is prefixed with 'buy_' or 'sell_'.
        :param optimize: Include parameter in hyperopt optimizations.
        :param load: Load parameter value from {space}_params.
        :param kwargs: Extra parameters to skopt.space.Real.
        """
        super().__init__(low=low, high=high, default=default, space=space, optimize=optimize,
                         load=load, **kwargs)

    def get_space(self, name: str) -> 'Real':
        """
        Create skopt optimization space.
        :param name: A name of parameter field.
        """
        return Real(low=self.low, high=self.high, name=name, **self._space_params)


class DecimalParameter(NumericParameter):
    default: float
    value: float

    def __init__(self, low: Union[float, Sequence[float]], high: Optional[float] = None, *,
                 default: float, decimals: int = 3, space: Optional[str] = None,
                 optimize: bool = True, load: bool = True, **kwargs):
        """
        Initialize hyperopt-optimizable decimal parameter with a limited precision.
        :param low: Lower end (inclusive) of optimization space or [low, high].
        :param high: Upper end (inclusive) of optimization space.
                     Must be None if the entire range is passed as the first parameter.
        :param default: A default value.
        :param decimals: A number of decimals after floating point to be included in testing.
        :param space: A parameter category. Can be 'buy' or 'sell'. This parameter is optional if
                      the parameter fieldname is prefixed with 'buy_' or 'sell_'.
        :param optimize: Include parameter in hyperopt optimizations.
        :param load: Load parameter value from {space}_params.
        :param kwargs: Extra parameters to SKDecimal.
        """
        self._decimals = decimals
        default = round(default, self._decimals)

        super().__init__(low=low, high=high, default=default, space=space, optimize=optimize,
                         load=load, **kwargs)

    def get_space(self, name: str) -> 'SKDecimal':
        """
        Create skopt optimization space.
        :param name: A name of parameter field.
        """
        return SKDecimal(low=self.low, high=self.high, decimals=self._decimals, name=name,
                         **self._space_params)

    @property
    def range(self):
        """
        Get each value in this space as list.
        Returns a List from low to high (inclusive) in Hyperopt mode.
        Returns a List with 1 item (`value`) in "non-hyperopt" mode, to avoid
        calculating hundreds of indicators.
        """
        if self.in_space and self.optimize:
            low = int(self.low * pow(10, self._decimals))
            high = int(self.high * pow(10, self._decimals)) + 1
            return [round(n * pow(0.1, self._decimals), self._decimals) for n in range(low, high)]
        else:
            return [self.value]


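To make the `range` arithmetic concrete: with `low=0.01`, `high=0.03` and `decimals=2`, scaling by 10^2 gives `range(1, 4)`, which maps back to `[0.01, 0.02, 0.03]`. A small sketch (values invented; `in_space` is normally set by the hyperopt machinery, we force it here to reach the hyperopt branch):

    from freqtrade.strategy import DecimalParameter

    p = DecimalParameter(0.01, 0.03, decimals=2, default=0.02, space='buy')
    p.in_space = True  # pretend hyperopt has registered this parameter
    assert p.range == [0.01, 0.02, 0.03]
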
class CategoricalParameter(BaseParameter):
    default: Any
    value: Any
    opt_range: Sequence[Any]

    def __init__(self, categories: Sequence[Any], *, default: Optional[Any] = None,
                 space: Optional[str] = None, optimize: bool = True, load: bool = True, **kwargs):
        """
        Initialize hyperopt-optimizable parameter.
        :param categories: Optimization space, [a, b, ...].
        :param default: A default value. If not specified, the first item from the specified
                        space will be used.
        :param space: A parameter category. Can be 'buy' or 'sell'. This parameter is optional if
                      the parameter fieldname is prefixed with 'buy_' or 'sell_'.
        :param optimize: Include parameter in hyperopt optimizations.
        :param load: Load parameter value from {space}_params.
        :param kwargs: Extra parameters to skopt.space.Categorical.
        """
        if len(categories) < 2:
            raise OperationalException(
                'CategoricalParameter space must be [a, b, ...] (at least two parameters)')
        self.opt_range = categories
        super().__init__(default=default, space=space, optimize=optimize,
                         load=load, **kwargs)

    def get_space(self, name: str) -> 'Categorical':
        """
        Create skopt optimization space.
        :param name: A name of parameter field.
        """
        return Categorical(self.opt_range, name=name, **self._space_params)

    @property
    def range(self):
        """
        Get each value in this space as list.
        Returns a List of categories in Hyperopt mode.
        Returns a List with 1 item (`value`) in "non-hyperopt" mode, to avoid
        calculating hundreds of indicators.
        """
        if self.in_space and self.optimize:
            return self.opt_range
        else:
            return [self.value]


class BooleanParameter(CategoricalParameter):

    def __init__(self, *, default: Optional[Any] = None,
                 space: Optional[str] = None, optimize: bool = True, load: bool = True, **kwargs):
        """
        Initialize hyperopt-optimizable Boolean Parameter.
        It's a shortcut to `CategoricalParameter([True, False])`.
        :param default: A default value. If not specified, the first item from the specified
                        space will be used.
        :param space: A parameter category. Can be 'buy' or 'sell'. This parameter is optional if
                      the parameter fieldname is prefixed with 'buy_' or 'sell_'.
        :param optimize: Include parameter in hyperopt optimizations.
        :param load: Load parameter value from {space}_params.
        :param kwargs: Extra parameters to skopt.space.Categorical.
        """

        categories = [True, False]
        super().__init__(categories=categories, default=default, space=space, optimize=optimize,
                         load=load, **kwargs)
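BooleanParameter behaves exactly like a two-category CategoricalParameter. A quick sketch of the resulting optimization space (values illustrative, `in_space` again forced for demonstration):

    from freqtrade.strategy import BooleanParameter

    use_ema = BooleanParameter(default=True, space='buy')
    print(use_ema.value)        # True (the default, until loaded from buy_params)
    use_ema.in_space = True
    print(list(use_ema.range))  # [True, False] - both candidates during hyperopt
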
@@ -1,5 +1,7 @@
import logging
from copy import deepcopy
from functools import wraps
from typing import Any, Callable, TypeVar, cast

from freqtrade.exceptions import StrategyError

@@ -7,12 +9,16 @@ from freqtrade.exceptions import StrategyError
logger = logging.getLogger(__name__)


def strategy_safe_wrapper(f, message: str = "", default_retval=None, supress_error=False):
F = TypeVar('F', bound=Callable[..., Any])


def strategy_safe_wrapper(f: F, message: str = "", default_retval=None, supress_error=False) -> F:
    """
    Wrapper around user-provided methods and functions.
    Catches all exceptions and returns either the default_retval (if it's not None) or raises
    a StrategyError exception, which then needs to be handled by the calling method.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        try:
            if 'trade' in kwargs:
@@ -37,4 +43,4 @@ def strategy_safe_wrapper(f, message: str = "", default_retval=None, supress_err
                raise StrategyError(str(error)) from error
            return default_retval

    return wrapper
    return cast(F, wrapper)
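A behavioural sketch of the wrapper (module path assumed from the hunk context; `flaky_callback` is invented for illustration): with a non-None `default_retval`, the exception is logged and swallowed and the default is returned; otherwise a `StrategyError` propagates to the caller.

    from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper  # assumed path

    def flaky_callback(pair: str) -> float:
        raise ValueError("indicator blew up")

    # The ValueError is logged and replaced by the default return value:
    safe = strategy_safe_wrapper(flaky_callback, message="callback failed: ", default_retval=0.0)
    assert safe(pair="BTC/USDT") == 0.0
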
@@ -70,7 +70,9 @@ class FreqaiExampleStrategy(IStrategy):
    def bot_start(self):
        self.model = CustomModel(self.config)

    def populate_any_indicators(self, metadata, pair, df, tf, informative=None, coin=""):
    def populate_any_indicators(
        self, metadata, pair, df, tf, informative=None, coin="", set_generalized_indicators=False
    ):
        """
        Function designed to automatically generate, name and merge features
        from user-indicated timeframes in the configuration file. User controls the indicators
@@ -120,9 +122,7 @@ class FreqaiExampleStrategy(IStrategy):
            informative["close"] / informative[f"{coin}bb_lowerband-period_{t}"]
        )

        informative[f"%-{coin}roc-period_{t}"] = ta.ROC(
            informative, timeperiod=t
        )
        informative[f"%-{coin}roc-period_{t}"] = ta.ROC(informative, timeperiod=t)
        macd = ta.MACD(informative, timeperiod=t)
        informative[f"%-{coin}macd-period_{t}"] = macd["macd"]

@@ -152,17 +152,17 @@ class FreqaiExampleStrategy(IStrategy):
        # Add generalized indicators here (because in live, it will call this
        # function to populate indicators during training). Notice how we ensure not to
        # add them multiple times
        if pair == self.freqai_info['corr_pairlist'][0] and tf == self.timeframe:
        if set_generalized_indicators:
            df["%-day_of_week"] = (df["date"].dt.dayofweek + 1) / 7
            df["%-hour_of_day"] = (df["date"].dt.hour + 1) / 25

            # user adds targets here by prepending them with &- (see convention below)
            # If user wishes to use multiple targets, a multioutput prediction model
            # needs to be used such as templates/CatboostPredictionMultiModel.py
            df['&-s_close'] = (
            df["&-s_close"] = (
                df["close"]
                .shift(-self.freqai_info['feature_parameters']["period"])
                .rolling(self.freqai_info['feature_parameters']["period"])
                .shift(-self.freqai_info["feature_parameters"]["period"])
                .rolling(self.freqai_info["feature_parameters"]["period"])
                .mean()
                / df["close"]
                - 1
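The `&-s_close` target above is the mean close over the next `period` candles, expressed as a fractional change from the current close. A standalone sketch of the same computation on a toy series (numbers invented):

    import pandas as pd

    period = 3
    close = pd.Series([100.0, 101.0, 102.0, 103.0, 104.0, 105.0])
    # shift(-period) pulls future closes back; rolling(period).mean() averages them,
    # so at index i this is mean(close[i+1 : i+period+1]) / close[i] - 1
    target = close.shift(-period).rolling(period).mean() / close - 1
    print(round(target[2], 4))  # mean(103, 104, 105) / 102 - 1 = 0.0196
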
@@ -174,15 +174,21 @@ class FreqaiExampleStrategy(IStrategy):

        self.freqai_info = self.config["freqai"]
        self.pair = metadata["pair"]

        sgi = True
        # the following loops are necessary for building the features
        # indicated by the user in the configuration file.
        # All indicators must be populated by populate_any_indicators() for live functionality
        # to work correctly.
        for tf in self.freqai_info["timeframes"]:
            dataframe = self.populate_any_indicators(
                metadata, self.pair, dataframe.copy(), tf, coin=self.pair.split("/")[0] + "-"
                metadata,
                self.pair,
                dataframe.copy(),
                tf,
                coin=self.pair.split("/")[0] + "-",
                set_generalized_indicators=sgi,
            )
            sgi = False
        for pair in self.freqai_info["corr_pairlist"]:
            if metadata["pair"] in pair:
                continue  # do not include whitelisted pair twice if it is in corr_pairlist
@@ -231,51 +237,55 @@ class FreqaiExampleStrategy(IStrategy):
    def get_ticker_indicator(self):
        return int(self.config["timeframe"][:-1])

    def custom_exit(self, pair: str, trade: Trade, current_time, current_rate,
                    current_profit, **kwargs):
    def custom_exit(
        self, pair: str, trade: Trade, current_time, current_rate, current_profit, **kwargs
    ):

        dataframe, _ = self.dp.get_analyzed_dataframe(pair=pair, timeframe=self.timeframe)

        trade_date = timeframe_to_prev_date(self.config['timeframe'], trade.open_date_utc)
        trade_candle = dataframe.loc[(dataframe['date'] == trade_date)]
        trade_date = timeframe_to_prev_date(self.config["timeframe"], trade.open_date_utc)
        trade_candle = dataframe.loc[(dataframe["date"] == trade_date)]

        if trade_candle.empty:
            return None
        trade_candle = trade_candle.squeeze()

        follow_mode = self.config.get('freqai', {}).get('follow_mode', False)
        follow_mode = self.config.get("freqai", {}).get("follow_mode", False)

        if not follow_mode:
            pair_dict = self.model.bridge.data_drawer.pair_dict
            pair_dict = self.model.bridge.dd.pair_dict
        else:
            pair_dict = self.model.bridge.data_drawer.follower_dict
            pair_dict = self.model.bridge.dd.follower_dict

        entry_tag = trade.enter_tag

        if ('prediction' + entry_tag not in pair_dict[pair] or
                pair_dict[pair]['prediction' + entry_tag] > 0):
        if (
            "prediction" + entry_tag not in pair_dict[pair]
            or pair_dict[pair]["prediction" + entry_tag] > 0
        ):
            with self.model.bridge.lock:
                pair_dict[pair]['prediction' + entry_tag] = abs(trade_candle['&-s_close'])
                pair_dict[pair]["prediction" + entry_tag] = abs(trade_candle["&-s_close"])
                if not follow_mode:
                    self.model.bridge.data_drawer.save_drawer_to_disk()
                    self.model.bridge.dd.save_drawer_to_disk()
                else:
                    self.model.bridge.data_drawer.save_follower_dict_to_disk()
                    self.model.bridge.dd.save_follower_dict_to_disk()

        roi_price = pair_dict[pair]['prediction' + entry_tag]
        roi_price = pair_dict[pair]["prediction" + entry_tag]
        roi_time = self.max_roi_time_long.value

        roi_decay = roi_price * (1 - ((current_time - trade.open_date_utc).seconds) /
                                 (roi_time * 60))
        roi_decay = roi_price * (
            1 - ((current_time - trade.open_date_utc).seconds) / (roi_time * 60)
        )
        if roi_decay < 0:
            roi_decay = self.linear_roi_offset.value
        else:
            roi_decay += self.linear_roi_offset.value

        if current_profit > roi_decay:
            return 'roi_custom_win'
            return "roi_custom_win"

        if current_profit < -roi_decay:
            return 'roi_custom_loss'
            return "roi_custom_loss"

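To make the decay arithmetic concrete (all numbers invented for illustration): the profit target starts at the stored prediction and shrinks linearly toward zero over `max_roi_time_long` minutes, with `linear_roi_offset` added as a floor.

    # hypothetical values, for illustration only
    roi_price = 0.05           # abs(&-s_close) stored at entry
    roi_time = 240             # self.max_roi_time_long.value, in minutes
    elapsed_seconds = 3600     # one hour into the trade
    linear_roi_offset = 0.005  # self.linear_roi_offset.value

    roi_decay = roi_price * (1 - elapsed_seconds / (roi_time * 60))
    roi_decay = linear_roi_offset if roi_decay < 0 else roi_decay + linear_roi_offset
    print(roi_decay)  # 0.0425 - 'roi_custom_win' fires once profit exceeds this
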
    def confirm_trade_exit(
        self,
@@ -287,22 +297,22 @@ class FreqaiExampleStrategy(IStrategy):
        time_in_force: str,
        exit_reason: str,
        current_time,
        **kwargs
        **kwargs,
    ) -> bool:

        entry_tag = trade.enter_tag
        follow_mode = self.config.get("freqai", {}).get("follow_mode", False)
        if not follow_mode:
            pair_dict = self.model.bridge.data_drawer.pair_dict
            pair_dict = self.model.bridge.dd.pair_dict
        else:
            pair_dict = self.model.bridge.data_drawer.follower_dict
            pair_dict = self.model.bridge.dd.follower_dict

        with self.model.bridge.lock:
            pair_dict[pair]["prediction" + entry_tag] = 0
            if not follow_mode:
                self.model.bridge.data_drawer.save_drawer_to_disk()
                self.model.bridge.dd.save_drawer_to_disk()
            else:
                self.model.bridge.data_drawer.save_follower_dict_to_disk()
                self.model.bridge.dd.save_follower_dict_to_disk()

        return True

@@ -316,7 +326,7 @@ class FreqaiExampleStrategy(IStrategy):
        current_time,
        entry_tag,
        side: str,
        **kwargs
        **kwargs,
    ) -> bool:

        df, _ = self.dp.get_analyzed_dataframe(pair, self.timeframe)
@@ -6,7 +6,7 @@ import numpy as np  # noqa
import pandas as pd  # noqa
from pandas import DataFrame  # noqa
from datetime import datetime  # noqa
from typing import Optional  # noqa
from typing import Optional, Union  # noqa

from freqtrade.strategy import (BooleanParameter, CategoricalParameter, DecimalParameter,
                                IStrategy, IntParameter)
@@ -64,7 +64,7 @@ class {{ strategy }}(IStrategy):
    # trailing_stop_positive_offset = 0.0  # Disabled / not configured

    # Run "populate_indicators()" only for new candles.
    process_only_new_candles = False
    process_only_new_candles = True

    # These values can be overridden in the config.
    use_exit_signal = True
@@ -62,7 +62,7 @@ class SampleStrategy(IStrategy):
    timeframe = '5m'

    # Run "populate_indicators()" only for new candles.
    process_only_new_candles = False
    process_only_new_candles = True

    # These values can be overridden in the config.
    use_exit_signal = True
@@ -13,7 +13,7 @@ def bot_loop_start(self, **kwargs) -> None:
    pass

def custom_entry_price(self, pair: str, current_time: 'datetime', proposed_rate: float,
                       entry_tag: Optional[str], **kwargs) -> float:
                       entry_tag: 'Optional[str]', side: str, **kwargs) -> float:
    """
    Custom entry price logic, returning the new entry price.

@@ -80,8 +80,8 @@ def custom_exit_price(self, pair: str, trade: 'Trade',
    return proposed_rate

def custom_stake_amount(self, pair: str, current_time: 'datetime', current_rate: float,
                        proposed_stake: float, min_stake: float, max_stake: float,
                        side: str, entry_tag: Optional[str], **kwargs) -> float:
                        proposed_stake: float, min_stake: Optional[float], max_stake: float,
                        entry_tag: 'Optional[str]', side: str, **kwargs) -> float:
    """
    Customize stake size for each new trade.

@@ -159,8 +159,9 @@ def confirm_trade_entry(self, pair: str, order_type: str, amount: float, rate: f

    :param pair: Pair that's about to be bought/shorted.
    :param order_type: Order type (as configured in order_types). Usually limit or market.
    :param amount: Amount in target (quote) currency that's going to be traded.
    :param amount: Amount in target (base) currency that's going to be traded.
    :param rate: Rate that's going to be used when using limit orders,
                 or the current rate for market orders.
    :param time_in_force: Time in force. Defaults to GTC (Good-til-cancelled).
    :param current_time: datetime object, containing the current datetime
    :param entry_tag: Optional entry_tag (buy_tag) if provided with the buy signal.
@@ -175,7 +176,7 @@ def confirm_trade_exit(self, pair: str, trade: 'Trade', order_type: str, amount:
                       rate: float, time_in_force: str, exit_reason: str,
                       current_time: 'datetime', **kwargs) -> bool:
    """
    Called right before placing a regular sell order.
    Called right before placing a regular exit order.
    Timing for this function is critical, so avoid doing heavy computations or
    network requests in this method.

@@ -183,18 +184,19 @@ def confirm_trade_exit(self, pair: str, trade: 'Trade', order_type: str, amount:

    When not implemented by a strategy, returns True (always confirming).

    :param pair: Pair that's currently analyzed
    :param pair: Pair for trade that's about to be exited.
    :param trade: trade object.
    :param order_type: Order type (as configured in order_types). Usually limit or market.
    :param amount: Amount in quote currency.
    :param amount: Amount in base currency.
    :param rate: Rate that's going to be used when using limit orders,
                 or the current rate for market orders.
    :param time_in_force: Time in force. Defaults to GTC (Good-til-cancelled).
    :param exit_reason: Exit reason.
        Can be any of ['roi', 'stop_loss', 'stoploss_on_exchange', 'trailing_stop_loss',
        'exit_signal', 'force_exit', 'emergency_exit']
    :param current_time: datetime object, containing the current datetime
    :param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
    :return bool: When True is returned, then the exit-order is placed on the exchange.
    :return bool: When True, then the exit-order is placed on the exchange.
        False aborts the process
    """
    return True
@@ -244,8 +246,8 @@ def check_exit_timeout(self, pair: str, trade: 'Trade', order: 'Order',
    return False

def adjust_trade_position(self, trade: 'Trade', current_time: 'datetime',
                          current_rate: float, current_profit: float, min_stake: float,
                          max_stake: float, **kwargs) -> Optional[float]:
                          current_rate: float, current_profit: float, min_stake: Optional[float],
                          max_stake: float, **kwargs) -> 'Optional[float]':
    """
    Custom trade adjustment logic, returning the stake amount by which a trade should be increased.
    This means extra buy orders with additional fees.

@@ -267,8 +269,8 @@ def adjust_trade_position(self, trade: 'Trade', current_time: 'datetime',
    return None

def leverage(self, pair: str, current_time: datetime, current_rate: float,
             proposed_leverage: float, max_leverage: float, side: str,
             **kwargs) -> float:
             proposed_leverage: float, max_leverage: float, entry_tag: Optional[str],
             side: str, **kwargs) -> float:
    """
    Customize leverage for each new trade. This method is only called in futures mode.

@@ -277,6 +279,7 @@ def leverage(self, pair: str, current_time: datetime, current_rate: float,
    :param current_rate: Rate, calculated based on pricing settings in exit_pricing.
    :param proposed_leverage: A leverage proposed by the bot.
    :param max_leverage: Max leverage allowed on this pair
    :param entry_tag: Optional entry_tag (buy_tag) if provided with the buy signal.
    :param side: 'long' or 'short' - indicating the direction of the proposed trade
    :return: A leverage amount, which is between 1.0 and max_leverage.
    """
@@ -131,9 +131,9 @@ class Wallets:
            if isinstance(balances[currency], dict):
                self._wallets[currency] = Wallet(
                    currency,
                    balances[currency].get('free', None),
                    balances[currency].get('used', None),
                    balances[currency].get('total', None)
                    balances[currency].get('free'),
                    balances[currency].get('used'),
                    balances[currency].get('total')
                )
        # Remove currencies no longer in get_balances output
        for currency in deepcopy(self._wallets):
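The Wallets change is purely cosmetic: `dict.get(key)` already defaults to `None`, so dropping the explicit second argument changes nothing. A quick check (dictionary invented):

    balance = {'free': 1.5, 'used': 0.0}
    # 'total' is missing from the exchange response: both spellings return None
    assert balance.get('total') is None
    assert balance.get('total', None) is None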