First pass changes for cleaning up

commit e7c5818d16 (parent 145faf9817)
@@ -101,8 +101,8 @@ ARGS_HYPEROPT_SHOW = ["hyperopt_list_best", "hyperopt_list_profitable", "hyperop
                       "print_json", "hyperoptexportfilename", "hyperopt_show_no_header",
                       "disableparamexport", "backtest_breakdown"]
 
-ARGS_ANALYZE_ENTRIES_EXITS = ["analysis_groups", "enter_reason_list",
-                              "exit_reason_list", "indicator_list"]
+ARGS_ANALYZE_ENTRIES_EXITS = ["analysis-groups", "enter-reason-list",
+                              "exit-reason-list", "indicator-list"]
 
 NO_CONF_REQURIED = ["convert-data", "convert-trade-data", "download-data", "list-timeframes",
                     "list-markets", "list-pairs", "list-strategies", "list-data",
@@ -421,7 +421,7 @@ class Arguments:
         self._build_args(optionlist=ARGS_WEBSERVER, parser=webserver_cmd)
 
         # Add backtesting analysis subcommand
-        analysis_cmd = subparsers.add_parser('analysis', help='Analysis module.',
+        analysis_cmd = subparsers.add_parser('analysis', help='Backtest Analysis module.',
                                              parents=[_common_parser, _strategy_parser])
         analysis_cmd.set_defaults(func=start_analysis_entries_exits)
         self._build_args(optionlist=ARGS_ANALYZE_ENTRIES_EXITS, parser=analysis_cmd)
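
As an aside on the subcommand wiring above: the snippet below is a minimal, self-contained argparse sketch of the same pattern, not freqtrade's actual parser (the --config option, defaults and dest name here are placeholders). It shows how add_parser('analysis', parents=[...]) inherits shared options and how a hyphenated flag can still land on an underscore attribute via an explicit dest.

import argparse

# Hypothetical stand-in for the shared parsers used above.
_common_parser = argparse.ArgumentParser(add_help=False)
_common_parser.add_argument('--config', default='config.json')

parser = argparse.ArgumentParser(prog='freqtrade')
subparsers = parser.add_subparsers(dest='command')

# Mirrors the pattern: a subcommand that inherits the common options.
analysis_cmd = subparsers.add_parser('analysis', help='Backtest Analysis module.',
                                     parents=[_common_parser])
analysis_cmd.add_argument('--analysis-groups', dest='analysis_groups', default='0,1,2')

args = parser.parse_args(['analysis', '--analysis-groups', '0,2'])
print(args.command, args.analysis_groups)  # -> analysis 0,2
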
@@ -615,7 +615,7 @@ AVAILABLE_CLI_OPTIONS = {
         action="store_true",
     ),
     "analysis_groups": Arg(
-        "--analysis_groups",
+        "--analysis-groups",
         help=("grouping output - ",
               "0: simple wins/losses by enter tag, ",
               "1: by enter_tag, ",
@@ -626,21 +626,21 @@ AVAILABLE_CLI_OPTIONS = {
         default="0,1,2",
     ),
     "enter_reason_list": Arg(
-        "--enter_reason_list",
+        "--enter-reason-list",
         help=("Comma separated list of entry signals to analyse. Default: all. ",
               "e.g. 'entry_tag_a,entry_tag_b'"),
         nargs='?',
         default='all',
     ),
     "exit_reason_list": Arg(
-        "--exit_reason_list",
+        "--exit-reason-list",
         help=("Comma separated list of exit signals to analyse. Default: all. ",
               "e.g. 'exit_tag_a,roi,stop_loss,trailing_stop_loss'"),
         nargs='?',
         default='all',
     ),
     "indicator_list": Arg(
-        "--indicator_list",
+        "--indicator-list",
         help=("Comma separated list of indicators to analyse. ",
               "e.g. 'close,rsi,bb_lowerband,profit_abs'"),
         nargs='?',
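
The three list options above all combine nargs='?' with a string default. A quick way to check what that combination does, using plain argparse rather than freqtrade's Arg wrapper (this is standard argparse behaviour, nothing specific to this change):

import argparse

p = argparse.ArgumentParser()
# Same shape as the options above: optional value, comma-separated string.
p.add_argument('--enter-reason-list', nargs='?', default='all')

print(p.parse_args([]).enter_reason_list)                    # 'all' (flag omitted -> default)
print(p.parse_args(['--enter-reason-list',
                    'enter_tag_a,enter_tag_b']).enter_reason_list)  # 'enter_tag_a,enter_tag_b'
print(p.parse_args(['--enter-reason-list']).enter_reason_list)      # None (flag given without a value -> const)
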
@@ -7,7 +7,8 @@ import joblib
 import pandas as pd
 from tabulate import tabulate
 
-from freqtrade.data.btanalysis import get_latest_backtest_filename, load_backtest_data
+from freqtrade.data.btanalysis import (get_latest_backtest_filename, load_backtest_data,
+                                       load_backtest_stats)
 from freqtrade.exceptions import OperationalException
 
 
@@ -49,8 +50,8 @@ def _process_candles_and_indicators(pairlist, strategy_name, trades, signal_cand
                     pair,
                     trades,
                     signal_candles[strategy_name][pair])
-        except Exception:
-            pass
+        except Exception as e:
+            print(f"Cannot process entry/exit reasons for {strategy_name}: ", e)
 
     return analysed_trades_dict
 
@@ -82,104 +83,79 @@ def _analyze_candles_and_indicators(pair, trades, signal_candles)
         try:
             trades_red = pd.merge(trades_red, trades_inds, on='signal_date', how='outer')
         except Exception as e:
-            print(e)
+            raise e
         return trades_red
     else:
         return pd.DataFrame()
 
 
 def _do_group_table_output(bigdf, glist):
-    if "0" in glist:
-        wins = bigdf.loc[bigdf['profit_abs'] >= 0] \
-            .groupby(['enter_reason']) \
-            .agg({'profit_abs': ['sum']})
+    for g in glist:
+        # 0: summary wins/losses grouped by enter tag
+        if g == "0":
+            group_mask = ['enter_reason']
+            wins = bigdf.loc[bigdf['profit_abs'] >= 0] \
+                .groupby(group_mask) \
+                .agg({'profit_abs': ['sum']})
 
             wins.columns = ['profit_abs_wins']
             loss = bigdf.loc[bigdf['profit_abs'] < 0] \
-            .groupby(['enter_reason']) \
+                .groupby(group_mask) \
                 .agg({'profit_abs': ['sum']})
             loss.columns = ['profit_abs_loss']
 
-        new = bigdf.groupby(['enter_reason']).agg({'profit_abs': [
+            new = bigdf.groupby(group_mask).agg({'profit_abs': [
                 'count',
                 lambda x: sum(x > 0),
                 lambda x: sum(x <= 0)]})
             new = pd.concat([new, wins, loss], axis=1).fillna(0)
 
             new['profit_tot'] = new['profit_abs_wins'] - abs(new['profit_abs_loss'])
             new['wl_ratio_pct'] = (new.iloc[:, 1] / new.iloc[:, 0] * 100).fillna(0)
             new['avg_win'] = (new['profit_abs_wins'] / new.iloc[:, 1]).fillna(0)
             new['avg_loss'] = (new['profit_abs_loss'] / new.iloc[:, 2]).fillna(0)
 
             new.columns = ['total_num_buys', 'wins', 'losses', 'profit_abs_wins', 'profit_abs_loss',
                            'profit_tot', 'wl_ratio_pct', 'avg_win', 'avg_loss']
 
             sortcols = ['total_num_buys']
 
             _print_table(new, sortcols, show_index=True)
-    if "1" in glist:
-        new = bigdf.groupby(['enter_reason']) \
-            .agg({'profit_abs': ['count', 'sum', 'median', 'mean'],
-                  'profit_ratio': ['sum', 'median', 'mean']}
-                 ).reset_index()
-        new.columns = ['enter_reason', 'num_buys', 'profit_abs_sum', 'profit_abs_median',
-                       'profit_abs_mean', 'median_profit_pct', 'mean_profit_pct',
-                       'total_profit_pct']
-        sortcols = ['profit_abs_sum', 'enter_reason']
-
-        new['median_profit_pct'] = new['median_profit_pct'] * 100
-        new['mean_profit_pct'] = new['mean_profit_pct'] * 100
-        new['total_profit_pct'] = new['total_profit_pct'] * 100
-
-        _print_table(new, sortcols)
-    if "2" in glist:
-        new = bigdf.groupby(['enter_reason', 'exit_reason']) \
-            .agg({'profit_abs': ['count', 'sum', 'median', 'mean'],
-                  'profit_ratio': ['sum', 'median', 'mean']}
-                 ).reset_index()
-        new.columns = ['enter_reason', 'exit_reason', 'num_buys', 'profit_abs_sum',
-                       'profit_abs_median', 'profit_abs_mean', 'median_profit_pct',
-                       'mean_profit_pct', 'total_profit_pct']
-        sortcols = ['profit_abs_sum', 'enter_reason']
-
-        new['median_profit_pct'] = new['median_profit_pct'] * 100
-        new['mean_profit_pct'] = new['mean_profit_pct'] * 100
-        new['total_profit_pct'] = new['total_profit_pct'] * 100
-
-        _print_table(new, sortcols)
-    if "3" in glist:
-        new = bigdf.groupby(['pair', 'enter_reason']) \
-            .agg({'profit_abs': ['count', 'sum', 'median', 'mean'],
-                  'profit_ratio': ['sum', 'median', 'mean']}
-                 ).reset_index()
-        new.columns = ['pair', 'enter_reason', 'num_buys', 'profit_abs_sum',
-                       'profit_abs_median', 'profit_abs_mean', 'median_profit_pct',
-                       'mean_profit_pct', 'total_profit_pct']
-        sortcols = ['profit_abs_sum', 'enter_reason']
-
-        new['median_profit_pct'] = new['median_profit_pct'] * 100
-        new['mean_profit_pct'] = new['mean_profit_pct'] * 100
-        new['total_profit_pct'] = new['total_profit_pct'] * 100
-
-        _print_table(new, sortcols)
-    if "4" in glist:
-        new = bigdf.groupby(['pair', 'enter_reason', 'exit_reason']) \
-            .agg({'profit_abs': ['count', 'sum', 'median', 'mean'],
-                  'profit_ratio': ['sum', 'median', 'mean']}
-                 ).reset_index()
-        new.columns = ['pair', 'enter_reason', 'exit_reason', 'num_buys', 'profit_abs_sum',
-                       'profit_abs_median', 'profit_abs_mean', 'median_profit_pct',
-                       'mean_profit_pct', 'total_profit_pct']
-        sortcols = ['profit_abs_sum', 'enter_reason']
-
-        new['median_profit_pct'] = new['median_profit_pct'] * 100
-        new['mean_profit_pct'] = new['mean_profit_pct'] * 100
-        new['total_profit_pct'] = new['total_profit_pct'] * 100
-
-        _print_table(new, sortcols)
+
+        else:
+            agg_mask = {'profit_abs': ['count', 'sum', 'median', 'mean'],
+                        'profit_ratio': ['sum', 'median', 'mean']}
+            agg_cols = ['num_buys', 'profit_abs_sum', 'profit_abs_median',
+                        'profit_abs_mean', 'median_profit_pct', 'mean_profit_pct',
+                        'total_profit_pct']
+            sortcols = ['profit_abs_sum', 'enter_reason']
+
+            # 1: profit summaries grouped by enter_tag
+            if g == "1":
+                group_mask = ['enter_reason']
+            # 2: profit summaries grouped by enter_tag and exit_tag
+            if g == "2":
+                group_mask = ['enter_reason', 'exit_reason']
+            # 3: profit summaries grouped by pair and enter_tag
+            if g == "3":
+                group_mask = ['pair', 'enter_reason']
+            # 4: profit summaries grouped by pair, enter_ and exit_tag (this can get quite large)
+            if g == "4":
+                group_mask = ['pair', 'enter_reason', 'exit_reason']
+
+            new = bigdf.groupby(group_mask).agg(agg_mask).reset_index()
+            new.columns = group_mask + agg_cols
+            new['median_profit_pct'] = new['median_profit_pct'] * 100
+            new['mean_profit_pct'] = new['mean_profit_pct'] * 100
+            new['total_profit_pct'] = new['total_profit_pct'] * 100
+
+            _print_table(new, sortcols)
 
 
-def _print_results(analysed_trades, stratname, group,
+def _print_results(analysed_trades, stratname, analysis_groups,
                    enter_reason_list, exit_reason_list,
                    indicator_list, columns=None):
 
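
To illustrate the consolidated code path for groups "1"–"4" introduced above, here is a self-contained toy example; the trade data is invented, and only the groupby/agg spec and the flattened column names are taken from the diff:

import pandas as pd

# Toy trade data (values are illustrative only).
bigdf = pd.DataFrame({
    'pair':          ['ETH/BTC', 'ETH/BTC', 'LTC/BTC'],
    'enter_reason':  ['enter_tag_a', 'enter_tag_b', 'enter_tag_a'],
    'exit_reason':   ['roi', 'stop_loss', 'roi'],
    'profit_abs':    [0.5, -0.2, 0.1],
    'profit_ratio':  [0.05, -0.02, 0.01],
})

# Same aggregation spec and output column names as in the new code path.
agg_mask = {'profit_abs': ['count', 'sum', 'median', 'mean'],
            'profit_ratio': ['sum', 'median', 'mean']}
agg_cols = ['num_buys', 'profit_abs_sum', 'profit_abs_median',
            'profit_abs_mean', 'median_profit_pct', 'mean_profit_pct',
            'total_profit_pct']

group_mask = ['enter_reason', 'exit_reason']          # this is group "2"
new = bigdf.groupby(group_mask).agg(agg_mask).reset_index()
new.columns = group_mask + agg_cols                   # flatten the MultiIndex columns
new['median_profit_pct'] = new['median_profit_pct'] * 100
print(new)
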
@@ -191,8 +167,8 @@ def _print_results(analysed_trades, stratname, group,
             bigdf = pd.concat([bigdf, trades], ignore_index=True)
 
     if bigdf.shape[0] > 0 and ('enter_reason' in bigdf.columns):
-        if group is not None:
-            glist = group.split(",")
+        if analysis_groups is not None:
+            glist = analysis_groups.split(",")
             _do_group_table_output(bigdf, glist)
 
         if enter_reason_list is not None and not enter_reason_list == "all":
@@ -244,6 +220,9 @@ def process_entry_exit_reasons(backtest_dir: Path,
                                indicator_list: Optional[str] = None):
 
     try:
+        bt_stats = load_backtest_stats(backtest_dir)
+        logger.info(bt_stats)
+        # strategy_name = bt_stats['something']
         trades = load_backtest_data(backtest_dir, strategy_name)
     except ValueError as e:
         raise OperationalException(e) from e
@@ -24,10 +24,10 @@ def test_backtest_analysis_nomock(default_conf, mocker, caplog, testdatadir, tmp
         "exit_profit_only": False,
         "exit_profit_offset": 0.0,
         "ignore_roi_if_entry_signal": False,
-        'analysis_groups': "0",
-        'enter_reason_list': "all",
-        'exit_reason_list': "all",
-        'indicator_list': "rsi"
+        'analysis-groups': "0",
+        'enter-reason-list': "all",
+        'exit-reason-list': "all",
+        'indicator-list': "rsi"
     })
     patch_exchange(mocker)
     result1 = pd.DataFrame({'pair': ['ETH/BTC', 'LTC/BTC'],
@@ -94,8 +94,8 @@ def test_backtest_analysis_nomock(default_conf, mocker, caplog, testdatadir, tmp
         '--config', 'config.json',
         '--datadir', str(testdatadir),
         '--user-data-dir', str(tmpdir),
-        '--analysis_groups', '0',
-        '--indicator_list', 'rsi',
+        '--analysis-groups', '0',
+        '--indicator-list', 'rsi',
         '--strategy',
         'StrategyTestV3Analysis',
     ]