Make CLI option and docs clearer that we're handling signals not trades
commit f5359985e8 (parent 854f056eaf)
@@ -116,12 +116,12 @@ For example, if your backtest timerange was `20220101-20221231` but you only wan
 freqtrade backtesting-analysis -c <config.json> --timerange 20220101-20220201
 ```

-### Printing out rejected trades
+### Printing out rejected signals

-Use the `--rejected` option to print out rejected trades.
+Use the `--rejected-signals` option to print out rejected signals.

 ```bash
-freqtrade backtesting-analysis -c <config.json> --rejected
+freqtrade backtesting-analysis -c <config.json> --rejected-signals
 ```

 ### Writing tables to CSV
@@ -136,11 +136,11 @@ freqtrade backtesting-analysis -c <config.json> --analysis-to-csv
 By default this will write one file per output table you specified in the `backtesting-analysis` command, e.g.

 ```bash
-freqtrade backtesting-analysis -c <config.json> --analysis-to-csv --rejected --analysis-groups 0 1
+freqtrade backtesting-analysis -c <config.json> --analysis-to-csv --rejected-signals --analysis-groups 0 1
 ```

 This will write to `user_data/backtest_results`:
-* rejected.csv
+* rejected_signals.csv
 * group_0.csv
 * group_1.csv
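Once written, these CSV files can be picked up by any external tool; a minimal sketch of reading the rejected-signals table back with pandas (the path assumes the default `user_data/backtest_results` location listed above):

```python
# Sketch: load the exported rejected-signals table for further analysis.
# The path assumes the default user_data layout shown above.
import pandas as pd

rejected = pd.read_csv("user_data/backtest_results/rejected_signals.csv")
print(rejected.head())
```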
@@ -659,8 +659,8 @@ AVAILABLE_CLI_OPTIONS = {
         default=[],
     ),
     "analysis_rejected": Arg(
-        '--rejected',
-        help='Analyse rejected trades',
+        '--rejected-signals',
+        help='Analyse rejected signals',
         action='store_true',
     ),
     "analysis_to_csv": Arg(
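For context on how the renamed option reaches the code below: freqtrade's `Arg` entries are ultimately handed to `argparse`, so `--rejected-signals` becomes a plain boolean flag. A minimal sketch using `argparse` directly (not the project's `Arg` wrapper; the explicit `dest='analysis_rejected'` is an assumption chosen to mirror the config key used in the next hunk):

```python
# Minimal argparse sketch of the renamed flag. This is not freqtrade's Arg
# wrapper; dest='analysis_rejected' is an assumption chosen to mirror the
# config key referenced elsewhere in this commit.
import argparse

parser = argparse.ArgumentParser(prog="freqtrade backtesting-analysis")
parser.add_argument(
    "--rejected-signals",
    dest="analysis_rejected",
    action="store_true",
    help="Analyse rejected signals",
)

args = parser.parse_args(["--rejected-signals"])
print(args.analysis_rejected)  # True
```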
@@ -466,7 +466,7 @@ class Configuration:
                              logstring='Filter trades by timerange: {}')

         self._args_to_config(config, argname='analysis_rejected',
-                             logstring='Analyse rejected trades: {}')
+                             logstring='Analyse rejected signals: {}')

         self._args_to_config(config, argname='analysis_to_csv',
                              logstring='Store analysis tables to CSV: {}')
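`_args_to_config` follows the project's usual copy-and-log pattern: if the argparse attribute was set, it is written into the config dict and the message is logged. A rough standalone sketch of that pattern (a hypothetical helper, not the real method):

```python
# Hypothetical standalone version of the copy-and-log pattern used above;
# the real Configuration._args_to_config works on the parsed argument
# namespace held by the Configuration object.
import logging
from typing import Any, Dict

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def args_to_config(config: Dict[str, Any], args: Dict[str, Any],
                   argname: str, logstring: str) -> None:
    # Only copy values that were actually supplied on the command line.
    if argname in args and args[argname] is not None:
        config[argname] = args[argname]
        logger.info(logstring.format(args[argname]))


config: Dict[str, Any] = {}
args = {"analysis_rejected": True}
args_to_config(config, args, "analysis_rejected", "Analyse rejected signals: {}")
print(config)  # {'analysis_rejected': True}
```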
@@ -25,15 +25,15 @@ def _load_backtest_analysis_data(backtest_dir: Path, name: str):

     try:
         scp = open(scpf, "rb")
-        rejected_trades = joblib.load(scp)
+        loaded_data = joblib.load(scp)
         logger.info(f"Loaded {name} data: {str(scpf)}")
     except Exception as e:
         logger.error(f"Cannot load {name} data from pickled results: ", e)

-    return rejected_trades
+    return loaded_data


-def _load_rejected_trades(backtest_dir: Path):
+def _load_rejected_signals(backtest_dir: Path):
     return _load_backtest_analysis_data(backtest_dir, "rejected")

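The loader above is a thin wrapper around `joblib.load` with logging; renaming the local variable to `loaded_data` reflects that it now serves both signal-candle and rejected-signal files. A self-contained sketch of that load step (the pickle is created on the spot so the example runs on its own):

```python
# Sketch of the joblib load performed by _load_backtest_analysis_data.
# The example pickle is created first so the snippet is self-contained.
import logging
from pathlib import Path

import joblib

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

name = "rejected"
scpf = Path(f"example_{name}.pkl")
joblib.dump({"MyStrategy": {}}, scpf)  # stand-in for a real backtest export

loaded_data = None
try:
    with scpf.open("rb") as scp:
        loaded_data = joblib.load(scp)
    logger.info(f"Loaded {name} data: {scpf}")
except Exception as e:
    logger.error(f"Cannot load {name} data from pickled results: {e}")

print(loaded_data)
```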
@@ -163,13 +163,13 @@ def _do_group_table_output(bigdf, glist, to_csv=False, csv_path=None):
         logger.warning("Invalid group mask specified.")


-def _do_rejected_trades_output(rejected_trades_df, to_csv=False, csv_path=None):
+def _do_rejected_signals_output(rejected_signals_df, to_csv=False, csv_path=None):
     cols = ['pair', 'date', 'enter_tag']
     sortcols = ['date', 'pair', 'enter_tag']
-    _print_table(rejected_trades_df[cols],
+    _print_table(rejected_signals_df[cols],
                  sortcols,
                  show_index=False,
-                 name="Rejected Trades:",
+                 name="Rejected Signals:",
                  to_csv=to_csv,
                  csv_path=csv_path)

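The renamed output helper only keeps the `pair`, `date` and `enter_tag` columns and prints them sorted. A rough pandas-only equivalent (the real code goes through the module's `_print_table`, which also handles the CSV export path):

```python
# Rough pandas-only equivalent of the rejected-signals table output.
import pandas as pd

rejected_signals_df = pd.DataFrame({
    "pair": ["BTC/USDT", "ETH/USDT"],
    "date": pd.to_datetime(["2022-01-02", "2022-01-01"]),
    "enter_tag": ["breakout", "dip_buy"],
    "rsi": [71.2, 28.4],  # extra indicator columns are dropped below
})

cols = ["pair", "date", "enter_tag"]
sortcols = ["date", "pair", "enter_tag"]

print("Rejected Signals:")
print(rejected_signals_df[cols].sort_values(sortcols).to_string(index=False))
```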
@@ -208,13 +208,13 @@ def prepare_results(analysed_trades, stratname,


 def print_results(res_df, analysis_groups, indicator_list,
-                  rejected_trades=None, to_csv=False, csv_path=None):
+                  rejected_signals=None, to_csv=False, csv_path=None):
     if res_df.shape[0] > 0:
         if analysis_groups:
             _do_group_table_output(res_df, analysis_groups, to_csv=to_csv, csv_path=csv_path)

-        if rejected_trades is not None and not rejected_trades.empty:
-            _do_rejected_trades_output(rejected_trades, to_csv=to_csv, csv_path=csv_path)
+        if rejected_signals is not None and not rejected_signals.empty:
+            _do_rejected_signals_output(rejected_signals, to_csv=to_csv, csv_path=csv_path)

         # NB this can be large for big dataframes!
         if "all" in indicator_list:
@@ -291,8 +291,8 @@ def process_entry_exit_reasons(config: Config):

     rej_df = None
     if do_rejected:
-        rejected_trades_dict = _load_rejected_trades(config['exportfilename'])
-        rej_df = prepare_results(rejected_trades_dict, strategy_name,
+        rejected_signals_dict = _load_rejected_signals(config['exportfilename'])
+        rej_df = prepare_results(rejected_signals_dict, strategy_name,
                                  enter_reason_list, exit_reason_list,
                                  timerange=timerange)

@@ -307,7 +307,7 @@ def process_entry_exit_reasons(config: Config):
     print_results(res_df,
                   analysis_groups,
                   indicator_list,
-                  rejected_trades=rej_df,
+                  rejected_signals=rej_df,
                   to_csv=to_csv,
                   csv_path=csv_path)

@@ -29,7 +29,7 @@ from freqtrade.mixins import LoggingMixin
 from freqtrade.optimize.backtest_caching import get_strategy_run_id
 from freqtrade.optimize.bt_progress import BTProgress
 from freqtrade.optimize.optimize_reports import (generate_backtest_stats, show_backtest_results,
-                                                 store_backtest_rejected_trades,
+                                                 store_backtest_rejected_signals,
                                                  store_backtest_signal_candles,
                                                  store_backtest_stats)
 from freqtrade.persistence import LocalTrade, Order, PairLocks, Trade
@@ -1053,7 +1053,7 @@ class Backtesting:

     def _collate_rejected(self, pair, row):
         """
-        Temporarily store rejected trade information for downstream use in backtesting_analysis
+        Temporarily store rejected signal information for downstream use in backtesting_analysis
         """
         # It could be fun to enable hyperopt mode to write
         # a loss function to reduce rejected signals
@@ -1283,7 +1283,7 @@ class Backtesting:
         if (self.config.get('export', 'none') == 'signals' and
                 self.dataprovider.runmode == RunMode.BACKTEST):
             self._generate_trade_signal_candles(preprocessed_tmp, results)
-            self._generate_rejected_trades(preprocessed_tmp, self.rejected_dict)
+            self._generate_rejected_signals(preprocessed_tmp, self.rejected_dict)

         return min_date, max_date

@@ -1308,22 +1308,22 @@ class Backtesting:

         self.processed_dfs[self.strategy.get_strategy_name()] = signal_candles_only

-    def _generate_rejected_trades(self, preprocessed_df, rejected_dict):
+    def _generate_rejected_signals(self, preprocessed_df, rejected_dict):
         rejected_candles_only = {}
-        for pair, trades in rejected_dict.items():
-            rejected_trades_only_df = DataFrame()
+        for pair, signals in rejected_dict.items():
+            rejected_signals_only_df = DataFrame()
             pairdf = preprocessed_df[pair]

-            for t in trades:
+            for t in signals:
                 data_df_row = pairdf.loc[(pairdf['date'] == t[0])].copy()
                 data_df_row['pair'] = pair
                 data_df_row['enter_tag'] = t[1]

-                rejected_trades_only_df = pd.concat([
-                    rejected_trades_only_df.infer_objects(),
+                rejected_signals_only_df = pd.concat([
+                    rejected_signals_only_df.infer_objects(),
                     data_df_row.infer_objects()])

-            rejected_candles_only[pair] = rejected_trades_only_df
+            rejected_candles_only[pair] = rejected_signals_only_df

         self.rejected_df[self.strategy.get_strategy_name()] = rejected_candles_only

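`_generate_rejected_signals` looks up, for every rejected `(date, enter_tag)` tuple, the matching candle row in the preprocessed dataframe and concatenates those rows per pair. A simplified, self-contained sketch of that lookup (toy data, not the real backtesting state):

```python
# Simplified sketch of the per-pair lookup done by _generate_rejected_signals.
import pandas as pd
from pandas import DataFrame

preprocessed_df = {
    "BTC/USDT": pd.DataFrame({
        "date": pd.to_datetime(["2022-01-01", "2022-01-02", "2022-01-03"]),
        "open": [100.0, 101.0, 102.0],
    })
}
# rejected_dict maps pair -> list of (signal date, enter_tag) tuples.
rejected_dict = {
    "BTC/USDT": [(pd.Timestamp("2022-01-02"), "breakout")],
}

rejected_candles_only = {}
for pair, signals in rejected_dict.items():
    rejected_signals_only_df = DataFrame()
    pairdf = preprocessed_df[pair]

    for t in signals:
        # Pick the candle on which the signal was rejected and annotate it.
        data_df_row = pairdf.loc[pairdf["date"] == t[0]].copy()
        data_df_row["pair"] = pair
        data_df_row["enter_tag"] = t[1]

        rejected_signals_only_df = pd.concat([
            rejected_signals_only_df.infer_objects(),
            data_df_row.infer_objects()])

    rejected_candles_only[pair] = rejected_signals_only_df

print(rejected_candles_only["BTC/USDT"])
```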
@@ -1392,7 +1392,7 @@ class Backtesting:
             store_backtest_signal_candles(
                 self.config['exportfilename'], self.processed_dfs, dt_appendix)

-            store_backtest_rejected_trades(
+            store_backtest_rejected_signals(
                 self.config['exportfilename'], self.rejected_df, dt_appendix)

             # Results may be mixed up now. Sort them so they follow --strategy-list order.
@@ -75,7 +75,7 @@ def store_backtest_signal_candles(
     return _store_backtest_analysis_data(Path(recordfilename), candles, dtappendix, "signals")


-def store_backtest_rejected_trades(
+def store_backtest_rejected_signals(
         recordfilename: Path, trades: Dict[str, Dict], dtappendix: str) -> Path:
     return _store_backtest_analysis_data(Path(recordfilename), trades, dtappendix, "rejected")

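`_store_backtest_analysis_data` is the joblib-based counterpart to the loader shown earlier; the `"signals"`/`"rejected"` argument ends up in the output file name. A sketch under that assumption (the exact naming scheme used here is a guess, not the project's actual one):

```python
# Sketch of what a joblib-backed store helper could look like; the file-naming
# scheme "<stem>-<dtappendix>_<name>.pkl" is an assumption for illustration.
from pathlib import Path
from typing import Dict

import joblib


def store_analysis_data(recordfilename: Path, data: Dict[str, Dict],
                        dtappendix: str, name: str) -> Path:
    filename = recordfilename.parent / f"{recordfilename.stem}-{dtappendix}_{name}.pkl"
    with filename.open("wb") as fh:
        joblib.dump(data, fh)
    return filename


stored = store_analysis_data(Path("backtest-result.json"), {"MyStrategy": {}},
                             "2023-01-01_00-00-00", "rejected")
print(stored)
```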
@@ -201,8 +201,8 @@ def test_backtest_analysis_nomock(default_conf, mocker, caplog, testdatadir, tmp
     assert 'enter_tag_long_b' not in captured.out

     # test rejected - how to mock this?
-    # args = get_args(base_args + ['--rejected'])
+    # args = get_args(base_args + ['--rejected-signals'])
     # start_analysis_entries_exits(args)
     # captured = capsys.readouterr()
-    # assert 'Rejected Trades:' in captured.out
+    # assert 'Rejected Signals:' in captured.out
     # assert False