Replace separate start/end date option with usual timerange option

parent 706bc9ebea
commit 1a3f88c7b9
@@ -103,15 +103,14 @@ output.
 
 ### Filtering the trade output by date
 
-To show only trades between dates within your backtested timerange, supply the following option(s) in YYYYMMDD format:
+To show only trades between dates within your backtested timerange, supply the usual `timerange` option in `YYYYMMDD-[YYYYMMDD]` format:
 
 ```
---analysis-date-start : Start date to filter output trades, inclusive. e.g. 20220101
---analysis-date-end : End date to filter output trades, exclusive. e.g. 20220131
+--timerange : Timerange to filter output trades, start date inclusive, end date exclusive. e.g. 20220101-20221231
 ```
 
 For example, if your backtest timerange was `20220101-20221231` but you only want to output trades in January:
 
 ```bash
-freqtrade backtesting-analysis -c <config.json> --analysis-date-start 20220101 --analysis-date-end 20220201
+freqtrade backtesting-analysis -c <config.json> --timerange 20220101-20220201
 ```

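The boundary semantics stay the same as with the old options: the start of the timerange is inclusive, the end exclusive. A minimal standalone sketch of that behaviour in plain pandas (illustrative only, not freqtrade code; pairs and dates are made up):

```python
# Start of --timerange is inclusive, end is exclusive.
import pandas as pd

trades = pd.DataFrame({
    "pair": ["BTC/USDT", "ETH/USDT", "XRP/USDT"],
    "date": pd.to_datetime(["2021-12-31", "2022-01-15", "2022-02-01"], utc=True),
})

start = pd.Timestamp("20220101", tz="UTC")   # inclusive lower bound
stop = pd.Timestamp("20220201", tz="UTC")    # exclusive upper bound

january = trades.loc[(trades["date"] >= start) & (trades["date"] < stop)]
print(january)  # only the 2022-01-15 ETH/USDT trade remains
```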
@@ -106,8 +106,7 @@ ARGS_HYPEROPT_SHOW = ["hyperopt_list_best", "hyperopt_list_profitable", "hyperop
                       "disableparamexport", "backtest_breakdown"]
 
 ARGS_ANALYZE_ENTRIES_EXITS = ["exportfilename", "analysis_groups", "enter_reason_list",
-                              "exit_reason_list", "indicator_list",
-                              "analysis_date_start", "analysis_date_end"]
+                              "exit_reason_list", "indicator_list", "timerange"]
 
 NO_CONF_REQURIED = ["convert-data", "convert-trade-data", "download-data", "list-timeframes",
                     "list-markets", "list-pairs", "list-strategies", "list-freqaimodels",

@@ -658,16 +658,6 @@ AVAILABLE_CLI_OPTIONS = {
         nargs='+',
         default=[],
     ),
-    "analysis_date_start": Arg(
-        "--analysis-date-start",
-        help=("Start date to filter trades for analysis (inclusive). "
-              "e.g. '20220101'"),
-    ),
-    "analysis_date_end": Arg(
-        "--analysis-date-end",
-        help=("End date to filter trades for analysis (exclusive). "
-              "e.g. '20220131'"),
-    ),
     "freqaimodel": Arg(
         '--freqaimodel',
         help='Specify a custom freqaimodels.',

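The dedicated `Arg` entries can simply be dropped because `backtesting-analysis` now picks up the shared `--timerange` option through `ARGS_ANALYZE_ENTRIES_EXITS`. A simplified sketch of that pattern, using a hypothetical cut-down option table rather than the real freqtrade wiring:

```python
# Simplified sketch only: each subcommand lists the shared option names it
# wants, and the parser registers the matching definitions. Appending
# "timerange" to the list is enough to expose the common --timerange flag.
import argparse

# Hypothetical, cut-down stand-ins for entries in AVAILABLE_CLI_OPTIONS.
AVAILABLE_OPTIONS = {
    "exportfilename": (("--export-filename",), {"help": "Backtest results file."}),
    "timerange": (("--timerange",), {"help": "Specify what timerange of data to use."}),
}

ARGS_ANALYZE_ENTRIES_EXITS = ["exportfilename", "timerange"]

parser = argparse.ArgumentParser(prog="freqtrade backtesting-analysis")
for name in ARGS_ANALYZE_ENTRIES_EXITS:
    flags, kwargs = AVAILABLE_OPTIONS[name]
    parser.add_argument(*flags, dest=name, **kwargs)

print(parser.parse_args(["--timerange", "20220101-20220201"]))
# Namespace(exportfilename=None, timerange='20220101-20220201')
```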
@@ -462,11 +462,8 @@ class Configuration:
         self._args_to_config(config, argname='indicator_list',
                              logstring='Analysis indicator list: {}')
 
-        self._args_to_config(config, argname='analysis_date_start',
-                             logstring='Analysis filter start date: {}')
-
-        self._args_to_config(config, argname='analysis_date_end',
-                             logstring='Analysis filter end date: {}')
+        self._args_to_config(config, argname='timerange',
+                             logstring='Filter trades by timerange: {}')
 
     def _process_runmode(self, config: Config) -> None:
 

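`_args_to_config` itself is not part of this diff; as a rough sketch, a helper of this kind copies a supplied CLI value into the config dict and logs it (illustrative only, the actual freqtrade implementation may differ in signature and details):

```python
# Rough, illustrative stand-in for an _args_to_config-style helper.
import logging
from typing import Any, Dict

logger = logging.getLogger(__name__)


def args_to_config(args: Dict[str, Any], config: Dict[str, Any],
                   argname: str, logstring: str) -> None:
    # Copy the CLI value into the config only when it was actually supplied.
    value = args.get(argname)
    if value is not None and value is not False:
        config[argname] = value
        logger.info(logstring.format(value))


cli_args = {"timerange": "20220101-20220201"}
config: Dict[str, Any] = {}
args_to_config(cli_args, config, "timerange", "Filter trades by timerange: {}")
print(config)  # {'timerange': '20220101-20220201'}
```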
@@ -1,11 +1,11 @@
 import logging
-from datetime import datetime
 from pathlib import Path
 
 import joblib
 import pandas as pd
 from tabulate import tabulate
 
+from freqtrade.configuration import TimeRange
 from freqtrade.constants import Config
 from freqtrade.data.btanalysis import (get_latest_backtest_filename, load_backtest_data,
                                        load_backtest_stats)

@@ -153,22 +153,12 @@ def _do_group_table_output(bigdf, glist):
         logger.warning("Invalid group mask specified.")
 
 
-def _select_rows_within_dates(df, date_start=None, date_end=None):
-    dtfmt = "%Y%m%d"
-    try:
-        bool(datetime.strptime(date_start, dtfmt))
-        bool(datetime.strptime(date_end, dtfmt))
-    except ValueError:
-        logger.error("Invalid start and/or end date provided. Use YYYYMMDD.")
-        return None
-    except TypeError:
-        return df
-
-    if (date_start is not None):
-        df = df.loc[(df['date'] >= date_start)]
-
-    if (date_end is not None):
-        df = df.loc[(df['date'] < date_end)]
+def _select_rows_within_dates(df, timerange=None, df_date_col: str = 'date'):
+    if timerange:
+        if timerange.starttype == 'date':
+            df = df.loc[(df[df_date_col] >= timerange.startdt)]
+        if timerange.stoptype == 'date':
+            df = df.loc[(df[df_date_col] < timerange.stopdt)]
     return df
 
 

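The new filter reads only four attributes from the timerange object (`starttype`, `startdt`, `stoptype`, `stopdt`). A quick, self-contained check of its behaviour using a toy DataFrame and a hypothetical stand-in object in place of `TimeRange`:

```python
# The function body mirrors the hunk above; the stand-in only provides the
# four attributes the filter reads. Start is inclusive, stop is exclusive.
from types import SimpleNamespace

import pandas as pd


def _select_rows_within_dates(df, timerange=None, df_date_col: str = 'date'):
    if timerange:
        if timerange.starttype == 'date':
            df = df.loc[(df[df_date_col] >= timerange.startdt)]
        if timerange.stoptype == 'date':
            df = df.loc[(df[df_date_col] < timerange.stopdt)]
    return df


trades = pd.DataFrame({
    'date': pd.to_datetime(['2018-01-28', '2018-01-29', '2018-01-30'], utc=True),
    'profit_ratio': [0.01, -0.02, 0.03],
})

tr = SimpleNamespace(starttype='date', startdt=pd.Timestamp('2018-01-29', tz='UTC'),
                     stoptype='date', stopdt=pd.Timestamp('2018-01-30', tz='UTC'))

print(_select_rows_within_dates(trades, tr))  # only the 2018-01-29 row remains
```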
@@ -183,12 +173,12 @@ def _select_rows_by_tags(df, enter_reason_list, exit_reason_list):
 
 def prepare_results(analysed_trades, stratname,
                     enter_reason_list, exit_reason_list,
-                    date_start=None, date_end=None):
+                    timerange=None):
     res_df = pd.DataFrame()
     for pair, trades in analysed_trades[stratname].items():
         res_df = pd.concat([res_df, trades], ignore_index=True)
 
-    res_df = _select_rows_within_dates(res_df, date_start, date_end)
+    res_df = _select_rows_within_dates(res_df, timerange)
 
     if res_df is not None and res_df.shape[0] > 0 and ('enter_reason' in res_df.columns):
         res_df = _select_rows_by_tags(res_df, enter_reason_list, exit_reason_list)

@@ -236,8 +226,9 @@ def process_entry_exit_reasons(config: Config):
     enter_reason_list = config.get('enter_reason_list', ["all"])
     exit_reason_list = config.get('exit_reason_list', ["all"])
     indicator_list = config.get('indicator_list', [])
-    analysis_date_start = config.get('analysis_date_start', None)
-    analysis_date_end = config.get('analysis_date_end', None)
+    timerange = TimeRange.parse_timerange(None if config.get(
+        'timerange') is None else str(config.get('timerange')))
 
     backtest_stats = load_backtest_stats(config['exportfilename'])
 

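For reference, with freqtrade installed, the parsed object looks roughly like this (attribute names taken from the usage in this diff; exact values depend on the installed version):

```python
# Assumes freqtrade is installed and importable.
from freqtrade.configuration import TimeRange

timerange = TimeRange.parse_timerange("20220101-20220201")
print(timerange.starttype, timerange.startdt)  # 'date' 2022-01-01 00:00:00+00:00
print(timerange.stoptype, timerange.stopdt)    # 'date' 2022-02-01 00:00:00+00:00

# Without --timerange, parse_timerange(None) yields an open range and the
# dataframe passes through the filter unchanged.
```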
@@ -252,8 +243,7 @@ def process_entry_exit_reasons(config: Config):
 
         res_df = prepare_results(analysed_trades_dict, strategy_name,
                                  enter_reason_list, exit_reason_list,
-                                 date_start=analysis_date_start,
-                                 date_end=analysis_date_end)
+                                 timerange=timerange)
 
         print_results(res_df,
                       analysis_groups,

@@ -191,10 +191,8 @@ def test_backtest_analysis_nomock(default_conf, mocker, caplog, testdatadir, tmp
     assert '2.5' in captured.out
 
     # test date filtering
-    args = get_args(base_args +
-                    ['--analysis-date-start', "20180129",
-                     '--analysis-date-end', "20180130"]
-                    )
+    args = get_args(base_args + ['--timerange', "20180129-20180130"])
     start_analysis_entries_exits(args)
     captured = capsys.readouterr()
+    assert 'enter_tag_long_a' in captured.out
     assert 'enter_tag_long_b' not in captured.out