Merge pull request #8031 from froggleston/eea_grp5

Add a new analysis group to output stats grouped by exit_tag
Matthias 2023-01-16 15:50:47 +01:00 committed by GitHub
commit 81eb9ebc6e
4 changed files with 20 additions and 3 deletions


@@ -32,7 +32,7 @@ To analyze the entry/exit tags, we now need to use the `freqtrade backtesting-an
 with `--analysis-groups` option provided with space-separated arguments (default `0 1 2`):
 ``` bash
-freqtrade backtesting-analysis -c <config.json> --analysis-groups 0 1 2 3 4
+freqtrade backtesting-analysis -c <config.json> --analysis-groups 0 1 2 3 4 5
 ```
 This command will read from the last backtesting results. The `--analysis-groups` option is
@@ -43,6 +43,7 @@ ranging from the simplest (0) to the most detailed per pair, per buy and per sel
 * 2: profit summaries grouped by enter_tag and exit_tag
 * 3: profit summaries grouped by pair and enter_tag
 * 4: profit summaries grouped by pair, enter_ and exit_tag (this can get quite large)
+* 5: profit summaries grouped by exit_tag
 More options are available by running with the `-h` option.
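For orientation, the documented group numbers map onto grouping columns roughly as sketched below. Only groups 4 and 5 appear verbatim in this diff (see the `_do_group_table_output` hunk further down); the columns for groups 1-3 are inferred from the descriptions above, so treat this as an illustrative summary rather than the actual implementation.

```python
# Rough, inferred mapping from --analysis-groups values to the trade columns
# they group by. Only the entries for "4" and "5" are taken verbatim from this
# diff; "1"-"3" are inferred from the documentation list above, and group "0"
# (a simpler summary handled separately in the code) is omitted.
GROUP_COLUMNS = {
    "1": ["enter_reason"],                         # by enter_tag
    "2": ["enter_reason", "exit_reason"],          # by enter_tag and exit_tag
    "3": ["pair", "enter_reason"],                 # by pair and enter_tag
    "4": ["pair", "enter_reason", "exit_reason"],  # by pair, enter_ and exit_tag
    "5": ["exit_reason"],                          # new in this PR: by exit_tag
}
```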


@@ -632,10 +632,11 @@ AVAILABLE_CLI_OPTIONS = {
 "1: by enter_tag, "
 "2: by enter_tag and exit_tag, "
 "3: by pair and enter_tag, "
-"4: by pair, enter_ and exit_tag (this can get quite large)"),
+"4: by pair, enter_ and exit_tag (this can get quite large), "
+"5: by exit_tag"),
 nargs='+',
 default=['0', '1', '2'],
-choices=['0', '1', '2', '3', '4'],
+choices=['0', '1', '2', '3', '4', '5'],
 ),
 "enter_reason_list": Arg(
 "--enter-reason-list",


@@ -141,6 +141,12 @@ def _do_group_table_output(bigdf, glist):
 # 4: profit summaries grouped by pair, enter_ and exit_tag (this can get quite large)
 if g == "4":
 group_mask = ['pair', 'enter_reason', 'exit_reason']
+# 5: profit summaries grouped by exit_tag
+if g == "5":
+group_mask = ['exit_reason']
+sortcols = ['exit_reason']
 if group_mask:
 new = bigdf.groupby(group_mask).agg(agg_mask).reset_index()
 new.columns = group_mask + agg_cols
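In effect, the new branch makes group 5 a single-column `groupby` over `exit_reason`. The `agg_mask`/`agg_cols` used by `_do_group_table_output` are defined earlier in the function and are not part of this hunk, so the toy sketch below substitutes an assumed profit aggregation purely to illustrate the shape of the output:

```python
# Illustrative sketch of what the new group 5 branch does: group backtest
# trades by exit_reason only and aggregate profit statistics. The column names
# (profit_ratio, profit_abs) and the aggregation spec stand in for the real
# agg_mask/agg_cols, which this hunk does not show.
import pandas as pd

trades = pd.DataFrame({
    "pair": ["ETH/BTC", "ETH/BTC", "ADA/BTC", "ADA/BTC"],
    "enter_reason": ["enter_tag_1", "enter_tag_2", "enter_tag_1", "enter_tag_1"],
    "exit_reason": ["roi", "exit_signal", "stop_loss", "roi"],
    "profit_ratio": [0.03, -0.01, -0.05, 0.02],
    "profit_abs": [0.3, -0.1, -0.5, 0.2],
})

group_mask = ["exit_reason"]  # what the `g == "5"` branch selects
agg_mask = {"profit_ratio": ["count", "sum", "mean"], "profit_abs": ["sum"]}

new = trades.groupby(group_mask).agg(agg_mask).reset_index()
new.columns = group_mask + ["num_trades", "tot_ratio", "mean_ratio", "tot_abs"]
print(new.sort_values(["exit_reason"]))
```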


@@ -190,6 +190,15 @@ def test_backtest_analysis_nomock(default_conf, mocker, caplog, testdatadir, tmp
 assert '1' in captured.out
 assert '2.5' in captured.out
+# test group 5
+args = get_args(base_args + ['--analysis-groups', "5"])
+start_analysis_entries_exits(args)
+captured = capsys.readouterr()
+assert 'exit_signal' in captured.out
+assert 'roi' in captured.out
+assert 'stop_loss' in captured.out
+assert 'trailing_stop_loss' in captured.out
 # test date filtering
 args = get_args(base_args + ['--timerange', "20180129-20180130"])
 start_analysis_entries_exits(args)
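The new test block follows the pattern already used in `test_backtest_analysis_nomock`: build the CLI arguments, run the analysis entry point, capture stdout via `capsys`, and assert that the expected exit reasons appear in the printed table. A stripped-down illustration of that capture-and-assert pattern, with a hypothetical `print_exit_summary` standing in for the real entry point:

```python
# Minimal pytest sketch of the capsys pattern used above. print_exit_summary
# is a stand-in for start_analysis_entries_exits; the real test drives the
# full freqtrade CLI against stored backtest results.
def print_exit_summary() -> None:
    for reason in ("exit_signal", "roi", "stop_loss", "trailing_stop_loss"):
        print(reason)


def test_exit_reason_summary(capsys) -> None:
    print_exit_summary()
    captured = capsys.readouterr()
    for reason in ("exit_signal", "roi", "stop_loss", "trailing_stop_loss"):
        assert reason in captured.out
```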