Merge pull request #2451 from freqtrade/bt_analysis_maxopen

Bt analysis maxopen at any time
hroff-1902 2019-11-01 00:13:31 +03:00 committed by GitHub
commit f435384bf0
5 changed files with 83 additions and 16 deletions


@@ -107,6 +107,22 @@ trades = load_trades_from_db("sqlite:///tradesv3.sqlite")
 trades.groupby("pair")["sell_reason"].value_counts()
 ```
+
+## Analyze the loaded trades for trade parallelism
+This can be useful to find the best `max_open_trades` parameter, when used with backtesting in conjunction with `--disable-max-market-positions`.
+
+`analyze_trade_parallelism()` returns a timeseries dataframe with an "open_trades" column, specifying the number of open trades for each candle.
+
+```python
+from freqtrade.data.btanalysis import analyze_trade_parallelism
+
+# Analyze the above
+parallel_trades = analyze_trade_parallelism(trades, '5m')
+
+parallel_trades.plot()
+```
+
 ## Plot results
 Freqtrade offers interactive plotting capabilities based on plotly.
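A minimal follow-up sketch (not part of the diff above), assuming the `parallel_trades` dataframe produced by the documented snippet; the 0.95 quantile is an arbitrary illustration of how a `max_open_trades` value could be chosen:

```python
# Sketch only: summarize the "open_trades" timeseries from analyze_trade_parallelism().
peak = parallel_trades['open_trades'].max()              # most trades ever open at once
typical = parallel_trades['open_trades'].quantile(0.95)  # level exceeded only 5% of the time
print(f"peak parallel trades: {peak}, 95th percentile: {typical}")
```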


@@ -52,16 +52,18 @@ def load_backtest_data(filename) -> pd.DataFrame:
     return df
 
 
-def evaluate_result_multi(results: pd.DataFrame, freq: str, max_open_trades: int) -> pd.DataFrame:
+def analyze_trade_parallelism(results: pd.DataFrame, timeframe: str) -> pd.DataFrame:
     """
     Find overlapping trades by expanding each trade once per period it was open
-    and then counting overlaps
+    and then counting overlaps.
     :param results: Results Dataframe - can be loaded
-    :param freq: Frequency used for the backtest
-    :param max_open_trades: parameter max_open_trades used during backtest run
-    :return: dataframe with open-counts per time-period in freq
+    :param timeframe: Timeframe used for backtest
+    :return: dataframe with open-counts per time-period in timeframe
     """
-    dates = [pd.Series(pd.date_range(row[1].open_time, row[1].close_time, freq=freq))
+    from freqtrade.exchange import timeframe_to_minutes
+    timeframe_min = timeframe_to_minutes(timeframe)
+    dates = [pd.Series(pd.date_range(row[1].open_time, row[1].close_time,
+                                     freq=f"{timeframe_min}min"))
              for row in results[['open_time', 'close_time']].iterrows()]
     deltas = [len(x) for x in dates]
     dates = pd.Series(pd.concat(dates).values, name='date')
@@ -69,8 +71,23 @@ def evaluate_result_multi(results: pd.DataFrame, freq: str, max_open_trades: int
     df2 = pd.concat([dates, df2], axis=1)
     df2 = df2.set_index('date')
-    df_final = df2.resample(freq)[['pair']].count()
-    return df_final[df_final['pair'] > max_open_trades]
+    df_final = df2.resample(f"{timeframe_min}min")[['pair']].count()
+    df_final = df_final.rename({'pair': 'open_trades'}, axis=1)
+    return df_final
+
+
+def evaluate_result_multi(results: pd.DataFrame, timeframe: str,
+                          max_open_trades: int) -> pd.DataFrame:
+    """
+    Find overlapping trades by expanding each trade once per period it was open
+    and then counting overlaps
+    :param results: Results Dataframe - can be loaded
+    :param timeframe: Frequency used for the backtest
+    :param max_open_trades: parameter max_open_trades used during backtest run
+    :return: dataframe with open-counts per time-period in freq
+    """
+    df_final = analyze_trade_parallelism(results, timeframe)
+    return df_final[df_final['open_trades'] > max_open_trades]
 
 
 def load_trades_from_db(db_url: str) -> pd.DataFrame:
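To make the expand-and-count approach above easier to follow outside the diff, here is a minimal, self-contained sketch of the same technique; the two-trade `results` frame and the 5-minute timeframe are made-up illustration data, not part of the change:

```python
import numpy as np
import pandas as pd

# Toy "results" frame with two overlapping trades (made-up data).
results = pd.DataFrame({
    'pair': ['ETH/BTC', 'LTC/BTC'],
    'open_time': pd.to_datetime(['2019-01-01 00:00', '2019-01-01 00:05']),
    'close_time': pd.to_datetime(['2019-01-01 00:15', '2019-01-01 00:10']),
})
timeframe_min = 5  # what timeframe_to_minutes('5m') would return

# Expand each trade into one row per candle it was open ...
dates = [pd.Series(pd.date_range(row[1].open_time, row[1].close_time,
                                 freq=f"{timeframe_min}min"))
         for row in results[['open_time', 'close_time']].iterrows()]
deltas = [len(x) for x in dates]
dates = pd.Series(pd.concat(dates).values, name='date')
df2 = pd.DataFrame(np.repeat(results.values, deltas, axis=0), columns=results.columns)

# ... then count how many trades fall into each candle.
df2 = pd.concat([dates, df2], axis=1).set_index('date')
open_trades = (df2.resample(f"{timeframe_min}min")[['pair']].count()
                  .rename({'pair': 'open_trades'}, axis=1))
print(open_trades)  # the 00:05 and 00:10 candles show 2 parallel trades
```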


@@ -10,7 +10,7 @@ from freqtrade.data.btanalysis import (BT_DATA_COLUMNS,
                                        create_cum_profit,
                                        extract_trades_of_period,
                                        load_backtest_data, load_trades,
-                                       load_trades_from_db)
+                                       load_trades_from_db, analyze_trade_parallelism)
 from freqtrade.data.history import load_data, load_pair_history
 from tests.test_persistence import create_mock_trades
@@ -32,7 +32,7 @@ def test_load_backtest_data(testdatadir):
 
 
 @pytest.mark.usefixtures("init_persistence")
-def test_load_trades_db(default_conf, fee, mocker):
+def test_load_trades_from_db(default_conf, fee, mocker):
     create_mock_trades(fee)
     # remove init so it does not init again
@@ -84,6 +84,17 @@ def test_extract_trades_of_period(testdatadir):
     assert trades1.iloc[-1].close_time == Arrow(2017, 11, 14, 15, 25, 0).datetime
 
 
+def test_analyze_trade_parallelism(default_conf, mocker, testdatadir):
+    filename = testdatadir / "backtest-result_test.json"
+    bt_data = load_backtest_data(filename)
+    res = analyze_trade_parallelism(bt_data, "5m")
+    assert isinstance(res, DataFrame)
+    assert 'open_trades' in res.columns
+    assert res['open_trades'].max() == 3
+    assert res['open_trades'].min() == 0
+
+
 def test_load_trades(default_conf, mocker):
     db_mock = mocker.patch("freqtrade.data.btanalysis.load_trades_from_db", MagicMock())
     bt_mock = mocker.patch("freqtrade.data.btanalysis.load_backtest_data", MagicMock())


@@ -714,9 +714,9 @@ def test_backtest_multi_pair(default_conf, fee, mocker, tres, pair, testdatadir)
     results = backtesting.backtest(backtest_conf)
 
     # Make sure we have parallel trades
-    assert len(evaluate_result_multi(results, '5min', 2)) > 0
+    assert len(evaluate_result_multi(results, '5m', 2)) > 0
     # make sure we don't have trades with more than configured max_open_trades
-    assert len(evaluate_result_multi(results, '5min', 3)) == 0
+    assert len(evaluate_result_multi(results, '5m', 3)) == 0
 
     backtest_conf = {
         'stake_amount': default_conf['stake_amount'],
@@ -727,7 +727,7 @@ def test_backtest_multi_pair(default_conf, fee, mocker, tres, pair, testdatadir)
         'end_date': max_date,
     }
     results = backtesting.backtest(backtest_conf)
-    assert len(evaluate_result_multi(results, '5min', 1)) == 0
+    assert len(evaluate_result_multi(results, '5m', 1)) == 0
 
 
 def test_backtest_record(default_conf, fee, mocker):
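The only change in these tests is the timeframe argument: callers now pass an exchange-style timeframe (`'5m'`) rather than a pandas offset alias (`'5min'`). A small sketch of the conversion that `analyze_trade_parallelism()` performs internally, using the `timeframe_to_minutes()` helper already imported in the diff above:

```python
from freqtrade.exchange import timeframe_to_minutes

# Exchange-style timeframe strings are converted to minutes ...
timeframe_min = timeframe_to_minutes('5m')    # -> 5
# ... and then formatted as the pandas offset alias used for resampling.
pandas_freq = f"{timeframe_min}min"           # -> '5min'
print(pandas_freq)
```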


@@ -68,9 +68,7 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "scrolled": true
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "# Load strategy using values set above\n",
@@ -169,6 +167,31 @@
     "trades.groupby(\"pair\")[\"sell_reason\"].value_counts()"
    ]
   },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Analyze the loaded trades for trade parallelism\n",
+    "This can be useful to find the best `max_open_trades` parameter, when used with backtesting in conjunction with `--disable-max-market-positions`.\n",
+    "\n",
+    "`analyze_trade_parallelism()` returns a timeseries dataframe with an \"open_trades\" column, specifying the number of open trades for each candle."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from freqtrade.data.btanalysis import analyze_trade_parallelism\n",
+    "\n",
+    "# Analyze the above\n",
+    "parallel_trades = analyze_trade_parallelism(trades, '5m')\n",
+    "\n",
+    "\n",
+    "parallel_trades.plot()"
+   ]
+  },
   {
    "cell_type": "markdown",
    "metadata": {},