from pathlib import Path
from unittest.mock import MagicMock

import pytest
from arrow import Arrow
from pandas import DataFrame, DateOffset, Timestamp, to_datetime

from freqtrade.configuration import TimeRange
from freqtrade.data.btanalysis import (BT_DATA_COLUMNS,
                                       analyze_trade_parallelism,
                                       calculate_max_drawdown,
                                       combine_dataframes_with_mean,
                                       create_cum_profit,
                                       extract_trades_of_period,
                                       load_backtest_data, load_trades,
                                       load_trades_from_db)
from freqtrade.data.history import load_data, load_pair_history
from tests.conftest import create_mock_trades
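

# NOTE: `testdatadir`, `default_conf`, `fee` and `init_persistence` are pytest fixtures,
# assumed to be defined in tests/conftest.py (like `create_mock_trades` above);
# `mocker` is provided by the pytest-mock plugin.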
def test_load_backtest_data(testdatadir):

    filename = testdatadir / "backtest-result_test.json"
    bt_data = load_backtest_data(filename)
    assert isinstance(bt_data, DataFrame)
    assert list(bt_data.columns) == BT_DATA_COLUMNS + ["profit"]
    assert len(bt_data) == 179

    # Test loading from string (must yield same result)
    bt_data2 = load_backtest_data(str(filename))
    assert bt_data.equals(bt_data2)

    with pytest.raises(ValueError, match=r"File .* does not exist\."):
        load_backtest_data(str("filename") + "nofile")


@pytest.mark.usefixtures("init_persistence")
def test_load_trades_from_db(default_conf, fee, mocker):

    create_mock_trades(fee)
    # Patch init() so the database is not initialized a second time
    init_mock = mocker.patch('freqtrade.persistence.init', MagicMock())

    trades = load_trades_from_db(db_url=default_conf['db_url'])
    assert init_mock.call_count == 1
    assert len(trades) == 3
    assert isinstance(trades, DataFrame)
    assert "pair" in trades.columns
    assert "open_time" in trades.columns
    assert "profitperc" in trades.columns

    for col in BT_DATA_COLUMNS:
        if col not in ['index', 'open_at_end']:
            assert col in trades.columns


def test_extract_trades_of_period(testdatadir):
    pair = "UNITTEST/BTC"
    # 2017-11-14 06:07:00
    timerange = TimeRange('date', None, 1510639620, 0)

    data = load_pair_history(pair=pair, timeframe='1m',
                             datadir=testdatadir, timerange=timerange)

    trades = DataFrame(
        {'pair': [pair, pair, pair, pair],
         'profit_percent': [0.0, 0.1, -0.2, -0.5],
         'profit_abs': [0.0, 1, -2, -5],
         'open_time': to_datetime([Arrow(2017, 11, 13, 15, 40, 0).datetime,
                                   Arrow(2017, 11, 14, 9, 41, 0).datetime,
                                   Arrow(2017, 11, 14, 14, 20, 0).datetime,
                                   Arrow(2017, 11, 15, 3, 40, 0).datetime,
                                   ], utc=True
                                  ),
         'close_time': to_datetime([Arrow(2017, 11, 13, 16, 40, 0).datetime,
                                    Arrow(2017, 11, 14, 10, 41, 0).datetime,
                                    Arrow(2017, 11, 14, 15, 25, 0).datetime,
                                    Arrow(2017, 11, 15, 3, 55, 0).datetime,
                                    ], utc=True)
         })
    trades1 = extract_trades_of_period(data, trades)
    # First and last trade are dropped as they are out of range
    assert len(trades1) == 2
    assert trades1.iloc[0].open_time == Arrow(2017, 11, 14, 9, 41, 0).datetime
    assert trades1.iloc[0].close_time == Arrow(2017, 11, 14, 10, 41, 0).datetime
    assert trades1.iloc[-1].open_time == Arrow(2017, 11, 14, 14, 20, 0).datetime
    assert trades1.iloc[-1].close_time == Arrow(2017, 11, 14, 15, 25, 0).datetime
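

# analyze_trade_parallelism should report, per 5m candle, how many trades were
# open at the same time (between 0 and 3 for the sample backtest result).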
def test_analyze_trade_parallelism(default_conf, mocker, testdatadir):
    filename = testdatadir / "backtest-result_test.json"
    bt_data = load_backtest_data(filename)

    res = analyze_trade_parallelism(bt_data, "5m")
    assert isinstance(res, DataFrame)
    assert 'open_trades' in res.columns
    assert res['open_trades'].max() == 3
    assert res['open_trades'].min() == 0
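

# load_trades dispatches to load_trades_from_db or load_backtest_data depending on
# the source argument; with no_trades=True neither source should be queried.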
def test_load_trades(default_conf, mocker):
    db_mock = mocker.patch("freqtrade.data.btanalysis.load_trades_from_db", MagicMock())
    bt_mock = mocker.patch("freqtrade.data.btanalysis.load_backtest_data", MagicMock())

    load_trades("DB",
                db_url=default_conf.get('db_url'),
                exportfilename=default_conf.get('exportfilename'),
                no_trades=False
                )

    assert db_mock.call_count == 1
    assert bt_mock.call_count == 0

    db_mock.reset_mock()
    bt_mock.reset_mock()
    default_conf['exportfilename'] = Path("testfile.json")
    load_trades("file",
                db_url=default_conf.get('db_url'),
                exportfilename=default_conf.get('exportfilename'),
                )

    assert db_mock.call_count == 0
    assert bt_mock.call_count == 1

    db_mock.reset_mock()
    bt_mock.reset_mock()
    default_conf['exportfilename'] = "testfile.json"
    load_trades("file",
                db_url=default_conf.get('db_url'),
                exportfilename=default_conf.get('exportfilename'),
                no_trades=True
                )

    assert db_mock.call_count == 0
    assert bt_mock.call_count == 0


def test_combine_dataframes_with_mean(testdatadir):
    pairs = ["ETH/BTC", "ADA/BTC"]
    data = load_data(datadir=testdatadir, pairs=pairs, timeframe='5m')
    df = combine_dataframes_with_mean(data)
    assert isinstance(df, DataFrame)
    assert "ETH/BTC" in df.columns
    assert "ADA/BTC" in df.columns
    assert "mean" in df.columns


def test_create_cum_profit(testdatadir):
    filename = testdatadir / "backtest-result_test.json"
    bt_data = load_backtest_data(filename)
    timerange = TimeRange.parse_timerange("20180110-20180112")

    df = load_pair_history(pair="TRX/BTC", timeframe='5m',
                           datadir=testdatadir, timerange=timerange)

    cum_profits = create_cum_profit(df.set_index('date'),
                                    bt_data[bt_data["pair"] == 'TRX/BTC'],
                                    "cum_profits", timeframe="5m")
    assert "cum_profits" in cum_profits.columns
    assert cum_profits.iloc[0]['cum_profits'] == 0
    assert cum_profits.iloc[-1]['cum_profits'] == 0.0798005


def test_create_cum_profit1(testdatadir):
    filename = testdatadir / "backtest-result_test.json"
    bt_data = load_backtest_data(filename)
    # Move close_time off the candle boundary to make sure the logic still works
    bt_data.loc[:, 'close_time'] = bt_data.loc[:, 'close_time'] + DateOffset(seconds=20)
    timerange = TimeRange.parse_timerange("20180110-20180112")

    df = load_pair_history(pair="TRX/BTC", timeframe='5m',
                           datadir=testdatadir, timerange=timerange)

    cum_profits = create_cum_profit(df.set_index('date'),
                                    bt_data[bt_data["pair"] == 'TRX/BTC'],
                                    "cum_profits", timeframe="5m")
    assert "cum_profits" in cum_profits.columns
    assert cum_profits.iloc[0]['cum_profits'] == 0
    assert cum_profits.iloc[-1]['cum_profits'] == 0.0798005


def test_calculate_max_drawdown(testdatadir):
    filename = testdatadir / "backtest-result_test.json"
    bt_data = load_backtest_data(filename)
    drawdown, h, low = calculate_max_drawdown(bt_data)
    assert isinstance(drawdown, float)
    assert pytest.approx(drawdown) == 0.21142322
    assert isinstance(h, Timestamp)
    assert isinstance(low, Timestamp)
    assert h == Timestamp('2018-01-24 14:25:00', tz='UTC')
    assert low == Timestamp('2018-01-30 04:45:00', tz='UTC')
    with pytest.raises(ValueError, match='Trade dataframe empty.'):
        drawdown, h, low = calculate_max_drawdown(DataFrame())
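

# Synthetic profit series, deliberately sorted by profit rather than by date:
# calculate_max_drawdown is expected to cope with input that is not date-ordered
# and must not mutate it (checked via df.equals(df1) below).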
def test_calculate_max_drawdown2():
    values = [0.011580, 0.010048, 0.011340, 0.012161, 0.010416, 0.010009, 0.020024,
              -0.024662, -0.022350, 0.020496, -0.029859, -0.030511, 0.010041, 0.010872,
              -0.025782, 0.010400, 0.012374, 0.012467, 0.114741, 0.010303, 0.010088,
              -0.033961, 0.010680, 0.010886, -0.029274, 0.011178, 0.010693, 0.010711]

    dates = [Arrow(2020, 1, 1).shift(days=i) for i in range(len(values))]
    df = DataFrame(zip(values, dates), columns=['profit', 'open_time'])
    # sort by profit and reset index
    df = df.sort_values('profit').reset_index(drop=True)
    df1 = df.copy()
    drawdown, h, low = calculate_max_drawdown(df, date_col='open_time', value_col='profit')
    # Ensure df has not been altered.
    assert df.equals(df1)

    assert isinstance(drawdown, float)
    # High must be before low
    assert h < low
    assert drawdown == 0.091755

    df = DataFrame(zip(values[:5], dates[:5]), columns=['profit', 'open_time'])
    with pytest.raises(ValueError, match='No losing trade, therefore no drawdown.'):
        calculate_max_drawdown(df, date_col='open_time', value_col='profit')