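"""Tests for freqtrade.data.btanalysis (loading and analysing backtest results)."""
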
from math import isclose
from pathlib import Path
from unittest.mock import MagicMock

import pytest
from arrow import Arrow
from pandas import DataFrame, DateOffset, Timestamp, to_datetime

from freqtrade.configuration import TimeRange
from freqtrade.constants import LAST_BT_RESULT_FN
from freqtrade.data.btanalysis import (BT_DATA_COLUMNS, analyze_trade_parallelism, calculate_csum,
                                        calculate_market_change, calculate_max_drawdown,
                                        calculate_underwater, combine_dataframes_with_mean,
                                        create_cum_profit, extract_trades_of_period,
                                        get_latest_backtest_filename, get_latest_hyperopt_file,
                                        load_backtest_data, load_backtest_metadata, load_trades,
                                        load_trades_from_db)
from freqtrade.data.history import load_data, load_pair_history
from freqtrade.exceptions import OperationalException
from tests.conftest import CURRENT_TEST_STRATEGY, create_mock_trades
from tests.conftest_trades import MOCK_TRADE_COUNT


def test_get_latest_backtest_filename(testdatadir, mocker):
    with pytest.raises(ValueError, match=r"Directory .* does not exist\."):
        get_latest_backtest_filename(testdatadir / 'does_not_exist')

    with pytest.raises(ValueError,
                       match=r"Directory .* does not seem to contain .*"):
        get_latest_backtest_filename(testdatadir.parent)

    res = get_latest_backtest_filename(testdatadir)
    assert res == 'backtest-result_new.json'

    res = get_latest_backtest_filename(str(testdatadir))
    assert res == 'backtest-result_new.json'

    mocker.patch("freqtrade.data.btanalysis.json_load", return_value={})

    with pytest.raises(ValueError, match=r"Invalid '.last_result.json' format."):
        get_latest_backtest_filename(testdatadir)


def test_get_latest_hyperopt_file(testdatadir):
    res = get_latest_hyperopt_file(testdatadir / 'does_not_exist', 'testfile.pickle')
    assert res == testdatadir / 'does_not_exist/testfile.pickle'

    res = get_latest_hyperopt_file(testdatadir.parent)
    assert res == testdatadir.parent / "hyperopt_results.pickle"

    res = get_latest_hyperopt_file(str(testdatadir.parent))
    assert res == testdatadir.parent / "hyperopt_results.pickle"

    # Test with absolute path
    with pytest.raises(
            OperationalException,
            match="--hyperopt-filename expects only the filename, not an absolute path."):
        get_latest_hyperopt_file(str(testdatadir.parent), str(testdatadir.parent))


def test_load_backtest_metadata(mocker, testdatadir):
    res = load_backtest_metadata(testdatadir / 'nonexistant.file.json')
    assert res == {}

    mocker.patch('freqtrade.data.btanalysis.get_backtest_metadata_filename')
    mocker.patch('freqtrade.data.btanalysis.json_load', side_effect=Exception())
    with pytest.raises(OperationalException,
                       match=r"Unexpected error.*loading backtest metadata\."):
        load_backtest_metadata(testdatadir / 'nonexistant.file.json')


def test_load_backtest_data_old_format(testdatadir, mocker):

    filename = testdatadir / "backtest-result_test222.json"
    mocker.patch('freqtrade.data.btanalysis.load_backtest_stats', return_value=[])

    with pytest.raises(OperationalException,
                       match=r"Backtest-results with only trades data are no longer supported."):
        load_backtest_data(filename)


def test_load_backtest_data_new_format(testdatadir):

    filename = testdatadir / "backtest-result_new.json"
    bt_data = load_backtest_data(filename)
    assert isinstance(bt_data, DataFrame)
    assert set(bt_data.columns) == set(BT_DATA_COLUMNS + ['close_timestamp', 'open_timestamp'])
    assert len(bt_data) == 179

    # Test loading from string (must yield same result)
    bt_data2 = load_backtest_data(str(filename))
    assert bt_data.equals(bt_data2)

    # Test loading from folder (must yield same result)
    bt_data3 = load_backtest_data(testdatadir)
    assert bt_data.equals(bt_data3)

    with pytest.raises(ValueError, match=r"File .* does not exist\."):
        load_backtest_data(str("filename") + "nofile")

    with pytest.raises(ValueError, match=r"Unknown dataformat."):
        load_backtest_data(testdatadir / LAST_BT_RESULT_FN)


def test_load_backtest_data_multi(testdatadir):

    filename = testdatadir / "backtest-result_multistrat.json"
    for strategy in ('StrategyTestV2', 'TestStrategy'):
        bt_data = load_backtest_data(filename, strategy=strategy)
        assert isinstance(bt_data, DataFrame)
        assert set(bt_data.columns) == set(
            BT_DATA_COLUMNS + ['close_timestamp', 'open_timestamp'])
        assert len(bt_data) == 179

        # Test loading from string (must yield same result)
        bt_data2 = load_backtest_data(str(filename), strategy=strategy)
        assert bt_data.equals(bt_data2)

    with pytest.raises(ValueError, match=r"Strategy XYZ not available in the backtest result\."):
        load_backtest_data(filename, strategy='XYZ')

    with pytest.raises(ValueError, match=r"Detected backtest result with more than one strategy.*"):
        load_backtest_data(filename)


@pytest.mark.usefixtures("init_persistence")
@pytest.mark.parametrize('is_short', [False, True])
def test_load_trades_from_db(default_conf, fee, is_short, mocker):

    create_mock_trades(fee, is_short)
    # remove init so it does not init again
    init_mock = mocker.patch('freqtrade.data.btanalysis.init_db', MagicMock())

    trades = load_trades_from_db(db_url=default_conf['db_url'])
    assert init_mock.call_count == 1
    assert len(trades) == MOCK_TRADE_COUNT
    assert isinstance(trades, DataFrame)
    assert "pair" in trades.columns
    assert "open_date" in trades.columns
    assert "profit_ratio" in trades.columns

    for col in BT_DATA_COLUMNS:
        if col not in ['index', 'open_at_end']:
            assert col in trades.columns

    trades = load_trades_from_db(db_url=default_conf['db_url'], strategy=CURRENT_TEST_STRATEGY)
    assert len(trades) == 4
    trades = load_trades_from_db(db_url=default_conf['db_url'], strategy='NoneStrategy')
    assert len(trades) == 0


def test_extract_trades_of_period(testdatadir):
    pair = "UNITTEST/BTC"
    # 2017-11-14 06:07:00
    timerange = TimeRange('date', None, 1510639620, 0)

    data = load_pair_history(pair=pair, timeframe='1m',
                             datadir=testdatadir, timerange=timerange)

    trades = DataFrame(
        {'pair': [pair, pair, pair, pair],
         'profit_ratio': [0.0, 0.1, -0.2, -0.5],
         'profit_abs': [0.0, 1, -2, -5],
         'open_date': to_datetime([Arrow(2017, 11, 13, 15, 40, 0).datetime,
                                   Arrow(2017, 11, 14, 9, 41, 0).datetime,
                                   Arrow(2017, 11, 14, 14, 20, 0).datetime,
                                   Arrow(2017, 11, 15, 3, 40, 0).datetime,
                                   ], utc=True
                                  ),
         'close_date': to_datetime([Arrow(2017, 11, 13, 16, 40, 0).datetime,
                                    Arrow(2017, 11, 14, 10, 41, 0).datetime,
                                    Arrow(2017, 11, 14, 15, 25, 0).datetime,
                                    Arrow(2017, 11, 15, 3, 55, 0).datetime,
                                    ], utc=True)
         })
    trades1 = extract_trades_of_period(data, trades)
    # First and last trade are dropped as they are out of range
    assert len(trades1) == 2
    assert trades1.iloc[0].open_date == Arrow(2017, 11, 14, 9, 41, 0).datetime
    assert trades1.iloc[0].close_date == Arrow(2017, 11, 14, 10, 41, 0).datetime
    assert trades1.iloc[-1].open_date == Arrow(2017, 11, 14, 14, 20, 0).datetime
    assert trades1.iloc[-1].close_date == Arrow(2017, 11, 14, 15, 25, 0).datetime


def test_analyze_trade_parallelism(testdatadir):
    filename = testdatadir / "backtest-result_new.json"
    bt_data = load_backtest_data(filename)

    res = analyze_trade_parallelism(bt_data, "5m")
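    # The 'open_trades' column reports, per 5m period, how many trades were open at
    # the same time (column name taken from the assertions below; the exact
    # aggregation is a freqtrade internal).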
    assert isinstance(res, DataFrame)
    assert 'open_trades' in res.columns
    assert res['open_trades'].max() == 3
    assert res['open_trades'].min() == 0


def test_load_trades(default_conf, mocker):
    db_mock = mocker.patch("freqtrade.data.btanalysis.load_trades_from_db", MagicMock())
    bt_mock = mocker.patch("freqtrade.data.btanalysis.load_backtest_data", MagicMock())

    load_trades("DB",
                db_url=default_conf.get('db_url'),
                exportfilename=default_conf.get('exportfilename'),
                no_trades=False,
                strategy=CURRENT_TEST_STRATEGY,
                )

    assert db_mock.call_count == 1
    assert bt_mock.call_count == 0

    db_mock.reset_mock()
    bt_mock.reset_mock()
    default_conf['exportfilename'] = Path("testfile.json")
    load_trades("file",
                db_url=default_conf.get('db_url'),
                exportfilename=default_conf.get('exportfilename'),
                )

    assert db_mock.call_count == 0
    assert bt_mock.call_count == 1

    db_mock.reset_mock()
    bt_mock.reset_mock()
    default_conf['exportfilename'] = "testfile.json"
    load_trades("file",
                db_url=default_conf.get('db_url'),
                exportfilename=default_conf.get('exportfilename'),
                no_trades=True
                )

    assert db_mock.call_count == 0
    assert bt_mock.call_count == 0


def test_calculate_market_change(testdatadir):
    pairs = ["ETH/BTC", "ADA/BTC"]
    data = load_data(datadir=testdatadir, pairs=pairs, timeframe='5m')
    result = calculate_market_change(data)
    assert isinstance(result, float)
    assert pytest.approx(result) == 0.00955514


def test_combine_dataframes_with_mean(testdatadir):
    pairs = ["ETH/BTC", "ADA/BTC"]
    data = load_data(datadir=testdatadir, pairs=pairs, timeframe='5m')
    df = combine_dataframes_with_mean(data)
    assert isinstance(df, DataFrame)
    assert "ETH/BTC" in df.columns
    assert "ADA/BTC" in df.columns
    assert "mean" in df.columns


def test_combine_dataframes_with_mean_no_data(testdatadir):
    pairs = ["ETH/BTC", "ADA/BTC"]
    data = load_data(datadir=testdatadir, pairs=pairs, timeframe='6m')
    with pytest.raises(ValueError, match=r"No objects to concatenate"):
        combine_dataframes_with_mean(data)


def test_create_cum_profit(testdatadir):
    filename = testdatadir / "backtest-result_new.json"
    bt_data = load_backtest_data(filename)
    timerange = TimeRange.parse_timerange("20180110-20180112")

    df = load_pair_history(pair="TRX/BTC", timeframe='5m',
                           datadir=testdatadir, timerange=timerange)

    cum_profits = create_cum_profit(df.set_index('date'),
                                    bt_data[bt_data["pair"] == 'TRX/BTC'],
                                    "cum_profits", timeframe="5m")
    assert "cum_profits" in cum_profits.columns
    assert cum_profits.iloc[0]['cum_profits'] == 0
    assert isclose(cum_profits.iloc[-1]['cum_profits'], 8.723007518796964e-06)


def test_create_cum_profit1(testdatadir):
    filename = testdatadir / "backtest-result_new.json"
    bt_data = load_backtest_data(filename)
    # Move close-time to "off" the candle, to make sure the logic still works
    bt_data.loc[:, 'close_date'] = bt_data.loc[:, 'close_date'] + DateOffset(seconds=20)
    timerange = TimeRange.parse_timerange("20180110-20180112")

    df = load_pair_history(pair="TRX/BTC", timeframe='5m',
                           datadir=testdatadir, timerange=timerange)

    cum_profits = create_cum_profit(df.set_index('date'),
                                    bt_data[bt_data["pair"] == 'TRX/BTC'],
                                    "cum_profits", timeframe="5m")
    assert "cum_profits" in cum_profits.columns
    assert cum_profits.iloc[0]['cum_profits'] == 0
    assert isclose(cum_profits.iloc[-1]['cum_profits'], 8.723007518796964e-06)

    with pytest.raises(ValueError, match='Trade dataframe empty.'):
        create_cum_profit(df.set_index('date'), bt_data[bt_data["pair"] == 'NOTAPAIR'],
                          "cum_profits", timeframe="5m")


def test_calculate_max_drawdown(testdatadir):
    filename = testdatadir / "backtest-result_new.json"
    bt_data = load_backtest_data(filename)
    _, hdate, lowdate, hval, lval, drawdown = calculate_max_drawdown(
        bt_data, value_col="profit_abs")
    assert isinstance(drawdown, float)
    assert pytest.approx(drawdown) == 0.12071099
    assert isinstance(hdate, Timestamp)
    assert isinstance(lowdate, Timestamp)
    assert isinstance(hval, float)
    assert isinstance(lval, float)
    assert hdate == Timestamp('2018-01-25 01:30:00', tz='UTC')
    assert lowdate == Timestamp('2018-01-25 03:50:00', tz='UTC')

    underwater = calculate_underwater(bt_data)
    assert isinstance(underwater, DataFrame)

    with pytest.raises(ValueError, match='Trade dataframe empty.'):
        calculate_max_drawdown(DataFrame())

    with pytest.raises(ValueError, match='Trade dataframe empty.'):
        calculate_underwater(DataFrame())


def test_calculate_csum(testdatadir):
    filename = testdatadir / "backtest-result_new.json"
    bt_data = load_backtest_data(filename)
    csum_min, csum_max = calculate_csum(bt_data)

    assert isinstance(csum_min, float)
    assert isinstance(csum_max, float)
    assert csum_min < 0.01
    assert csum_max > 0.02
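
    # Passing 5 as the second argument (presumably a starting value/balance) simply
    # shifts both extremes of the cumulative-profit curve by that amount, as the
    # assertions below demonstrate.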
    csum_min1, csum_max1 = calculate_csum(bt_data, 5)

    assert csum_min1 == csum_min + 5
    assert csum_max1 == csum_max + 5

    with pytest.raises(ValueError, match='Trade dataframe empty.'):
        csum_min, csum_max = calculate_csum(DataFrame())


def test_calculate_max_drawdown2():
    values = [0.011580, 0.010048, 0.011340, 0.012161, 0.010416, 0.010009, 0.020024,
              -0.024662, -0.022350, 0.020496, -0.029859, -0.030511, 0.010041, 0.010872,
              -0.025782, 0.010400, 0.012374, 0.012467, 0.114741, 0.010303, 0.010088,
              -0.033961, 0.010680, 0.010886, -0.029274, 0.011178, 0.010693, 0.010711]

    dates = [Arrow(2020, 1, 1).shift(days=i) for i in range(len(values))]
    df = DataFrame(zip(values, dates), columns=['profit', 'open_date'])
    # Sort by profit and reset the index - the result must not depend on row order,
    # since the function orders trades by date_col itself.
    df = df.sort_values('profit').reset_index(drop=True)
    df1 = df.copy()
    drawdown, hdate, ldate, hval, lval, drawdown_rel = calculate_max_drawdown(
        df, date_col='open_date', value_col='profit')
    # Ensure df has not been altered.
    assert df.equals(df1)

    assert isinstance(drawdown, float)
    assert isinstance(drawdown_rel, float)
    # High must be before low
    assert hdate < ldate
    # High value must be higher than low value
    assert hval > lval
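
    # The expected value below can be reproduced by hand from the chronologically
    # ordered profit series: cumulative profit peaks at 0.085578 after the 7th trade
    # and bottoms out at -0.006177 after the 15th, a peak-to-trough drop of 0.091755.
    # A minimal cross-check sketch (an illustration only, not necessarily how
    # calculate_max_drawdown is implemented):
    #
    #     from pandas import Series  # not imported at module level; sketch only
    #     cumulative = Series(values).cumsum()
    #     assert isclose((cumulative.cummax() - cumulative).max(), 0.091755)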
    assert drawdown == 0.091755

    df = DataFrame(zip(values[:5], dates[:5]), columns=['profit', 'open_date'])
    with pytest.raises(ValueError, match='No losing trade, therefore no drawdown.'):
        calculate_max_drawdown(df, date_col='open_date', value_col='profit')