2018-01-28 07:38:41 +00:00
|
|
|
# pragma pylint: disable=missing-docstring, C0103
|
2018-12-11 18:48:36 +00:00
|
|
|
import logging
|
2018-02-04 08:28:02 +00:00
|
|
|
|
2019-12-25 14:47:04 +00:00
|
|
|
from freqtrade.configuration.timerange import TimeRange
|
|
|
|
from freqtrade.data.converter import (ohlcv_fill_up_missing_data,
|
|
|
|
parse_ticker_dataframe, trim_dataframe)
|
|
|
|
from freqtrade.data.history import (get_timerange, load_data,
|
|
|
|
load_pair_history, validate_backtest_data)
|
2019-09-08 07:54:15 +00:00
|
|
|
from tests.conftest import log_has
|
2018-02-04 08:28:02 +00:00
|
|
|
|
|
|
|
|
2017-11-07 19:12:56 +00:00
|
|
|
def test_dataframe_correct_columns(result):
    """The ohlcv dataframe fixture must expose the standard column layout, in order."""
    expected_columns = ['date', 'open', 'high', 'low', 'close', 'volume']
    assert result.columns.tolist() == expected_columns
|
2017-10-01 08:02:47 +00:00
|
|
|
|
2017-10-30 23:36:35 +00:00
|
|
|
|
2018-12-31 18:15:05 +00:00
|
|
|
def test_parse_ticker_dataframe(ticker_history_list, caplog):
    """parse_ticker_dataframe should yield the standard columns and log the conversion."""
    caplog.set_level(logging.DEBUG)

    # Test file with BV data
    dataframe = parse_ticker_dataframe(ticker_history_list, '5m',
                                       pair="UNITTEST/BTC", fill_missing=True)

    expected = ['date', 'open', 'high', 'low', 'close', 'volume']
    assert dataframe.columns.tolist() == expected
    assert log_has('Parsing tickerlist to dataframe', caplog)
|
2018-12-31 08:18:22 +00:00
|
|
|
|
|
|
|
|
2019-09-07 18:56:03 +00:00
|
|
|
def test_ohlcv_fill_up_missing_data(testdatadir, caplog):
    """Filling gaps must add rows, keep the columns, and repair invalid backtest data."""
    raw = load_pair_history(datadir=testdatadir,
                            timeframe='1m',
                            pair='UNITTEST/BTC',
                            fill_up_missing=False)
    caplog.set_level(logging.DEBUG)

    filled = ohlcv_fill_up_missing_data(raw, '1m', 'UNITTEST/BTC')
    # Gaps were filled, so the result must have grown ...
    assert len(filled) > len(raw)
    # ... while the column layout stays identical.
    assert (raw.columns == filled.columns).all()

    assert log_has(f"Missing data fillup for UNITTEST/BTC: before: "
                   f"{len(raw)} - after: {len(filled)}", caplog)

    # Test fillup actually fixes invalid backtest data
    min_date, max_date = get_timerange({'UNITTEST/BTC': raw})
    assert validate_backtest_data(raw, 'UNITTEST/BTC', min_date, max_date, 1)
    assert not validate_backtest_data(filled, 'UNITTEST/BTC', min_date, max_date, 1)
|
2018-12-31 18:40:14 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_ohlcv_fill_up_missing_data2(caplog):
    """A missing 5m candle (9:00) must be filled from the previous close with zero volume."""
    timeframe = '5m'
    # Raw candles as [date, open, high, low, close, volume].
    # Note the gap between 8:55 and 9:05 - the 9:00 candle is missing.
    ticks = [
        [1511686200000, 8.794e-05, 8.948e-05, 8.794e-05, 8.88e-05, 2255],     # 8:50:00
        [1511686500000, 8.88e-05, 8.942e-05, 8.88e-05, 8.893e-05, 9911],      # 8:55:00
        [1511687100000, 8.891e-05, 8.893e-05, 8.875e-05, 8.877e-05, 2251],    # 9:05:00
        [1511687400000, 8.877e-05, 8.883e-05, 8.895e-05, 8.817e-05, 123551],  # 9:10:00
    ]

    # Generate test-data without filling missing
    data = parse_ticker_dataframe(ticks, timeframe, pair="UNITTEST/BTC", fill_missing=False)
    # drop_incomplete defaults to True, so the last raw candle is gone already.
    assert len(data) == 3
    caplog.set_level(logging.DEBUG)

    data2 = ohlcv_fill_up_missing_data(data, timeframe, "UNITTEST/BTC")
    assert len(data2) == 4
    # 3rd candle has been filled: it carries no volume ...
    filled_row = data2.loc[2, :]
    assert filled_row['volume'] == 0
    # ... and flat-lines at the previous candle's close.
    assert filled_row['close'] == data.loc[1, 'close']
    assert filled_row['open'] == filled_row['close']
    assert filled_row['high'] == filled_row['close']
    assert filled_row['low'] == filled_row['close']
    # Column names should not change
    assert (data.columns == data2.columns).all()

    assert log_has(f"Missing data fillup for UNITTEST/BTC: before: "
                   f"{len(data)} - after: {len(data2)}", caplog)
|
2019-06-09 12:51:58 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_ohlcv_drop_incomplete(caplog):
    """drop_incomplete must remove exactly the last candle - and only when requested."""
    timeframe = '1d'
    # Four daily candles as [date, open, high, low, close, volume].
    ticks = [
        [1559750400000, 8.794e-05, 8.948e-05, 8.794e-05, 8.88e-05, 2255],     # 2019-06-04
        [1559836800000, 8.88e-05, 8.942e-05, 8.88e-05, 8.893e-05, 9911],      # 2019-06-05
        [1559923200000, 8.891e-05, 8.893e-05, 8.875e-05, 8.877e-05, 2251],    # 2019-06-06
        [1560009600000, 8.877e-05, 8.883e-05, 8.895e-05, 8.817e-05, 123551],  # 2019-06-07
    ]
    caplog.set_level(logging.DEBUG)

    # Without dropping, every candle survives and nothing is logged.
    data = parse_ticker_dataframe(ticks, timeframe, pair="UNITTEST/BTC",
                                  fill_missing=False, drop_incomplete=False)
    assert len(data) == 4
    assert not log_has("Dropping last candle", caplog)

    # Drop last candle
    data = parse_ticker_dataframe(ticks, timeframe, pair="UNITTEST/BTC",
                                  fill_missing=False, drop_incomplete=True)
    assert len(data) == 3

    assert log_has("Dropping last candle", caplog)
|
2019-12-25 14:47:04 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_trim_dataframe(testdatadir) -> None:
    """trim_dataframe must cut exactly the candles outside the given TimeRange."""
    data = load_data(
        datadir=testdatadir,
        timeframe='1m',
        pairs=['UNITTEST/BTC']
    )['UNITTEST/BTC']
    min_date = int(data.iloc[0]['date'].timestamp())
    max_date = int(data.iloc[-1]['date'].timestamp())

    # Remove first 30 minutes (1800 s)
    timerange = TimeRange('date', None, min_date + 1800, 0)
    trimmed = trim_dataframe(data.copy(), timerange)
    assert not trimmed.equals(data)
    assert len(trimmed) < len(data)
    assert len(trimmed) == len(data) - 30
    # Tail untouched; head now starts 30 candles in.
    assert all(trimmed.iloc[-1] == data.iloc[-1])
    assert all(trimmed.iloc[0] == data.iloc[30])

    # Remove last 30 minutes (1800 s)
    timerange = TimeRange(None, 'date', 0, max_date - 1800)
    trimmed = trim_dataframe(data.copy(), timerange)
    assert not trimmed.equals(data)
    assert len(trimmed) < len(data)
    assert len(trimmed) == len(data) - 30
    # Head untouched; tail now ends 30 candles early.
    assert all(trimmed.iloc[0] == data.iloc[0])
    assert all(trimmed.iloc[-1] == data.iloc[-31])

    # Remove first 25 and last 30 minutes (1800 s)
    timerange = TimeRange('date', 'date', min_date + 1500, max_date - 1800)
    trimmed = trim_dataframe(data.copy(), timerange)
    assert not trimmed.equals(data)
    assert len(trimmed) < len(data)
    assert len(trimmed) == len(data) - 55
    # first row matches 25th original row
    assert all(trimmed.iloc[0] == data.iloc[25])
|