2018-12-13 05:35:42 +00:00
|
|
|
# pragma pylint: disable=missing-docstring, protected-access, C0103
|
|
|
|
|
|
|
|
import json
|
|
|
|
import uuid
|
2019-05-25 18:25:59 +00:00
|
|
|
from pathlib import Path
|
2018-12-13 05:35:42 +00:00
|
|
|
from shutil import copyfile
|
2019-08-25 13:02:40 +00:00
|
|
|
from unittest.mock import MagicMock, PropertyMock
|
2018-12-13 05:35:42 +00:00
|
|
|
|
|
|
|
import arrow
|
2018-12-16 09:17:11 +00:00
|
|
|
import pytest
|
2019-05-25 18:25:59 +00:00
|
|
|
from pandas import DataFrame
|
2018-12-13 05:35:42 +00:00
|
|
|
|
2018-12-16 09:17:11 +00:00
|
|
|
from freqtrade import OperationalException
|
2019-07-11 18:23:23 +00:00
|
|
|
from freqtrade.configuration import TimeRange
|
2018-12-13 05:35:42 +00:00
|
|
|
from freqtrade.data import history
|
2019-10-13 14:04:40 +00:00
|
|
|
from freqtrade.data.history import (_load_cached_data_for_updating,
|
2019-12-16 18:57:03 +00:00
|
|
|
_download_pair_history,
|
|
|
|
_download_trades_history,
|
2019-10-13 14:04:40 +00:00
|
|
|
convert_trades_to_ohlcv,
|
2019-08-14 16:56:46 +00:00
|
|
|
load_tickerdata_file, pair_data_filename,
|
2019-08-14 16:58:27 +00:00
|
|
|
pair_trades_filename,
|
2019-10-13 14:04:40 +00:00
|
|
|
refresh_backtest_ohlcv_data,
|
|
|
|
refresh_backtest_trades_data,
|
|
|
|
trim_tickerlist)
|
2019-05-25 18:25:59 +00:00
|
|
|
from freqtrade.exchange import timeframe_to_minutes
|
2018-12-13 05:35:42 +00:00
|
|
|
from freqtrade.misc import file_dump_json
|
2019-05-25 18:25:59 +00:00
|
|
|
from freqtrade.strategy.default_strategy import DefaultStrategy
|
2019-08-14 16:56:46 +00:00
|
|
|
from tests.conftest import (get_patched_exchange, log_has, log_has_re,
|
|
|
|
patch_exchange)
|
2018-12-13 05:35:42 +00:00
|
|
|
|
|
|
|
# Change this if modifying UNITTEST/BTC testdatafile
# Number of candles in tests/testdata/UNITTEST_BTC-1m.json; several tests
# below compare loaded-data lengths against this value.
_BTC_UNITTEST_LENGTH = 13681
|
|
|
|
|
|
|
|
|
2019-10-08 19:10:43 +00:00
|
|
|
def _backup_file(file: Path, copy_file: bool = False) -> None:
    """
    Backup existing file to avoid deleting the user file
    :param file: complete path to the file
    :param copy_file: if True, copy the backed-up content back so the
        original file also stays in place (the test then works on a copy)
    :return: None
    """
    file_swp = str(file) + '.swp'
    # Only back up when the file actually exists; otherwise nothing to protect.
    if file.is_file():
        file.rename(file_swp)

        if copy_file:
            copyfile(file_swp, file)
|
|
|
|
|
|
|
|
|
2019-10-08 19:10:43 +00:00
|
|
|
def _clean_test_file(file: Path) -> None:
    """
    Remove the file created by a test and restore the '.swp' backup
    (created by _backup_file) if one exists.
    :param file: complete path to the file
    :return: None
    """
    file_swp = Path(str(file) + '.swp')
    # 1. Delete file from the test
    if file.is_file():
        file.unlink()

    # 2. Rollback to the initial file
    if file_swp.is_file():
        file_swp.rename(file)
|
2018-12-13 05:35:42 +00:00
|
|
|
|
|
|
|
|
2019-09-07 18:56:03 +00:00
|
|
|
def test_load_data_30min_ticker(mocker, caplog, default_conf, testdatadir) -> None:
    """30m data exists in testdata, so loading must succeed without a download message."""
    ld = history.load_pair_history(pair='UNITTEST/BTC', timeframe='30m', datadir=testdatadir)
    assert isinstance(ld, DataFrame)
    # No download should have been attempted since the data file is present.
    assert not log_has(
        'Download history data for pair: "UNITTEST/BTC", timeframe: 30m '
        'and store in None.', caplog
    )
|
2018-12-13 05:35:42 +00:00
|
|
|
|
|
|
|
|
2019-09-07 18:56:03 +00:00
|
|
|
def test_load_data_7min_ticker(mocker, caplog, default_conf, testdatadir) -> None:
    """No 7m data exists in testdata: an empty DataFrame plus a hint log is expected."""
    ld = history.load_pair_history(pair='UNITTEST/BTC', timeframe='7m', datadir=testdatadir)
    assert isinstance(ld, DataFrame)
    assert ld.empty
    assert log_has(
        'No history data for pair: "UNITTEST/BTC", timeframe: 7m. '
        'Use `freqtrade download-data` to download the data', caplog
    )
|
2018-12-13 05:35:42 +00:00
|
|
|
|
|
|
|
|
2019-09-07 18:56:03 +00:00
|
|
|
def test_load_data_1min_ticker(ticker_history, mocker, caplog, testdatadir) -> None:
    """
    load_data for an existing 1m pair must keep the data file present and
    must not log a download message (no refresh requested).
    """
    mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv', return_value=ticker_history)
    file = testdatadir / 'UNITTEST_BTC-1m.json'
    # Keep a copy in place so load_data finds the file while the original is backed up.
    _backup_file(file, copy_file=True)
    history.load_data(datadir=testdatadir, timeframe='1m', pairs=['UNITTEST/BTC'])
    assert file.is_file()
    # Use the current message format ("timeframe:", as asserted elsewhere in this
    # file) instead of the outdated "interval:" wording — otherwise this negative
    # assertion could never match the real log message and would be vacuous.
    assert not log_has(
        'Download history data for pair: "UNITTEST/BTC", timeframe: 1m '
        'and store in None.', caplog
    )
    _clean_test_file(file)
|
|
|
|
|
|
|
|
|
2019-10-27 09:00:44 +00:00
|
|
|
def test_load_data_startup_candles(mocker, caplog, default_conf, testdatadir) -> None:
    """startup_candles must widen the requested timerange backwards before loading."""
    # Stub out the actual file load; we only inspect the timerange it is called with.
    ltfmock = mocker.patch('freqtrade.data.history.load_tickerdata_file',
                           MagicMock(return_value=None))
    timerange = TimeRange('date', None, 1510639620, 0)
    history.load_pair_history(pair='UNITTEST/BTC', timeframe='1m',
                              datadir=testdatadir, timerange=timerange,
                              startup_candles=20,
                              )

    assert ltfmock.call_count == 1
    # The timerange passed down must be adjusted, not the one we provided.
    assert ltfmock.call_args_list[0][1]['timerange'] != timerange
    # startts is 20 minutes earlier
    assert ltfmock.call_args_list[0][1]['timerange'].startts == timerange.startts - 20 * 60
|
|
|
|
|
|
|
|
|
2019-09-07 18:56:03 +00:00
|
|
|
def test_load_data_with_new_pair_1min(ticker_history_list, mocker, caplog,
                                      default_conf, testdatadir) -> None:
    """
    Test load_pair_history() with 1 min ticker:
    - without refresh_pairs, a missing pair is not downloaded
    - with refresh_pairs and an exchange, it is downloaded and stored
    - refresh_pairs without an exchange raises OperationalException
    """
    mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv', return_value=ticker_history_list)
    exchange = get_patched_exchange(mocker, default_conf)
    file = testdatadir / 'MEME_BTC-1m.json'

    _backup_file(file)
    # do not download a new pair if refresh_pairs isn't set
    history.load_pair_history(datadir=testdatadir,
                              timeframe='1m',
                              pair='MEME/BTC')
    assert not file.is_file()
    assert log_has(
        'No history data for pair: "MEME/BTC", timeframe: 1m. '
        'Use `freqtrade download-data` to download the data', caplog
    )

    # download a new pair if refresh_pairs is set
    history.load_pair_history(datadir=testdatadir,
                              timeframe='1m',
                              refresh_pairs=True,
                              exchange=exchange,
                              pair='MEME/BTC')
    assert file.is_file()
    assert log_has_re(
        'Download history data for pair: "MEME/BTC", timeframe: 1m '
        'and store in .*', caplog
    )
    # refresh_pairs without an exchange object must fail loudly.
    with pytest.raises(OperationalException, match=r'Exchange needs to be initialized when.*'):
        history.load_pair_history(datadir=testdatadir,
                                  timeframe='1m',
                                  refresh_pairs=True,
                                  exchange=None,
                                  pair='MEME/BTC')
    _clean_test_file(file)
|
|
|
|
|
|
|
|
|
2019-09-07 18:56:03 +00:00
|
|
|
def test_testdata_path(testdatadir) -> None:
    """The testdatadir fixture must point inside tests/testdata."""
    expected_fragment = Path('tests') / 'testdata'
    assert str(expected_fragment) in str(testdatadir)
|
2018-12-13 05:35:42 +00:00
|
|
|
|
|
|
|
|
2019-08-14 16:56:46 +00:00
|
|
|
def test_pair_data_filename():
    """pair_data_filename builds <datadir>/<PAIR with '/' replaced>-<timeframe>.json."""
    result = pair_data_filename(Path('freqtrade/hello/world'), 'ETH/BTC', '5m')
    expected = Path('freqtrade/hello/world/ETH_BTC-5m.json')
    assert isinstance(result, Path)
    assert result == expected
|
|
|
|
|
|
|
|
|
2019-08-14 16:58:27 +00:00
|
|
|
def test_pair_trades_filename():
    """pair_trades_filename builds <datadir>/<PAIR with '/' replaced>-trades.json.gz."""
    result = pair_trades_filename(Path('freqtrade/hello/world'), 'ETH/BTC')
    expected = Path('freqtrade/hello/world/ETH_BTC-trades.json.gz')
    assert isinstance(result, Path)
    assert result == expected
|
2019-08-14 16:58:27 +00:00
|
|
|
|
|
|
|
|
2018-12-13 05:35:42 +00:00
|
|
|
def test_load_cached_data_for_updating(mocker) -> None:
    """
    Exercise _load_cached_data_for_updating for every timerange shape:
    start before / inside / after the cached data, 'line'-style ranges,
    and missing data files. Returns (cached_data, download_start_ts).
    """
    datadir = Path(__file__).parent.parent.joinpath('testdata')

    test_data = None
    test_filename = datadir.joinpath('UNITTEST_BTC-1m.json')
    with open(test_filename, "rt") as file:
        test_data = json.load(file)

    # change now time to test 'line' cases
    # now = last cached item + 1 hour
    now_ts = test_data[-1][0] / 1000 + 60 * 60
    mocker.patch('arrow.utcnow', return_value=arrow.get(now_ts))

    # timeframe starts earlier than the cached data
    # should fully update data
    timerange = TimeRange('date', None, test_data[0][0] / 1000 - 1, 0)
    data, start_ts = _load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
    assert data == []
    assert start_ts == test_data[0][0] - 1000

    # same with 'line' timeframe
    num_lines = (test_data[-1][0] - test_data[1][0]) / 1000 / 60 + 120
    data, start_ts = _load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m',
                                                    TimeRange(None, 'line', 0, -num_lines))
    assert data == []
    assert start_ts < test_data[0][0] - 1

    # timeframe starts in the center of the cached data
    # should return the chached data w/o the last item
    timerange = TimeRange('date', None, test_data[0][0] / 1000 + 1, 0)
    data, start_ts = _load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
    assert data == test_data[:-1]
    assert test_data[-2][0] < start_ts < test_data[-1][0]

    # same with 'line' timeframe
    num_lines = (test_data[-1][0] - test_data[1][0]) / 1000 / 60 + 30
    timerange = TimeRange(None, 'line', 0, -num_lines)
    data, start_ts = _load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
    assert data == test_data[:-1]
    assert test_data[-2][0] < start_ts < test_data[-1][0]

    # timeframe starts after the chached data
    # should return the chached data w/o the last item
    timerange = TimeRange('date', None, test_data[-1][0] / 1000 + 1, 0)
    data, start_ts = _load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
    assert data == test_data[:-1]
    assert test_data[-2][0] < start_ts < test_data[-1][0]

    # Try loading last 30 lines.
    # Not supported by _load_cached_data_for_updating, we always need to get the full data.
    num_lines = 30
    timerange = TimeRange(None, 'line', 0, -num_lines)
    data, start_ts = _load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
    assert data == test_data[:-1]
    assert test_data[-2][0] < start_ts < test_data[-1][0]

    # no timeframe is set
    # should return the chached data w/o the last item
    num_lines = 30
    timerange = TimeRange(None, 'line', 0, -num_lines)
    data, start_ts = _load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
    assert data == test_data[:-1]
    assert test_data[-2][0] < start_ts < test_data[-1][0]

    # no datafile exist
    # should return timestamp start time
    timerange = TimeRange('date', None, now_ts - 10000, 0)
    data, start_ts = _load_cached_data_for_updating(datadir, 'NONEXIST/BTC', '1m', timerange)
    assert data == []
    assert start_ts == (now_ts - 10000) * 1000

    # same with 'line' timeframe
    num_lines = 30
    timerange = TimeRange(None, 'line', 0, -num_lines)
    data, start_ts = _load_cached_data_for_updating(datadir, 'NONEXIST/BTC', '1m', timerange)
    assert data == []
    assert start_ts == (now_ts - num_lines * 60) * 1000

    # no datafile exist, no timeframe is set
    # should return an empty array and None
    data, start_ts = _load_cached_data_for_updating(datadir, 'NONEXIST/BTC', '1m', None)
    assert data == []
    assert start_ts is None
|
|
|
|
|
|
|
|
|
2019-09-07 18:56:03 +00:00
|
|
|
def test_download_pair_history(ticker_history_list, mocker, default_conf, testdatadir) -> None:
    """_download_pair_history must create the data file for each pair/timeframe."""
    mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv', return_value=ticker_history_list)
    exchange = get_patched_exchange(mocker, default_conf)
    file1_1 = testdatadir / 'MEME_BTC-1m.json'
    file1_5 = testdatadir / 'MEME_BTC-5m.json'
    file2_1 = testdatadir / 'CFI_BTC-1m.json'
    file2_5 = testdatadir / 'CFI_BTC-5m.json'

    # Move any pre-existing files out of the way so is_file() checks are meaningful.
    _backup_file(file1_1)
    _backup_file(file1_5)
    _backup_file(file2_1)
    _backup_file(file2_5)

    assert not file1_1.is_file()
    assert not file2_1.is_file()

    assert _download_pair_history(datadir=testdatadir, exchange=exchange,
                                  pair='MEME/BTC',
                                  timeframe='1m')
    assert _download_pair_history(datadir=testdatadir, exchange=exchange,
                                  pair='CFI/BTC',
                                  timeframe='1m')
    # Downloading must not touch the exchange's pair-refresh bookkeeping.
    assert not exchange._pairs_last_refresh_time
    assert file1_1.is_file()
    assert file2_1.is_file()

    # clean files freshly downloaded
    _clean_test_file(file1_1)
    _clean_test_file(file2_1)

    assert not file1_5.is_file()
    assert not file2_5.is_file()

    assert _download_pair_history(datadir=testdatadir, exchange=exchange,
                                  pair='MEME/BTC',
                                  timeframe='5m')
    assert _download_pair_history(datadir=testdatadir, exchange=exchange,
                                  pair='CFI/BTC',
                                  timeframe='5m')
    assert not exchange._pairs_last_refresh_time
    assert file1_5.is_file()
    assert file2_5.is_file()

    # clean files freshly downloaded
    _clean_test_file(file1_5)
    _clean_test_file(file2_5)
|
2018-12-13 05:35:42 +00:00
|
|
|
|
|
|
|
|
2019-09-07 18:56:03 +00:00
|
|
|
def test_download_pair_history2(mocker, default_conf, testdatadir) -> None:
    """Each _download_pair_history call must persist its result exactly once."""
    # Two minimal OHLCV candles: [timestamp_ms, open, high, low, close, volume]
    tick = [
        [1509836520000, 0.00162008, 0.00162008, 0.00162008, 0.00162008, 108.14853839],
        [1509836580000, 0.00161, 0.00161, 0.00161, 0.00161, 82.390199]
    ]
    # Mock the json dump so no file is actually written; count calls instead.
    json_dump_mock = mocker.patch('freqtrade.misc.file_dump_json', return_value=None)
    mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv', return_value=tick)
    exchange = get_patched_exchange(mocker, default_conf)
    _download_pair_history(testdatadir, exchange, pair="UNITTEST/BTC", timeframe='1m')
    _download_pair_history(testdatadir, exchange, pair="UNITTEST/BTC", timeframe='3m')
    assert json_dump_mock.call_count == 2
|
|
|
|
|
|
|
|
|
2019-09-07 18:56:03 +00:00
|
|
|
def test_download_backtesting_data_exception(ticker_history, mocker, caplog,
                                             default_conf, testdatadir) -> None:
    """A failing download must return False and log the error, not raise."""
    mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv',
                 side_effect=Exception('File Error'))

    exchange = get_patched_exchange(mocker, default_conf)

    file1_1 = testdatadir / 'MEME_BTC-1m.json'
    file1_5 = testdatadir / 'MEME_BTC-5m.json'
    _backup_file(file1_1)
    _backup_file(file1_5)

    assert not _download_pair_history(datadir=testdatadir, exchange=exchange,
                                      pair='MEME/BTC',
                                      timeframe='1m')
    # clean files freshly downloaded
    _clean_test_file(file1_1)
    _clean_test_file(file1_5)
    assert log_has(
        'Failed to download history data for pair: "MEME/BTC", timeframe: 1m. '
        'Error: File Error', caplog
    )
|
2018-12-16 09:30:13 +00:00
|
|
|
|
|
|
|
|
2019-09-07 18:56:03 +00:00
|
|
|
def test_load_tickerdata_file(testdatadir) -> None:
    """load_tickerdata_file must handle missing, plain-json and gzipped files."""
    # 7 does not exist in either format.
    assert not load_tickerdata_file(testdatadir, 'UNITTEST/BTC', '7m')
    # 1 exists only as a .json
    tickerdata = load_tickerdata_file(testdatadir, 'UNITTEST/BTC', '1m')
    assert _BTC_UNITTEST_LENGTH == len(tickerdata)
    # 8 .json is empty and will fail if it's loaded. .json.gz is a copy of 1.json
    tickerdata = load_tickerdata_file(testdatadir, 'UNITTEST/BTC', '8m')
    assert _BTC_UNITTEST_LENGTH == len(tickerdata)
|
|
|
|
|
|
|
|
|
2019-09-07 18:56:03 +00:00
|
|
|
def test_load_partial_missing(testdatadir, caplog) -> None:
    """load_data must warn when the requested timerange extends past the data."""
    # Make sure we start fresh - test missing data at start
    start = arrow.get('2018-01-01T00:00:00')
    end = arrow.get('2018-01-11T00:00:00')
    tickerdata = history.load_data(testdatadir, '5m', ['UNITTEST/BTC'],
                                   startup_candles=20,
                                   timerange=TimeRange('date', 'date',
                                                       start.timestamp, end.timestamp))
    assert log_has(
        'Using indicator startup period: 20 ...', caplog
    )
    # timedifference in 5 minutes
    td = ((end - start).total_seconds() // 60 // 5) + 1
    # Fewer candles than the timerange implies -> data is missing at the start.
    assert td != len(tickerdata['UNITTEST/BTC'])
    start_real = tickerdata['UNITTEST/BTC'].iloc[0, 0]
    assert log_has(f'Missing data at start for pair '
                   f'UNITTEST/BTC, data starts at {start_real.strftime("%Y-%m-%d %H:%M:%S")}',
                   caplog)
    # Make sure we start fresh - test missing data at end
    caplog.clear()
    start = arrow.get('2018-01-10T00:00:00')
    end = arrow.get('2018-02-20T00:00:00')
    tickerdata = history.load_data(datadir=testdatadir, timeframe='5m',
                                   pairs=['UNITTEST/BTC'],
                                   timerange=TimeRange('date', 'date',
                                                       start.timestamp, end.timestamp))
    # timedifference in 5 minutes
    td = ((end - start).total_seconds() // 60 // 5) + 1
    assert td != len(tickerdata['UNITTEST/BTC'])
    # Shift endtime with +5 - as last candle is dropped (partial candle)
    end_real = arrow.get(tickerdata['UNITTEST/BTC'].iloc[-1, 0]).shift(minutes=5)
    assert log_has(f'Missing data at end for pair '
                   f'UNITTEST/BTC, data ends at {end_real.strftime("%Y-%m-%d %H:%M:%S")}',
                   caplog)
|
2018-12-13 05:35:42 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_init(default_conf, mocker) -> None:
    """load_data with an empty pair list must return an empty dict."""
    exchange = get_patched_exchange(mocker, default_conf)
    assert {} == history.load_data(
        datadir='',
        exchange=exchange,
        pairs=[],
        refresh_pairs=True,
        timeframe=default_conf['ticker_interval']
    )
|
|
|
|
|
|
|
|
|
2019-10-08 19:10:43 +00:00
|
|
|
def test_trim_tickerlist(testdatadir) -> None:
    """
    trim_tickerlist must slice the raw candle list according to the
    TimeRange start/stop types. Identity ('is') asserts verify the
    trimmed list reuses the original elements rather than copying them.
    """
    file = testdatadir / 'UNITTEST_BTC-1m.json'
    with open(file) as data_file:
        ticker_list = json.load(data_file)
    ticker_list_len = len(ticker_list)

    # Test the pattern ^(\d{8})-(\d{8})$
    # This pattern extract a window between the dates
    timerange = TimeRange('date', 'date', ticker_list[5][0] / 1000, ticker_list[10][0] / 1000 - 1)
    ticker = trim_tickerlist(ticker_list, timerange)
    ticker_len = len(ticker)

    assert ticker_len == 5
    assert ticker_list[0] is not ticker[0]  # The first element should be different
    assert ticker_list[5] is ticker[0]  # The list starts at the index 5
    assert ticker_list[9] is ticker[-1]  # The list ends at the index 9 (5 elements)

    # Test the pattern ^-(\d{8})$
    # This pattern extracts elements from the start to the date
    timerange = TimeRange(None, 'date', 0, ticker_list[10][0] / 1000 - 1)
    ticker = trim_tickerlist(ticker_list, timerange)
    ticker_len = len(ticker)

    assert ticker_len == 10
    assert ticker_list[0] is ticker[0]  # The start of the list is included
    assert ticker_list[9] is ticker[-1]  # The element 10 is not included

    # Test the pattern ^(\d{8})-$
    # This pattern extracts elements from the date to now
    timerange = TimeRange('date', None, ticker_list[10][0] / 1000 - 1, None)
    ticker = trim_tickerlist(ticker_list, timerange)
    ticker_len = len(ticker)

    assert ticker_len == ticker_list_len - 10
    assert ticker_list[10] is ticker[0]  # The first element is element #10
    assert ticker_list[-1] is ticker[-1]  # The last element is the same

    # Test a wrong pattern
    # This pattern must return the list unchanged
    timerange = TimeRange(None, None, None, 5)
    ticker = trim_tickerlist(ticker_list, timerange)
    ticker_len = len(ticker)

    assert ticker_list_len == ticker_len

    # passing empty list
    timerange = TimeRange(None, None, None, 5)
    ticker = trim_tickerlist([], timerange)
    assert 0 == len(ticker)
    assert not ticker
|
|
|
|
|
2018-12-13 05:35:42 +00:00
|
|
|
|
2019-10-23 17:54:43 +00:00
|
|
|
def test_trim_dataframe(testdatadir) -> None:
    """trim_dataframe must cut rows outside the TimeRange from either end."""
    data = history.load_data(
        datadir=testdatadir,
        timeframe='1m',
        pairs=['UNITTEST/BTC']
    )['UNITTEST/BTC']
    min_date = int(data.iloc[0]['date'].timestamp())
    max_date = int(data.iloc[-1]['date'].timestamp())
    data_modify = data.copy()

    # Remove first 30 minutes (1800 s)
    tr = TimeRange('date', None, min_date + 1800, 0)
    data_modify = history.trim_dataframe(data_modify, tr)
    assert not data_modify.equals(data)
    assert len(data_modify) < len(data)
    assert len(data_modify) == len(data) - 30  # 30 one-minute candles dropped
    assert all(data_modify.iloc[-1] == data.iloc[-1])
    assert all(data_modify.iloc[0] == data.iloc[30])

    data_modify = data.copy()
    # Remove last 30 minutes (1800 s)
    tr = TimeRange(None, 'date', 0, max_date - 1800)
    data_modify = history.trim_dataframe(data_modify, tr)
    assert not data_modify.equals(data)
    assert len(data_modify) < len(data)
    assert len(data_modify) == len(data) - 30
    assert all(data_modify.iloc[0] == data.iloc[0])
    assert all(data_modify.iloc[-1] == data.iloc[-31])

    data_modify = data.copy()
    # Remove first 25 and last 30 minutes (1800 s)
    tr = TimeRange('date', 'date', min_date + 1500, max_date - 1800)
    data_modify = history.trim_dataframe(data_modify, tr)
    assert not data_modify.equals(data)
    assert len(data_modify) < len(data)
    assert len(data_modify) == len(data) - 55
    # first row matches 25th original row
    assert all(data_modify.iloc[0] == data.iloc[25])
|
|
|
|
|
|
|
|
|
2019-10-08 19:10:43 +00:00
|
|
|
def test_file_dump_json_tofile(testdatadir) -> None:
    """file_dump_json must create the target file and serialize the payload."""
    target = testdatadir / 'test_{id}.json'.format(id=str(uuid.uuid4()))
    payload = {'bar': 'foo'}

    # The randomly-named target must not exist before the dump.
    assert not target.is_file()

    # Write the payload out as JSON.
    file_dump_json(target, payload)

    # The dump must have created the file.
    assert target.is_file()

    # Read it back and verify the payload round-trips.
    with target.open() as data_file:
        loaded = json.load(data_file)

    assert 'bar' in loaded
    assert loaded['bar'] == 'foo'

    # Remove the file again.
    _clean_test_file(target)
|
2019-05-25 18:25:59 +00:00
|
|
|
|
|
|
|
|
2019-09-07 18:56:03 +00:00
|
|
|
def test_get_timeframe(default_conf, mocker, testdatadir) -> None:
    """get_timeframe must return the min/max dates of the loaded dataframes."""
    patch_exchange(mocker)
    strategy = DefaultStrategy(default_conf)

    data = strategy.tickerdata_to_dataframe(
        history.load_data(
            datadir=testdatadir,
            timeframe='1m',
            pairs=['UNITTEST/BTC']
        )
    )
    min_date, max_date = history.get_timeframe(data)
    # Expected bounds of the UNITTEST/BTC 1m test data file.
    assert min_date.isoformat() == '2017-11-04T23:02:00+00:00'
    assert max_date.isoformat() == '2017-11-14T22:58:00+00:00'
|
|
|
|
|
|
|
|
|
2019-09-07 18:56:03 +00:00
|
|
|
def test_validate_backtest_data_warn(default_conf, mocker, caplog, testdatadir) -> None:
    """With fill_up_missing disabled, validate_backtest_data must flag the gaps."""
    patch_exchange(mocker)
    strategy = DefaultStrategy(default_conf)

    data = strategy.tickerdata_to_dataframe(
        history.load_data(
            datadir=testdatadir,
            timeframe='1m',
            pairs=['UNITTEST/BTC'],
            fill_up_missing=False
        )
    )
    min_date, max_date = history.get_timeframe(data)
    # Drop log records emitted during loading; only validation logs are checked.
    caplog.clear()
    assert history.validate_backtest_data(data['UNITTEST/BTC'], 'UNITTEST/BTC',
                                          min_date, max_date, timeframe_to_minutes('1m'))
    assert len(caplog.record_tuples) == 1
    assert log_has(
        "UNITTEST/BTC has missing frames: expected 14396, got 13680, that's 716 missing values",
        caplog)
|
2019-05-25 18:25:59 +00:00
|
|
|
|
|
|
|
|
2019-09-07 18:56:03 +00:00
|
|
|
def test_validate_backtest_data(default_conf, mocker, caplog, testdatadir) -> None:
    """Complete (gap-free) data must validate cleanly with no warnings."""
    patch_exchange(mocker)
    strategy = DefaultStrategy(default_conf)

    timerange = TimeRange('index', 'index', 200, 250)
    data = strategy.tickerdata_to_dataframe(
        history.load_data(
            datadir=testdatadir,
            timeframe='5m',
            pairs=['UNITTEST/BTC'],
            timerange=timerange
        )
    )

    min_date, max_date = history.get_timeframe(data)
    # Drop log records emitted during loading; only validation logs are checked.
    caplog.clear()
    assert not history.validate_backtest_data(data['UNITTEST/BTC'], 'UNITTEST/BTC',
                                              min_date, max_date, timeframe_to_minutes('5m'))
    assert len(caplog.record_tuples) == 0
|
2019-08-25 13:02:40 +00:00
|
|
|
|
|
|
|
|
2019-09-07 18:56:03 +00:00
|
|
|
def test_refresh_backtest_ohlcv_data(mocker, default_conf, markets, caplog, testdatadir):
    """refresh_backtest_ohlcv_data must trigger one download per pair/timeframe."""
    dl_mock = mocker.patch('freqtrade.data.history._download_pair_history', MagicMock())
    mocker.patch(
        'freqtrade.exchange.Exchange.markets', PropertyMock(return_value=markets)
    )
    # Patch Path so 'erase=True' can pretend files exist without touching disk.
    mocker.patch.object(Path, "exists", MagicMock(return_value=True))
    mocker.patch.object(Path, "unlink", MagicMock())

    ex = get_patched_exchange(mocker, default_conf)
    timerange = TimeRange.parse_timerange("20190101-20190102")
    refresh_backtest_ohlcv_data(exchange=ex, pairs=["ETH/BTC", "XRP/BTC"],
                                timeframes=["1m", "5m"], datadir=testdatadir,
                                timerange=timerange, erase=True
                                )

    # 2 pairs x 2 timeframes = 4 download calls.
    assert dl_mock.call_count == 4
    assert dl_mock.call_args[1]['timerange'].starttype == 'date'

    assert log_has("Downloading pair ETH/BTC, interval 1m.", caplog)
|
|
|
|
|
|
|
|
|
2019-09-07 18:56:03 +00:00
|
|
|
def test_download_data_no_markets(mocker, default_conf, caplog, testdatadir):
    """Pairs absent from the exchange markets must be skipped and reported."""
    dl_mock = mocker.patch('freqtrade.data.history._download_pair_history', MagicMock())

    ex = get_patched_exchange(mocker, default_conf)
    # Empty markets: no pair is available on the exchange.
    mocker.patch(
        'freqtrade.exchange.Exchange.markets', PropertyMock(return_value={})
    )
    timerange = TimeRange.parse_timerange("20190101-20190102")
    unav_pairs = refresh_backtest_ohlcv_data(exchange=ex, pairs=["BTT/BTC", "LTC/USDT"],
                                             timeframes=["1m", "5m"],
                                             datadir=testdatadir,
                                             timerange=timerange, erase=False
                                             )

    assert dl_mock.call_count == 0
    assert "BTT/BTC" in unav_pairs
    assert "LTC/USDT" in unav_pairs
    assert log_has("Skipping pair BTT/BTC...", caplog)
|
2019-10-08 18:45:35 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_refresh_backtest_trades_data(mocker, default_conf, markets, caplog, testdatadir):
    """Trades are refreshed only for pairs the exchange lists; others are skipped."""
    download_mock = mocker.patch('freqtrade.data.history._download_trades_history', MagicMock())
    mocker.patch('freqtrade.exchange.Exchange.markets', PropertyMock(return_value=markets))
    # Pretend the data files exist so erase=True exercises the unlink path
    # without touching the real test data.
    mocker.patch.object(Path, "exists", MagicMock(return_value=True))
    mocker.patch.object(Path, "unlink", MagicMock())

    exchange = get_patched_exchange(mocker, default_conf)
    tr = TimeRange.parse_timerange("20190101-20190102")
    missing = refresh_backtest_trades_data(
        exchange=exchange,
        pairs=["ETH/BTC", "XRP/BTC", "XRP/ETH"],
        datadir=testdatadir,
        timerange=tr,
        erase=True,
    )

    # Two listed pairs downloaded; the timerange was forwarded unchanged.
    assert download_mock.call_count == 2
    assert download_mock.call_args[1]['timerange'].starttype == 'date'
    assert log_has("Downloading trades for pair ETH/BTC.", caplog)
    # XRP/ETH is not in the market map and must be reported as unavailable.
    assert missing == ["XRP/ETH"]
    assert log_has("Skipping pair XRP/ETH...", caplog)
|
2019-10-08 19:18:56 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_download_trades_history(trades_history, mocker, default_conf, testdatadir, caplog) -> None:
    """_download_trades_history stores fetched trades and signals failure on exceptions."""
    fetch_mock = MagicMock(side_effect=lambda pair, *args, **kwargs: (pair, trades_history))
    mocker.patch('freqtrade.exchange.Exchange.get_historic_trades', fetch_mock)
    exchange = get_patched_exchange(mocker, default_conf)
    trades_file = testdatadir / 'ETH_BTC-trades.json.gz'

    # Move any pre-existing file out of the way so the download has to create it.
    _backup_file(trades_file)
    assert not trades_file.is_file()

    # A successful fetch returns True and writes the trades file.
    assert _download_trades_history(datadir=testdatadir, exchange=exchange, pair='ETH/BTC')
    assert log_has("New Amount of trades: 5", caplog)
    assert trades_file.is_file()

    # clean files freshly downloaded
    _clean_test_file(trades_file)

    # An exchange error must be swallowed and reported as False, not raised.
    mocker.patch('freqtrade.exchange.Exchange.get_historic_trades',
                 MagicMock(side_effect=ValueError))
    assert not _download_trades_history(datadir=testdatadir, exchange=exchange, pair='ETH/BTC')
    assert log_has_re('Failed to download historic trades for pair: "ETH/BTC".*', caplog)
|
|
|
|
|
|
|
|
|
2019-10-13 14:04:40 +00:00
|
|
|
def test_convert_trades_to_ohlcv(mocker, default_conf, testdatadir, caplog):
    """Candles rebuilt from stored trades must equal the candle files shipped as test data."""
    pair = 'XRP/ETH'
    file_1m = testdatadir / 'XRP_ETH-1m.json'
    file_5m = testdatadir / 'XRP_ETH-5m.json'

    # Compare downloaded dataset with converted dataset
    expected_1m = history.load_pair_history(datadir=testdatadir, timeframe="1m", pair=pair)
    expected_5m = history.load_pair_history(datadir=testdatadir, timeframe="5m", pair=pair)

    # Keep a copy of the 1m file so the "Deleting existing data" path is exercised;
    # the 5m file is simply moved aside and regenerated from trades.
    _backup_file(file_1m, copy_file=True)
    _backup_file(file_5m)

    tr = TimeRange.parse_timerange('20191011-20191012')
    convert_trades_to_ohlcv([pair], timeframes=['1m', '5m'],
                            datadir=testdatadir, timerange=tr, erase=True)

    assert log_has("Deleting existing data for pair XRP/ETH, interval 1m.", caplog)

    # Load new data and verify it matches the original candles.
    converted_1m = history.load_pair_history(datadir=testdatadir, timeframe="1m", pair=pair)
    converted_5m = history.load_pair_history(datadir=testdatadir, timeframe="5m", pair=pair)

    assert converted_1m.equals(expected_1m)
    assert converted_5m.equals(expected_5m)

    # Restore the original test-data files.
    _clean_test_file(file_1m)
    _clean_test_file(file_5m)
|