Fix review comments. Add support for datetime timeranges
parent 2fe7812e20
commit 82ea56c8fd
@@ -80,12 +80,9 @@ The full timerange specification:
 - Use last 123 tickframes of data: `--timerange=-123`
 - Use first 123 tickframes of data: `--timerange=123-`
 - Use tickframes from line 123 through 456: `--timerange=123-456`
 
-Incoming feature, not implemented yet:
-
-- `--timerange=-20180131`
-- `--timerange=20180101-`
-- `--timerange=20180101-20181231`
+- Use tickframes till 2018/01/31: `--timerange=-20180131`
+- Use tickframes since 2018/01/31: `--timerange=20180131-`
+- Use tickframes since 2018/01/31 till 2018/03/01: `--timerange=20180131-20180301`
 
 
 **Update testdata directory**
@@ -6,6 +6,7 @@ import argparse
 import logging
 import os
 import re
+import arrow
 from typing import List, Tuple, Optional
 
 from freqtrade import __version__
@@ -228,12 +229,16 @@ class Arguments(object):
                 stop = None
                 if stype[0]:
                     start = rvals[index]
-                    if stype[0] != 'date':
+                    if stype[0] == 'date':
+                        start = arrow.get(start, 'YYYYMMDD').timestamp
+                    else:
                         start = int(start)
                     index += 1
                 if stype[1]:
                     stop = rvals[index]
-                    if stype[1] != 'date':
+                    if stype[1] == 'date':
+                        stop = arrow.get(stop, 'YYYYMMDD').timestamp
+                    else:
                         stop = int(stop)
                 return stype, start, stop
         raise Exception('Incorrect syntax for timerange "%s"' % text)
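For reference, a minimal sketch of what the new date syntax parses to, assuming Arguments is importable from freqtrade.arguments as the tests do. Dates are converted to UTC epoch seconds via arrow, and the existing (stype, start, stop) tuple shape is kept; the numeric values below come from the test cases further down:

import arrow

from freqtrade.arguments import Arguments  # assumed import path

# '-20100522' -> everything up to 2010-05-22 (stop is an epoch timestamp in seconds)
assert Arguments.parse_timerange('-20100522') == ((None, 'date'), None, 1274486400)

# '20100522-' -> everything from 2010-05-22 onwards
assert Arguments.parse_timerange('20100522-') == (('date', None), 1274486400, None)

# '20100522-20150730' -> a window between the two dates
assert Arguments.parse_timerange('20100522-20150730') == (('date', 'date'), 1274486400, 1438214400)

# the conversion itself is plain arrow 'YYYYMMDD' parsing
assert arrow.get('20100522', 'YYYYMMDD').timestamp == 1274486400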
@@ -271,7 +271,26 @@ def get_ticker(pair: str, refresh: Optional[bool] = True) -> dict:
 @retrier
 def get_ticker_history(pair: str, tick_interval: str, since: Optional[int] = None) -> List[Dict]:
     try:
-        return _API.fetch_ohlcv(pair, timeframe=tick_interval, since=since)
+        # download data until it reaches the current time
+        #
+        # it looks as if some exchanges return cached data
+        # and update it with some delay, so a 10 minute margin is added
+        data = []
+        while not since or since < arrow.utcnow().shift(minutes=-10).timestamp * 1000:
+            data_part = _API.fetch_ohlcv(pair, timeframe=tick_interval, since=since)
+
+            if not data_part:
+                break
+
+            logger.info('Downloaded data for time range [%s, %s]',
+                        arrow.get(data_part[0][0] / 1000).format(),
+                        arrow.get(data_part[-1][0] / 1000).format())
+
+            data.extend(data_part)
+            since = data[-1][0] + 1
+
+        return data
+
     except ccxt.NetworkError as e:
         raise NetworkException(
             'Could not load ticker history due to networking error. Message: {}'.format(e)
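The loop above pages through the exchange history until it catches up with the present, minus a 10 minute margin for exchanges that serve slightly stale data. A self-contained sketch of the same pattern, with fetch_candles standing in for _API.fetch_ohlcv:

from typing import Callable, List, Optional

import arrow


def fetch_paged_history(fetch_candles: Callable[..., List[List]],
                        since: Optional[int] = None) -> List[List]:
    # fetch_candles(since=...) is a stand-in for ccxt's fetch_ohlcv and must return
    # OHLCV rows of the form [timestamp_ms, open, high, low, close, volume]
    data: List[List] = []
    while not since or since < arrow.utcnow().shift(minutes=-10).timestamp * 1000:
        data_part = fetch_candles(since=since)
        if not data_part:
            break
        data.extend(data_part)
        # resume right after the newest candle seen so far, so nothing is fetched twice
        since = data[-1][0] + 1
    return data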
@@ -4,10 +4,13 @@ import gzip
 import json
 import logging
 import os
+import arrow
 from typing import Optional, List, Dict, Tuple
 
 from freqtrade import misc
 from freqtrade.exchange import get_ticker_history
+from freqtrade.constants import Constants
 
 from user_data.hyperopt_conf import hyperopt_optimize_conf
 
 logger = logging.getLogger(__name__)
@@ -15,14 +18,30 @@ logger = logging.getLogger(__name__)
 
 def trim_tickerlist(tickerlist: List[Dict], timerange: Tuple[Tuple, int, int]) -> List[Dict]:
     stype, start, stop = timerange
-    if stype == (None, 'line'):
-        return tickerlist[stop:]
-    elif stype == ('line', None):
-        return tickerlist[0:start]
-    elif stype == ('index', 'index'):
-        return tickerlist[start:stop]
-
-    return tickerlist
+    start_index = 0
+    stop_index = len(tickerlist)
+
+    if stype[0] == 'line':
+        stop_index = start
+    if stype[0] == 'index':
+        start_index = start
+    elif stype[0] == 'date':
+        while tickerlist[start_index][0] < start * 1000:
+            start_index += 1
+
+    if stype[1] == 'line':
+        start_index = len(tickerlist) + stop
+    if stype[1] == 'index':
+        stop_index = stop
+    elif stype[1] == 'date':
+        while tickerlist[stop_index - 1][0] > stop * 1000:
+            stop_index -= 1
+
+    if start_index > stop_index:
+        raise ValueError(f'The timerange [{start},{stop}] is incorrect')
+
+    return tickerlist[start_index:stop_index]
 
 
 def load_tickerdata_file(
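A small usage sketch of the new date handling in trim_tickerlist: start/stop are epoch seconds, while the ticker rows carry millisecond timestamps, hence the * 1000 comparisons above. Assuming hypothetical five-minute candles beginning at a known timestamp:

from freqtrade.optimize import trim_tickerlist  # assumed import, as in the tests

# hypothetical five-minute candles: [timestamp_ms, open, high, low, close, volume]
base = 1519862400 * 1000  # 2018-03-01 00:00:00 UTC, in milliseconds
tickers = [[base + i * 300 * 1000, 1.0, 1.0, 1.0, 1.0, 10] for i in range(100)]

# keep only candles at or after 2018-03-01 01:00:00 UTC (epoch seconds, as parse_timerange returns)
timerange = (('date', None), 1519862400 + 3600, None)
trimmed = trim_tickerlist(tickers, timerange)
assert trimmed[0][0] == base + 3600 * 1000
assert len(trimmed) == 100 - 12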
@@ -75,7 +94,7 @@ def load_data(datadir: str,
     # If the user force the refresh of pairs
     if refresh_pairs:
         logger.info('Download data for all pairs and store them in %s', datadir)
-        download_pairs(datadir, _pairs, ticker_interval)
+        download_pairs(datadir, _pairs, ticker_interval, timerange=timerange)
 
     for pair in _pairs:
         pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange)
@@ -97,11 +116,13 @@ def make_testdata_path(datadir: str) -> str:
     )
 
 
-def download_pairs(datadir, pairs: List[str], ticker_interval: str) -> bool:
+def download_pairs(datadir, pairs: List[str],
+                   ticker_interval: str,
+                   timerange: Optional[Tuple[Tuple, int, int]] = None) -> bool:
     """For each pairs passed in parameters, download the ticker intervals"""
     for pair in pairs:
         try:
-            download_backtesting_testdata(datadir, pair=pair, interval=ticker_interval)
+            download_backtesting_testdata(datadir, pair=pair, interval=ticker_interval, timerange=timerange)
         except BaseException:
             logger.info(
                 'Failed to download the pair: "%s", Interval: %s',
@@ -112,12 +133,31 @@ def download_pairs(datadir, pairs: List[str], ticker_interval: str) -> bool:
     return True
 
 
+def get_start_ts_from_timerange(timerange: Tuple[Tuple, int, int], interval: str) -> int:
+    if not timerange:
+        return None
+
+    if timerange[0][0] == 'date':
+        return timerange[1] * 1000
+
+    if timerange[0][1] == 'line':
+        num_minutes = timerange[2] * Constants.TICKER_INTERVAL_MINUTES[interval]
+        return arrow.utcnow().shift(minutes=num_minutes).timestamp * 1000
+
+    return None
+
+
 # FIX: 20180110, suggest rename interval to tick_interval
-def download_backtesting_testdata(datadir: str, pair: str, interval: str = '5m') -> bool:
+def download_backtesting_testdata(datadir: str,
+                                  pair: str,
+                                  interval: str = '5m',
+                                  timerange: Optional[Tuple[Tuple, int, int]] = None) -> bool:
     """
-    Download the latest 1 and 5 ticker intervals from Bittrex for the pairs passed in parameters
+    Download the latest ticker intervals from the exchange for the pairs passed in parameters
     Based on @Rybolov work: https://github.com/rybolov/freqtrade-data
     :param pairs: list of pairs to download
     :param interval: ticker interval
+    :param timerange: range of time to download
     :return: bool
     """
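get_start_ts_from_timerange turns a parsed timerange into the millisecond since value the exchange helpers expect: a date start is converted directly, while a '-N' line range counts N candles back from now via Constants.TICKER_INTERVAL_MINUTES. A brief illustration, mirroring the numbers used in the new tests:

import arrow

from freqtrade.optimize import get_start_ts_from_timerange  # assumed import path

# a 'date' start is simply scaled from seconds to milliseconds
assert get_start_ts_from_timerange((('date', 'date'), 1000, 2000), '5m') == 1000 * 1000

# '--timerange=-200' on 5m candles means: start 200 * 5 minutes before now
now = arrow.utcnow().timestamp
start = get_start_ts_from_timerange(((None, 'line'), None, -200), '5m')
assert abs(start - (now - 200 * 5 * 60) * 1000) < 2000  # equal up to call-time jitter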
@@ -134,23 +174,33 @@ def download_backtesting_testdata(datadir: str, pair: str, interval: str = '5m')
         interval=interval,
     ))
 
+    since = get_start_ts_from_timerange(timerange, interval)
+
     if os.path.isfile(filename):
         with open(filename, "rt") as file:
             data = json.load(file)
-        logger.debug("Current Start: %s", misc.format_ms_time(data[1][0]))
-        logger.debug("Current End: %s", misc.format_ms_time(data[-1:][0][0]))
+
+        if since:
+            if since < data[0][0]:
+                # fully update the data
+                data = []
+            else:
+                # download missing data only
+                since = max(since, data[-1][0] + 1)
+        else:
+            # download missing data only
+            since = data[-1][0] + 1
     else:
         data = []
-        logger.debug("Current Start: None")
-        logger.debug("Current End: None")
 
-    new_data = get_ticker_history(pair=pair, tick_interval=interval)
-    for row in new_data:
-        if row not in data:
-            data.append(row)
+    logger.debug("Current Start: %s", misc.format_ms_time(data[1][0]) if data else 'None')
+    logger.debug("Current End: %s", misc.format_ms_time(data[-1][0]) if data else 'None')
+
+    new_data = get_ticker_history(pair=pair, tick_interval=interval, since=since)
+    data.extend(new_data)
 
     logger.debug("New Start: %s", misc.format_ms_time(data[0][0]))
-    logger.debug("New End: %s", misc.format_ms_time(data[-1:][0][0]))
+    data = sorted(data, key=lambda data: data[0])
+    logger.debug("New End: %s", misc.format_ms_time(data[-1][0]))
 
     misc.file_dump_json(filename, data)
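The block above decides how much to fetch when a cached data file already exists: if the requested start predates the cache, the cache is discarded and everything is refetched; otherwise only candles newer than the cached end are requested. The same decision in isolation, as a sketch over plain [timestamp_ms, ...] rows:

from typing import List, Optional, Tuple


def resolve_since(cached: List[List],
                  requested_since: Optional[int]) -> Tuple[List[List], Optional[int]]:
    # returns (data to keep, 'since' to pass to the exchange)
    if not cached:
        return [], requested_since
    if requested_since is not None and requested_since < cached[0][0]:
        # requested range starts before the cache: drop it and refetch in full
        return [], requested_since
    # otherwise keep the cache and only fetch candles newer than its last entry
    return cached, max(requested_since or 0, cached[-1][0] + 1)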
@@ -321,6 +321,15 @@ def test_get_ticker(default_conf, mocker):
     get_ticker(pair='ETH/BTC', refresh=True)
 
 
+def make_fetch_ohlcv_mock(data):
+    def fetch_ohlcv_mock(pair, timeframe, since):
+        if since:
+            assert since > data[-1][0]
+            return []
+        return data
+    return fetch_ohlcv_mock
+
+
 def test_get_ticker_history(default_conf, mocker):
     api_mock = MagicMock()
     tick = [
@@ -334,7 +343,7 @@ def test_get_ticker_history(default_conf, mocker):
         ]
     ]
     type(api_mock).has = PropertyMock(return_value={'fetchOHLCV': True})
-    api_mock.fetch_ohlcv = MagicMock(return_value=tick)
+    api_mock.fetch_ohlcv = MagicMock(side_effect=make_fetch_ohlcv_mock(tick))
     mocker.patch('freqtrade.exchange._API', api_mock)
 
     # retrieve original ticker
@@ -357,7 +366,7 @@ def test_get_ticker_history(default_conf, mocker):
             10,  # volume (in quote currency)
         ]
     ]
-    api_mock.fetch_ohlcv = MagicMock(return_value=new_tick)
+    api_mock.fetch_ohlcv = MagicMock(side_effect=make_fetch_ohlcv_mock(new_tick))
     mocker.patch('freqtrade.exchange._API', api_mock)
 
     ticks = get_ticker_history('ETH/BTC', default_conf['ticker_interval'])
@@ -3,12 +3,14 @@
 import json
 import os
 import uuid
+import arrow
 from shutil import copyfile
 
 from freqtrade import optimize
 from freqtrade.misc import file_dump_json
 from freqtrade.optimize.__init__ import make_testdata_path, download_pairs, \
-    download_backtesting_testdata, load_tickerdata_file, trim_tickerlist
+    download_backtesting_testdata, load_tickerdata_file, trim_tickerlist, \
+    get_start_ts_from_timerange
 from freqtrade.tests.conftest import log_has
 
 # Change this if modifying UNITTEST/BTC testdatafile
@@ -145,6 +147,28 @@ def test_download_pairs(ticker_history, mocker) -> None:
     _clean_test_file(file2_5)
 
 
+def test_get_start_ts_from_timerange(mocker) -> None:
+    start = get_start_ts_from_timerange(None, '1m')
+    assert start is None
+
+    # check 'date'
+    start = get_start_ts_from_timerange((('date', 'date'), 1000, 2000), '1m')
+    assert start == 1000 * 1000
+
+    start = get_start_ts_from_timerange((('date', 'date'), 1000, 2000), '5m')
+    assert start == 1000 * 1000
+
+    # check 'line'
+    mock_now = arrow.get(1367900664)
+    mocker.patch('arrow.utcnow', return_value=mock_now)
+
+    start = get_start_ts_from_timerange(((None, 'line'), None, -200), '1m')
+    assert start == (1367900664 - 200 * 60) * 1000
+
+    start = get_start_ts_from_timerange(((None, 'line'), None, -200), '5m')
+    assert start == (1367900664 - 5 * 200 * 60) * 1000
+
+
 def test_download_pairs_exception(ticker_history, mocker, caplog) -> None:
     mocker.patch('freqtrade.optimize.__init__.get_ticker_history', return_value=ticker_history)
     mocker.patch('freqtrade.optimize.__init__.download_backtesting_testdata',
@@ -221,12 +245,12 @@ def test_trim_tickerlist() -> None:
     ticker_list_len = len(ticker_list)
 
     # Test the pattern ^(-\d+)$
-    # This pattern remove X element from the beginning
-    timerange = ((None, 'line'), None, 5)
+    # This pattern uses the latest N elements
+    timerange = ((None, 'line'), None, -5)
     ticker = trim_tickerlist(ticker_list, timerange)
     ticker_len = len(ticker)
 
-    assert ticker_list_len == ticker_len + 5
+    assert ticker_len == 5
     assert ticker_list[0] is not ticker[0]  # The first element should be different
     assert ticker_list[-1] is ticker[-1]  # The last element must be the same
@@ -251,6 +275,37 @@ def test_trim_tickerlist() -> None:
     assert ticker_list[5] is ticker[0]  # The list starts at the index 5
     assert ticker_list[9] is ticker[-1]  # The list ends at the index 9 (5 elements)
 
+    # Test the pattern ^(\d{8})-(\d{8})$
+    # This pattern extracts a window between the dates
+    timerange = (('date', 'date'), ticker_list[5][0] / 1000, ticker_list[10][0] / 1000 - 1)
+    ticker = trim_tickerlist(ticker_list, timerange)
+    ticker_len = len(ticker)
+
+    assert ticker_len == 5
+    assert ticker_list[0] is not ticker[0]  # The first element should be different
+    assert ticker_list[5] is ticker[0]  # The list starts at the index 5
+    assert ticker_list[9] is ticker[-1]  # The list ends at the index 9 (5 elements)
+
+    # Test the pattern ^-(\d{8})$
+    # This pattern extracts elements from the start to the date
+    timerange = ((None, 'date'), None, ticker_list[10][0] / 1000 - 1)
+    ticker = trim_tickerlist(ticker_list, timerange)
+    ticker_len = len(ticker)
+
+    assert ticker_len == 10
+    assert ticker_list[0] is ticker[0]  # The start of the list is included
+    assert ticker_list[9] is ticker[-1]  # The element 10 is not included
+
+    # Test the pattern ^(\d{8})-$
+    # This pattern extracts elements from the date to now
+    timerange = (('date', None), ticker_list[10][0] / 1000 - 1, None)
+    ticker = trim_tickerlist(ticker_list, timerange)
+    ticker_len = len(ticker)
+
+    assert ticker_len == ticker_list_len - 10
+    assert ticker_list[10] is ticker[0]  # The first element is element #10
+    assert ticker_list[-1] is ticker[-1]  # The last element is the same
+
     # Test a wrong pattern
     # This pattern must return the list unchanged
     timerange = ((None, None), None, 5)
@@ -89,6 +89,12 @@ def test_parse_args_dynamic_whitelist_invalid_values() -> None:
 def test_parse_timerange_incorrect() -> None:
     assert ((None, 'line'), None, -200) == Arguments.parse_timerange('-200')
     assert (('line', None), 200, None) == Arguments.parse_timerange('200-')
     assert (('index', 'index'), 200, 500) == Arguments.parse_timerange('200-500')
 
+    assert (('date', None), 1274486400, None) == Arguments.parse_timerange('20100522-')
+    assert ((None, 'date'), None, 1274486400) == Arguments.parse_timerange('-20100522')
+    assert (('date', 'date'), 1274486400, 1438214400) == Arguments.parse_timerange('20100522-20150730')
+
     with pytest.raises(Exception, match=r'Incorrect syntax.*'):
         Arguments.parse_timerange('-')
@@ -4,8 +4,9 @@
 import json
 import sys
 import os
 import time
 import datetime
+import arrow
 
 from freqtrade import (exchange, arguments, misc)
 
 DEFAULT_DL_PATH = 'freqtrade/tests/testdata'
@@ -15,9 +16,6 @@ args = arguments.parse_args()
 TICKER_INTERVALS = ['1m', '5m']
 PAIRS = []
-MIN_SECCONDS = 60
-HOUR_SECCONDS = 60 * MIN_SECCONDS
-DAY_SECCONDS = 24 * HOUR_SECCONDS
 
 if args.pairs_file:
     with open(args.pairs_file) as file:
@@ -28,6 +26,11 @@ dl_path = DEFAULT_DL_PATH
 if args.export and os.path.exists(args.export):
     dl_path = args.export
 
+since_time = None
+if args.days:
+    since_time = arrow.utcnow().shift(days=-args.days).timestamp * 1000
+
+
 print(f'About to download pairs: {PAIRS} to {dl_path}')
 
 # Init exchange
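since_time is now derived once, up front, with arrow; it matches the time.time()/DAY_SECCONDS arithmetic removed from the download loop below, up to sub-second rounding:

import time

import arrow

days = 30  # hypothetical value of args.days
since_time = arrow.utcnow().shift(days=-days).timestamp * 1000
legacy_since = int((time.time() - days * 24 * 60 * 60) * 1000)
assert abs(since_time - legacy_since) < 1500  # same instant, modulo rounding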
@@ -35,29 +38,19 @@ exchange._API = exchange.init_ccxt({'key': '',
                                     'secret': '',
                                     'name': args.exchange})
 
 
 for pair in PAIRS:
     for tick_interval in TICKER_INTERVALS:
         print(f'downloading pair {pair}, interval {tick_interval}')
 
-        since_time = None
-        if args.days:
-            since_time = int((time.time() - args.days * DAY_SECCONDS) * 1000)
-        # download data until it reaches today now time
-        data = []
-        while not since_time or since_time < (time.time() - 10 * MIN_SECCONDS) * 1000:
-            data_part = exchange.get_ticker_history(pair, tick_interval, since=since_time)
-
-            if not data_part:
-                print('\tNo data since %s' % datetime.datetime.utcfromtimestamp(since_time / 1000).strftime('%Y-%m-%dT%H:%M:%S'))
-                break
-
-            print('\tData received for period %s - %s' %
-                  (datetime.datetime.utcfromtimestamp(data_part[0][0] / 1000).strftime('%Y-%m-%dT%H:%M:%S'),
-                   datetime.datetime.utcfromtimestamp(data_part[-1][0] / 1000).strftime('%Y-%m-%dT%H:%M:%S')))
-
-            data.extend(data_part)
-            since_time = data[-1][0] + 1
+        data = exchange.get_ticker_history(pair, tick_interval, since=since_time)
+        if not data:
+            print('\tNo data was downloaded')
+            break
+
+        print('\tData was downloaded for period %s - %s' % (
+            arrow.get(data[0][0] / 1000).format(),
+            arrow.get(data[-1][0] / 1000).format()))
 
         # save data
         pair_print = pair.replace('/', '_')