handle index out of range error

trim_tickerlist is called from load_tickerdata_file.
trim_tickerlist throws an IndexError ("list index out of range") when parsing a ticker JSON file for a timerange outside its current contents.
This patch to load_tickerdata_file checks whether the requested timerange is in the file and, if not, avoids the exception by downloading the file again with the complete range. If the exchange does not have the full range, a message is logged and the data is returned without being passed to trim_tickerlist.

In summary, if the JSON file does not have the full requested time range:
1. download the file
2. if the time range is still missing, log it and continue without trimming the ticker list (see the sketch below).
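
For illustration only, here is a minimal self-contained sketch of that flow (standalone Python, not freqtrade code: CACHE, EXCHANGE, covers_range, trim and load_pair are made-up names, and candle rows are assumed to be [timestamp_ms, open, high, low, close, volume] with timerange bounds in epoch seconds, matching the comparisons in the patch):
"""
from typing import Dict, List

Candle = List[float]  # [timestamp_ms, open, high, low, close, volume]

# Hypothetical cache / exchange contents (illustrative, not real ZEN/BTC data).
CACHE: Dict[str, List[Candle]] = {'ZEN/BTC': [[1527033600000, 0, 0, 0, 0, 0]]}
EXCHANGE: Dict[str, List[Candle]] = {'ZEN/BTC': [[1526947200000, 0, 0, 0, 0, 0],
                                                 [1527033600000, 0, 0, 0, 0, 0]]}


def covers_range(candles: List[Candle], start_s: int, stop_s: int) -> bool:
    # Bounds are epoch seconds, candle timestamps are epoch milliseconds.
    return bool(candles) and candles[0][0] <= start_s * 1000 and candles[-1][0] >= stop_s * 1000


def trim(candles: List[Candle], start_s: int, stop_s: int) -> List[Candle]:
    # Bounds-safe trim: keep only candles inside the requested range.
    return [c for c in candles if start_s * 1000 <= c[0] <= stop_s * 1000]


def load_pair(pair: str, start_s: int, stop_s: int) -> List[Candle]:
    candles = CACHE.get(pair, [])
    if not covers_range(candles, start_s, stop_s):   # range missing from the cached file
        candles = EXCHANGE.get(pair, [])             # 1. download the file (simulated)
    if not covers_range(candles, start_s, stop_s):   # 2. still missing after the download
        print(pair + ': requested timerange unavailable from exchange')
        return candles                               #    log and continue without trimming
    return trim(candles, start_s, stop_s)


if __name__ == '__main__':
    # 2018-05-22 00:00 UTC = 1526947200, 2018-05-23 00:00 UTC = 1527033600 (epoch seconds)
    print(load_pair('ZEN/BTC', 1526947200, 1527033600))
"""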

Here is an example of the error prior to patching and the log output after patching.
This is from Binance, using the pair "ZEN/BTC" and timerange "20180522-20180523".
"""
  File "/Users/creslin/PycharmProjects/freqtrade/freqtrade/optimize/__init__.py", line 107, in load_data
    pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange)
  File "/Users/creslin/PycharmProjects/freqtrade/freqtrade/optimize/__init__.py", line 84, in load_tickerdata_file
    pairdata = trim_tickerlist(pairdata, timerange)
  File "/Users/creslin/PycharmProjects/freqtrade/freqtrade/optimize/__init__.py", line 36, in trim_tickerlist
    while tickerlist[start_index][0] < start * 1000:
IndexError: list index out of range
"""

"""
2018-05-31 14:08:04,680 - freqtrade.optimize - INFO - Start timerange not in cached data
2018-05-31 14:08:04,680 - freqtrade.optimize - INFO - Download the pair: "ZEN/BTC", Interval: 5m
dumping json to "/Users/creslin/PycharmProjects/freqtrade/freqtrade/tests/testdata/ZEN_BTC-5m.json"
2018-05-31 14:08:08,225 - freqtrade.optimize - INFO - Start timerange unavailable from exchange
"""
Author: creslinux
Date:   2018-05-31 19:02:02 +03:00
Parent: 52386d8153
Commit: 1e707ea9c9

freqtrade/optimize/__init__.py

@@ -29,7 +29,7 @@ def trim_tickerlist(tickerlist: List[Dict], timerange: Tuple[Tuple, int, int]) -
     if stype[0] == 'index':
         start_index = start
     elif stype[0] == 'date':
-        while start_index < len(tickerlist) and tickerlist[start_index][0] < start * 1000:
+        while tickerlist[start_index][0] < start * 1000:
             start_index += 1
 
     if stype[1] == 'line':
@@ -37,7 +37,7 @@ def trim_tickerlist(tickerlist: List[Dict], timerange: Tuple[Tuple, int, int]) -
     if stype[1] == 'index':
         stop_index = stop
     elif stype[1] == 'date':
-        while stop_index > 0 and tickerlist[stop_index-1][0] > stop * 1000:
+        while tickerlist[stop_index-1][0] > stop * 1000:
             stop_index -= 1
 
     if start_index > stop_index:
@@ -49,7 +49,8 @@ def trim_tickerlist(tickerlist: List[Dict], timerange: Tuple[Tuple, int, int]) -
 def load_tickerdata_file(
         datadir: str, pair: str,
         ticker_interval: str,
-        timerange: Optional[Tuple[Tuple, int, int]] = None) -> Optional[List[Dict]]:
+        timerange: Optional[Tuple[Tuple, int, int]] = None,
+        load_count=1 ) -> Optional[List[Dict]]:
     """
     Load a pair from file,
     :return dict OR empty if unsuccesful
@@ -74,6 +75,34 @@ def load_tickerdata_file(
             pairdata = json.load(tickerdata)
     else:
         return None
 
+    """
+    Check if timerange is in the pairdata loaded
+    Return None is not, which will then download the file.
+    This is to avoid trim_tickerlist throwing
+    "list index out of range" error else.
+    If we've been around the download loop and still missng a start
+    or end date, return pair data there is without trimming and log
+    exchange does not have the range requested
+    """
+    if timerange:
+        stype, start, stop = timerange
+        if stype[0] == 'date':
+            if ((pairdata[0][0]) > (start * 1000)):
+                if load_count > 1:
+                    logger.info('Start timerange unavailable from exchange')
+                    return pairdata
+                else:
+                    logger.info('Start timerange not in cached data')
+                    return None
+        if stype[1] == 'date':
+            if (pairdata[(len(pairdata) - 1)][0]) < (stop * 1000):
+                logger.info('End timerange not in cached data')
+                if load_count > 1:
+                    logger.info('End timerange for unavailable from exchange')
+                    return pairdata
+                else:
+                    logger.info('End timerange for not in cached data')
+                    return None
     if timerange:
         pairdata = trim_tickerlist(pairdata, timerange)
@@ -99,12 +128,18 @@ def load_data(datadir: str,
         download_pairs(datadir, _pairs, ticker_interval, timerange=timerange)
 
     for pair in _pairs:
-        pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange)
-        if pairdata:
-            result[pair] = pairdata
-        else:
-            logger.warn('No data for pair %s, use --update-pairs-cached to download the data', pair)
+        pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange, load_count=1)
+        if not pairdata:
+            # download the tickerdata from exchange
+            download_backtesting_testdata(datadir,
+                                          pair=pair,
+                                          tick_interval=ticker_interval,
+                                          timerange=timerange)
+            # and retry reading the pair
+            # TODO if load_tickerdata returns None we're doing nothing with it.
+            # Added load_count argument
+            pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange, load_count=2)
+        result[pair] = pairdata
 
     return result