avoid list index out of range in trim_tickerlist

trim_tickerlist is called from load_tickerdata_file.
trim_tickerlist will throw an "index out of range" error when parsing a json file for a timerange outside its contents.
This patch to load_tickerdata_file checks whether the timerange is in the file and handles the two cases as follows (a simplified sketch of the resulting flow follows the list):
 - first time hit:
 ---- return None; the existing code will then try to download the file/timerange from the exchange
 - second time hit:
 ---- log that the exchange does not have the timerange requested
 ---- bypass the trim_tickerlist call to avoid triggering the "index out of range" error
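
Roughly, the per-pair flow in load_data after this patch, as a simplified sketch of the diff further down (not the verbatim code):

# Simplified sketch of the per-pair loop in load_data after this patch.
pairdata = load_tickerdata_file(datadir, pair, ticker_interval,
                                timerange=timerange, load_count=1)
if not pairdata:
    # first time hit: the cached file does not cover the timerange, so try the exchange
    download_backtesting_testdata(datadir, pair=pair,
                                  tick_interval=ticker_interval,
                                  timerange=timerange)
    # second time hit: load_count=2 makes load_tickerdata_file return whatever
    # data it has (untrimmed) and log that the exchange lacks the requested range
    pairdata = load_tickerdata_file(datadir, pair, ticker_interval,
                                    timerange=timerange, load_count=2)
result[pair] = pairdata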

Here is an example of the error prior to the patch, and the log output after patching.
This is from Binance using the pair "ZEN/BTC" and timerange "20180522-20180523":
"""
  File "/Users/creslin/PycharmProjects/freqtrade/freqtrade/optimize/__init__.py", line 107, in load_data
    pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange)
  File "/Users/creslin/PycharmProjects/freqtrade/freqtrade/optimize/__init__.py", line 84, in load_tickerdata_file
    pairdata = trim_tickerlist(pairdata, timerange)
  File "/Users/creslin/PycharmProjects/freqtrade/freqtrade/optimize/__init__.py", line 36, in trim_tickerlist
    while tickerlist[start_index][0] < start * 1000:
IndexError: list index out of range
""""

"""
2018-05-31 14:08:04,680 - freqtrade.optimize - INFO - Start timerange not in cached data
2018-05-31 14:08:04,680 - freqtrade.optimize - INFO - Download the pair: "ZEN/BTC", Interval: 5m
dumping json to "/Users/creslin/PycharmProjects/freqtrade/freqtrade/tests/testdata/ZEN_BTC-5m.json"
2018-05-31 14:08:08,225 - freqtrade.optimize - INFO - Start timerange unavailable from exchange
"""
creslin 2018-05-31 14:33:22 +03:00 committed by GitHub
parent 491aaef73c
commit ba31c809cb

freqtrade/optimize/__init__.py

@@ -49,7 +49,8 @@ def trim_tickerlist(tickerlist: List[Dict], timerange: Tuple[Tuple, int, int]) -
 def load_tickerdata_file(
         datadir: str, pair: str,
         ticker_interval: str,
-        timerange: Optional[Tuple[Tuple, int, int]] = None) -> Optional[List[Dict]]:
+        timerange: Optional[Tuple[Tuple, int, int]] = None,
+        load_count=1) -> Optional[List[Dict]]:
     """
     Load a pair from file,
     :return dict OR empty if unsuccesful
@@ -74,7 +75,35 @@ def load_tickerdata_file(
         pairdata = json.load(tickerdata)
     else:
         return None

+    """
+    Check if timerange is in the pairdata loaded
+    Return None is not, which will then download the file.
+    This is to avoid trim_tickerlist throwing
+    "list index out of range" error else.
+    If we've been around the download loop and still missng a start
+    or end date, return pair data there is without trimming and log
+    exchange does not have the range requested
+    """
+    if timerange:
+        stype, start, stop = timerange
+        if stype[0] == 'date':
+            if (pairdata[0][0]) > (start * 1000):
+                if load_count > 1:
+                    logger.info('Start timerange unavailable from exchange')
+                    return pairdata
+                else:
+                    logger.info('Start timerange not in cached data')
+                    return None
+        if stype[1] == 'date':
+            if (pairdata[(len(pairdata) - 1)][0]) < (stop * 1000):
+                logger.info('End timerange not in cached data')
+                if load_count > 1:
+                    logger.info('End timerange for unavailable from exchange')
+                    return pairdata
+                else:
+                    logger.info('End timerange for not in cached data')
+                    return None
     if timerange:
         pairdata = trim_tickerlist(pairdata, timerange)
     return pairdata
@@ -98,7 +127,7 @@ def load_data(datadir: str,
         download_pairs(datadir, _pairs, ticker_interval, timerange=timerange)

     for pair in _pairs:
-        pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange)
+        pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange, load_count=1)
         if not pairdata:
             # download the tickerdata from exchange
             download_backtesting_testdata(datadir,
@@ -106,7 +135,9 @@ def load_data(datadir: str,
                                           pair=pair,
                                           tick_interval=ticker_interval,
                                           timerange=timerange)
             # and retry reading the pair
-            pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange)
+            # TODO if load_tickerdata returns None we're doing nothing with it.
+            # Added load_count argument
+            pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange, load_count=2)
         result[pair] = pairdata
     return result
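
For context, the added check assumes the timerange tuple unpacks as (stype, start, stop) with start and stop in epoch seconds, while each candle row in pairdata begins with a millisecond timestamp (hence the * 1000). A minimal standalone sketch of the same guard, under a hypothetical name:

def timerange_in_cached_data(pairdata, timerange):
    """Hypothetical helper mirroring the guard added in this patch.

    pairdata rows are candle lists whose first element is a millisecond
    timestamp; timerange is (stype, start, stop) with start/stop in
    epoch seconds.
    """
    stype, start, stop = timerange
    if stype[0] == 'date' and pairdata[0][0] > start * 1000:
        return False  # cached data begins after the requested start
    if stype[1] == 'date' and pairdata[-1][0] < stop * 1000:
        return False  # cached data ends before the requested stop
    return True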