Cleanup some comments and code formatting

commit acd07d40a0
parent d0c9791ca6
Author: Matthias
Date:   2018-12-15 19:52:52 +01:00


@@ -70,7 +70,7 @@ def load_tickerdata_file(
         ticker_interval: str,
         timerange: Optional[TimeRange] = None) -> Optional[List[Dict]]:
     """
-    Load a pair from file,
+    Load a pair from file, either .json.gz or .json
     :return dict(<pair>:<tickerlist>) or None if unsuccesful
     """
     path = make_testdata_path(datadir)
@@ -78,8 +78,7 @@ def load_tickerdata_file(
     file = path.joinpath(f'{pair_s}-{ticker_interval}.json')
     gzipfile = file.with_suffix(file.suffix + '.gz')
 
-    # If the file does not exist we download it when None is returned.
-    # If file exists, read the file, load the json
+    # Try gzip file first, otherwise regular json file.
     if gzipfile.is_file():
         logger.debug('Loading ticker data from file %s', gzipfile)
         with gzip.open(gzipfile) as tickerdata:
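
The rewritten comment names the lookup order this block implements: prefer the compressed file, fall back to the plain json. A minimal standalone sketch of that pattern, assuming json-encoded candle lists on disk (the function name load_ticker_file is illustrative, not the project's):

import gzip
import json
from pathlib import Path
from typing import Any, List, Optional


def load_ticker_file(file: Path) -> Optional[List[List[Any]]]:
    # Prefer the gzipped variant if it exists, otherwise read plain json.
    gzipfile = file.with_suffix(file.suffix + '.gz')
    if gzipfile.is_file():
        with gzip.open(gzipfile, 'rt') as fh:
            return json.load(fh)
    if file.is_file():
        with open(file, 'rt') as fh:
            return json.load(fh)
    return None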
@@ -129,12 +128,9 @@ def load_data(datadir: Optional[Path],
                         arrow.get(pairdata[-1][0] // 1000).strftime('%Y-%m-%d %H:%M:%S'))
             result[pair] = parse_ticker_dataframe(pairdata)
         else:
-            logger.warning(
-                'No data for pair: "%s", Interval: %s. '
-                'Use --refresh-pairs-cached to download the data',
-                pair,
-                ticker_interval
-            )
+            logger.warning('No data for pair: "%s", Interval: %s. '
+                           'Use --refresh-pairs-cached to download the data',
+                           pair, ticker_interval)
 
     return result
 
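
Both the old and the new form of this call rely on two standard Python idioms: adjacent string literals are concatenated by the parser into one format string, and the logging module interpolates the %s arguments lazily, only when the record is actually emitted. A small sketch independent of this codebase:

import logging

logger = logging.getLogger(__name__)

pair, ticker_interval = 'ETH/BTC', '5m'
# The two adjacent literals are joined at compile time; the arguments
# are interpolated by logging only if WARNING is enabled, so no string
# is built for suppressed log levels.
logger.warning('No data for pair: "%s", Interval: %s. '
               'Use --refresh-pairs-cached to download the data',
               pair, ticker_interval)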
@@ -156,20 +152,15 @@ def download_pairs(datadir, exchange: Exchange, pairs: List[str],
                                           tick_interval=ticker_interval,
                                           timerange=timerange)
         except BaseException:
-            logger.info(
-                'Failed to download the pair: "%s", Interval: %s',
-                pair,
-                ticker_interval
-            )
+            logger.info('Failed to download the pair: "%s", Interval: %s',
+                        pair, ticker_interval)
             return False
     return True
 
 
-def load_cached_data_for_updating(filename: Path,
-                                  tick_interval: str,
-                                  timerange: Optional[TimeRange]) -> Tuple[
-                                      List[Any],
-                                      Optional[int]]:
+def load_cached_data_for_updating(filename: Path, tick_interval: str,
+                                  timerange: Optional[TimeRange]) -> Tuple[List[Any],
+                                                                           Optional[int]]:
     """
     Load cached data and choose what part of the data should be updated
     """
@@ -188,8 +179,7 @@ def load_cached_data_for_updating(filename: Path,
     if filename.is_file():
         with open(filename, "rt") as file:
             data = json_load(file)
-            # remove the last item, because we are not sure if it is correct
-            # it could be fetched when the candle was incompleted
+            # remove the last item, could be incomplete candle
             if data:
                 data.pop()
     else:
@@ -197,12 +187,10 @@ def load_cached_data_for_updating(filename: Path,
     if data:
         if since_ms and since_ms < data[0][0]:
-            # the data is requested for earlier period than the cache has
-            # so fully redownload all the data
+            # Earlier data than existing data requested, redownload all
             data = []
         else:
-            # a part of the data was already downloaded, so
-            # download unexist data only
+            # a part of the data was already downloaded, so download unexist data only
             since_ms = data[-1][0] + 1
 
     return (data, since_ms)
 
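
The rewritten comments summarize the branch: if data older than the cache's first candle is requested, the cache is discarded and everything is redownloaded; otherwise downloading resumes one millisecond after the last cached candle. A worked sketch with made-up timestamps, where candle rows follow the [timestamp_ms, open, high, low, close, volume] layout implied by data[0][0]:

from typing import Any, List, Optional, Tuple


def choose_update_range(data: List[List[Any]],
                        since_ms: Optional[int]) -> Tuple[List[List[Any]], Optional[int]]:
    # Mirrors the branch above: drop the cache when older data is
    # requested, otherwise continue right after the last cached candle.
    if data:
        if since_ms and since_ms < data[0][0]:
            data = []
        else:
            since_ms = data[-1][0] + 1
    return data, since_ms


# Cache covers 10:00 and 10:05 UTC; a request from 10:05 resumes after it.
cached = [[1544868000000, 1, 1, 1, 1, 10], [1544868300000, 1, 1, 1, 1, 12]]
print(choose_update_range(cached, 1544868300000))  # (cached, 1544868300001)
# A request starting before the cache (09:00) discards it entirely.
print(choose_update_range(cached, 1544864400000))  # ([], 1544864400000)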
@@ -230,11 +218,7 @@ def download_backtesting_testdata(datadir: Path,
     filepair = pair.replace("/", "_")
     filename = path.joinpath(f'{filepair}-{tick_interval}.json')
 
-    logger.info(
-        'Download the pair: "%s", Interval: %s',
-        pair,
-        tick_interval
-    )
+    logger.info('Download the pair: "%s", Interval: %s', pair, tick_interval)
 
     data, since_ms = load_cached_data_for_updating(filename, tick_interval, timerange)
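
Downstream of this hunk, since_ms feeds the exchange request so that only the missing range is fetched and appended to the cached file. A hedged sketch of that flow under simplified assumptions (fetch_candles is a stand-in for the exchange call, which this hunk does not show, and the incomplete-candle pop and timerange handling are omitted):

import json
from pathlib import Path
from typing import Any, List, Optional


def fetch_candles(pair: str, tick_interval: str,
                  since_ms: Optional[int]) -> List[List[Any]]:
    # Stand-in for the exchange request; a real implementation would
    # fetch OHLCV candles starting at since_ms.
    return []


def refresh_pair_file(datadir: Path, pair: str, tick_interval: str) -> None:
    # Same file naming scheme as the diff: ETH/BTC -> ETH_BTC-5m.json
    filepair = pair.replace("/", "_")
    filename = datadir.joinpath(f'{filepair}-{tick_interval}.json')
    cached = json.loads(filename.read_text()) if filename.is_file() else []
    since_ms = cached[-1][0] + 1 if cached else None
    cached.extend(fetch_candles(pair, tick_interval, since_ms))
    filename.write_text(json.dumps(cached))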