Merge pull request #1880 from hroff-1902/exchange-debuglog

minor: exchange debug logging humanized
Matthias 2019-05-26 19:26:19 +02:00 committed by GitHub
commit 3e2c808b4b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 16 additions and 5 deletions
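
For context, "humanized" here means the raw millisecond spans in the debug output gain a human-readable rendering via arrow's humanize(only_distance=True). A minimal standalone sketch of what the new one_call message produces (the 5m/500 values are illustrative, picked to match the _LIMIT used below):

    import arrow

    # Illustrative numbers: a 5m timeframe times a 500-candle limit
    one_call = 5 * 60 * 1000 * 500  # 150,000,000 msecs per fetch

    # only_distance=True drops the "in"/"ago" wording and keeps just the span
    readable = arrow.utcnow().shift(seconds=one_call // 1000).humanize(only_distance=True)
    print("one_call: %s msecs (%s)" % (one_call, readable))
    # e.g. "one_call: 150000000 msecs (2 days)" -- exact wording depends on the arrow version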


@@ -510,7 +510,11 @@ class Exchange(object):
         _LIMIT = 500
         one_call = timeframe_to_msecs(ticker_interval) * _LIMIT
-        logger.debug("one_call: %s msecs", one_call)
+        logger.debug(
+            "one_call: %s msecs (%s)",
+            one_call,
+            arrow.utcnow().shift(seconds=one_call // 1000).humanize(only_distance=True)
+        )
         input_coroutines = [self._async_get_candle_history(
             pair, ticker_interval, since) for since in
             range(since_ms, arrow.utcnow().timestamp * 1000, one_call)]
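
The input_coroutines comprehension above splits the download into _LIMIT-sized windows, one coroutine per window start. A small self-contained illustration of that chunking (hypothetical timestamps, no exchange calls):

    import time

    # Hypothetical values: 5m candles, 500 candles per call (matching _LIMIT above)
    one_call = 5 * 60 * 1000 * 500                # msecs covered by one fetch
    now_ms = int(time.time() * 1000)
    since_ms = now_ms - 3 * one_call              # pretend three windows of history are missing

    # One "since" start per _async_get_candle_history() call, as range() does above
    starts = list(range(since_ms, now_ms, one_call))
    print(len(starts), starts)                    # 3 window start timestamps
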
@@ -541,7 +545,10 @@ class Exchange(object):
                     or self._now_is_time_to_refresh(pair, ticker_interval)):
                 input_coroutines.append(self._async_get_candle_history(pair, ticker_interval))
             else:
-                logger.debug("Using cached ohlcv data for %s, %s ...", pair, ticker_interval)
+                logger.debug(
+                    "Using cached ohlcv data for pair %s, interval %s ...",
+                    pair, ticker_interval
+                )
         tickers = asyncio.get_event_loop().run_until_complete(
             asyncio.gather(*input_coroutines, return_exceptions=True))
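
For readers skimming the caching branch above: a fetch is only scheduled when the cached candles for (pair, interval) are considered stale. A hypothetical sketch of such a freshness check; the dict name and the standalone function are illustrative, not the repository's exact _now_is_time_to_refresh() implementation:

    import time

    # Hypothetical cache bookkeeping: (pair, interval_sec) -> unix seconds of last fetch
    _pairs_last_refresh_time = {}

    def now_is_time_to_refresh(pair: str, interval_sec: int) -> bool:
        # Refresh only once a full candle interval has elapsed since the last fetch
        last_refresh = _pairs_last_refresh_time.get((pair, interval_sec), 0)
        return (last_refresh + interval_sec) < time.time()

    # First call: stale, schedule a fetch; second call right after: use the cache
    print(now_is_time_to_refresh('IOTA/ETH', 300))              # True
    _pairs_last_refresh_time[('IOTA/ETH', 300)] = time.time()
    print(now_is_time_to_refresh('IOTA/ETH', 300))              # False -> "Using cached ohlcv data ..."
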
@@ -578,7 +585,11 @@ class Exchange(object):
         """
         try:
             # fetch ohlcv asynchronously
-            logger.debug("fetching %s, %s since %s ...", pair, ticker_interval, since_ms)
+            s = '(' + arrow.get(since_ms // 1000).isoformat() + ') ' if since_ms is not None else ''
+            logger.debug(
+                "Fetching pair %s, interval %s, since %s %s...",
+                pair, ticker_interval, since_ms, s
+            )
             data = await self._api_async.fetch_ohlcv(pair, timeframe=ticker_interval,
                                                      since=since_ms)
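
The new s variable decorates the raw since_ms with its ISO-8601 equivalent so the log line is readable at a glance. A quick standalone illustration (pair, interval, and since_ms are made-up values):

    import arrow

    since_ms = 1546300800000  # made-up example: 2019-01-01 00:00:00 UTC

    s = '(' + arrow.get(since_ms // 1000).isoformat() + ') ' if since_ms is not None else ''
    print("Fetching pair %s, interval %s, since %s %s..." % ('IOTA/ETH', '5m', since_ms, s))
    # Fetching pair IOTA/ETH, interval 5m, since 1546300800000 (2019-01-01T00:00:00+00:00) ...
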
@@ -593,7 +604,7 @@ class Exchange(object):
             except IndexError:
                 logger.exception("Error loading %s. Result was %s.", pair, data)
                 return pair, ticker_interval, []
-            logger.debug("done fetching %s, %s ...", pair, ticker_interval)
+            logger.debug("Done fetching pair %s, interval %s ...", pair, ticker_interval)
             return pair, ticker_interval, data
         except ccxt.NotSupported as e:


@@ -1016,7 +1016,7 @@ def test_refresh_latest_ohlcv(mocker, default_conf, caplog) -> None:
     exchange.refresh_latest_ohlcv([('IOTA/ETH', '5m'), ('XRP/ETH', '5m')])
     assert exchange._api_async.fetch_ohlcv.call_count == 2
-    assert log_has(f"Using cached ohlcv data for {pairs[0][0]}, {pairs[0][1]} ...",
+    assert log_has(f"Using cached ohlcv data for pair {pairs[0][0]}, interval {pairs[0][1]} ...",
                    caplog.record_tuples)
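
log_has is a helper from the project's test suite; the adjusted assertion simply has to match the reworded message verbatim. A minimal sketch of what such a helper typically checks against pytest's caplog.record_tuples (an assumption about its behaviour, not the repository's exact code):

    from typing import List, Tuple

    def log_has(line: str, logs: List[Tuple[str, int, str]]) -> bool:
        # caplog.record_tuples holds (logger_name, level, message) tuples;
        # the assertion passes when any captured message equals the expected line.
        return any(message == line for _name, _level, message in logs)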