Merge pull request #3857 from freqtrade/arrow_deprecation_timestamp

Convert timestamp to int_timestamp for all arrow occurrences
Commit 887d78171c by Matthias, 2020-11-02 16:40:43 +01:00 (committed by GitHub)
GPG Key ID: 4AEE18F83AFDEB23 (no known key found for this signature in the database)
13 changed files with 49 additions and 49 deletions
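
The change throughout this PR swaps arrow's deprecated `.timestamp` property for `.int_timestamp`. A minimal sketch of the two patterns (assuming arrow >= 0.17.0, which introduced `int_timestamp`):

```python
import arrow

now = arrow.utcnow()
# Deprecated pattern: reading .timestamp as a property for epoch seconds;
# in later arrow releases .timestamp becomes a method returning a float.
# Pattern used throughout this PR instead:
epoch_seconds = now.int_timestamp      # int, epoch seconds
epoch_ms = now.int_timestamp * 1000    # milliseconds, as used for exchange data
```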

View File

@@ -1,10 +1,9 @@
 import logging
 import sys
 from collections import defaultdict
+from datetime import datetime, timedelta
 from typing import Any, Dict, List

-import arrow
 from freqtrade.configuration import TimeRange, setup_utils_configuration
 from freqtrade.data.converter import convert_ohlcv_format, convert_trades_format
 from freqtrade.data.history import (convert_trades_to_ohlcv, refresh_backtest_ohlcv_data,
@@ -29,7 +28,7 @@ def start_download_data(args: Dict[str, Any]) -> None:
                                    "You can only specify one or the other.")
     timerange = TimeRange()
     if 'days' in config:
-        time_since = arrow.utcnow().shift(days=-config['days']).strftime("%Y%m%d")
+        time_since = (datetime.now() - timedelta(days=config['days'])).strftime("%Y%m%d")
         timerange = TimeRange.parse_timerange(f'{time_since}-')
     if 'timerange' in config:
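
With the stdlib replacement above, this command path no longer needs arrow at all. A minimal standalone sketch (the `days` value is a hypothetical stand-in for `config['days']`):

```python
from datetime import datetime, timedelta

days = 20  # hypothetical stand-in for config['days']
time_since = (datetime.now() - timedelta(days=days)).strftime("%Y%m%d")
print(f"{time_since}-")  # e.g. "20201013-", later parsed by TimeRange.parse_timerange
```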

View File

@@ -52,11 +52,11 @@ class TimeRange:
         :return: None (Modifies the object in place)
         """
         if (not self.starttype or (startup_candles
-                                   and min_date.timestamp >= self.startts)):
+                                   and min_date.int_timestamp >= self.startts)):
             # If no startts was defined, or backtest-data starts at the defined backtest-date
             logger.warning("Moving start-date by %s candles to account for startup time.",
                            startup_candles)
-            self.startts = (min_date.timestamp + timeframe_secs * startup_candles)
+            self.startts = (min_date.int_timestamp + timeframe_secs * startup_candles)
             self.starttype = 'date'

     @staticmethod
@@ -89,7 +89,7 @@ class TimeRange:
         if stype[0]:
             starts = rvals[index]
             if stype[0] == 'date' and len(starts) == 8:
-                start = arrow.get(starts, 'YYYYMMDD').timestamp
+                start = arrow.get(starts, 'YYYYMMDD').int_timestamp
             elif len(starts) == 13:
                 start = int(starts) // 1000
             else:
@@ -98,7 +98,7 @@ class TimeRange:
         if stype[1]:
             stops = rvals[index]
             if stype[1] == 'date' and len(stops) == 8:
-                stop = arrow.get(stops, 'YYYYMMDD').timestamp
+                stop = arrow.get(stops, 'YYYYMMDD').int_timestamp
             elif len(stops) == 13:
                 stop = int(stops) // 1000
             else:

View File

@@ -8,7 +8,6 @@ import logging
 from datetime import datetime, timezone
 from typing import Any, Dict, List, Optional, Tuple

-from arrow import Arrow
 from pandas import DataFrame

 from freqtrade.constants import ListPairsWithTimeframes, PairWithTimeframe
@@ -38,7 +37,7 @@ class DataProvider:
         :param timeframe: Timeframe to get data for
         :param dataframe: analyzed dataframe
         """
-        self.__cached_pairs[(pair, timeframe)] = (dataframe, Arrow.utcnow().datetime)
+        self.__cached_pairs[(pair, timeframe)] = (dataframe, datetime.now(timezone.utc))

     def add_pairlisthandler(self, pairlists) -> None:
         """

View File

@@ -87,7 +87,7 @@ class Edge:
         heartbeat = self.edge_config.get('process_throttle_secs')
         if (self._last_updated > 0) and (
-                self._last_updated + heartbeat > arrow.utcnow().timestamp):
+                self._last_updated + heartbeat > arrow.utcnow().int_timestamp):
             return False

         data: Dict[str, Any] = {}
@@ -146,7 +146,7 @@ class Edge:
         # Fill missing, calculable columns, profit, duration , abs etc.
         trades_df = self._fill_calculable_fields(DataFrame(trades))
         self._cached_pairs = self._process_expectancy(trades_df)
-        self._last_updated = arrow.utcnow().timestamp
+        self._last_updated = arrow.utcnow().int_timestamp

         return True

View File

@@ -291,7 +291,7 @@ class Exchange:
         try:
             self._api.load_markets()
             self._load_async_markets()
-            self._last_markets_refresh = arrow.utcnow().timestamp
+            self._last_markets_refresh = arrow.utcnow().int_timestamp
         except ccxt.BaseError as e:
             logger.warning('Unable to initialize markets. Reason: %s', e)
@@ -300,14 +300,14 @@ class Exchange:
         # Check whether markets have to be reloaded
         if (self._last_markets_refresh > 0) and (
                 self._last_markets_refresh + self.markets_refresh_interval
-                > arrow.utcnow().timestamp):
+                > arrow.utcnow().int_timestamp):
             return None

         logger.debug("Performing scheduled market reload..")
         try:
             self._api.load_markets(reload=True)
             # Also reload async markets to avoid issues with newly listed pairs
             self._load_async_markets(reload=True)
-            self._last_markets_refresh = arrow.utcnow().timestamp
+            self._last_markets_refresh = arrow.utcnow().int_timestamp
         except ccxt.BaseError:
             logger.exception("Could not reload markets.")
@@ -501,7 +501,7 @@ class Exchange:
             'side': side,
             'remaining': _amount,
             'datetime': arrow.utcnow().isoformat(),
-            'timestamp': int(arrow.utcnow().timestamp * 1000),
+            'timestamp': int(arrow.utcnow().int_timestamp * 1000),
             'status': "closed" if ordertype == "market" else "open",
             'fee': None,
             'info': {}
@@ -699,7 +699,7 @@ class Exchange:
         )
         input_coroutines = [self._async_get_candle_history(
             pair, timeframe, since) for since in
-            range(since_ms, arrow.utcnow().timestamp * 1000, one_call)]
+            range(since_ms, arrow.utcnow().int_timestamp * 1000, one_call)]

         results = await asyncio.gather(*input_coroutines, return_exceptions=True)
@@ -766,7 +766,7 @@ class Exchange:
         interval_in_sec = timeframe_to_seconds(timeframe)

         return not ((self._pairs_last_refresh_time.get((pair, timeframe), 0)
-                     + interval_in_sec) >= arrow.utcnow().timestamp)
+                     + interval_in_sec) >= arrow.utcnow().int_timestamp)

     @retrier_async
     async def _async_get_candle_history(self, pair: str, timeframe: str,

View File

@@ -268,9 +268,9 @@ def generate_backtest_stats(btdata: Dict[str, DataFrame],
            'profit_total': results['profit_percent'].sum(),
            'profit_total_abs': results['profit_abs'].sum(),
            'backtest_start': min_date.datetime,
-           'backtest_start_ts': min_date.timestamp * 1000,
+           'backtest_start_ts': min_date.int_timestamp * 1000,
            'backtest_end': max_date.datetime,
-           'backtest_end_ts': max_date.timestamp * 1000,
+           'backtest_end_ts': max_date.int_timestamp * 1000,
            'backtest_days': backtest_days,
            'trades_per_day': round(len(results) / backtest_days, 2) if backtest_days > 0 else 0,
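
The `*_ts` fields keep their existing millisecond convention; only the seconds source changes. A brief sketch with a hypothetical `min_date`:

```python
import arrow

min_date = arrow.Arrow(2020, 1, 1)  # hypothetical backtest start
# int_timestamp is epoch seconds; the stats dict stores milliseconds.
backtest_start_ts = min_date.int_timestamp * 1000
print(backtest_start_ts)  # 1577836800000
```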

View File

@@ -108,13 +108,13 @@ class Wallets:
         for trading operations, the latest balance is needed.
         :param require_update: Allow skipping an update if balances were recently refreshed
         """
-        if (require_update or (self._last_wallet_refresh + 3600 < arrow.utcnow().timestamp)):
+        if (require_update or (self._last_wallet_refresh + 3600 < arrow.utcnow().int_timestamp)):
             if self._config['dry_run']:
                 self._update_dry()
             else:
                 self._update_live()
             logger.info('Wallets synced.')
-            self._last_wallet_refresh = arrow.utcnow().timestamp
+            self._last_wallet_refresh = arrow.utcnow().int_timestamp

     def get_all_balances(self) -> Dict[str, Any]:
         return self._wallets

View File

@@ -69,7 +69,7 @@ setup(name='freqtrade',
          'ccxt>=1.24.96',
          'SQLAlchemy',
          'python-telegram-bot',
-         'arrow',
+         'arrow>=0.17.0',
          'cachetools',
          'requests',
          'urllib3',
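
The bumped lower bound matches the API change: `int_timestamp` only exists from arrow 0.17.0 onwards, so an older installation would fail at attribute access. A quick sanity-check sketch:

```python
import arrow

now = arrow.utcnow()
# With arrow >= 0.17.0 this attribute exists; older versions raise AttributeError,
# hence the raised minimum version above.
assert hasattr(now, "int_timestamp")
print(now.int_timestamp)
```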

View File

@@ -601,7 +601,7 @@ def test_download_data_timerange(mocker, caplog, markets):
     start_download_data(get_args(args))
     assert dl_mock.call_count == 1
     # 20days ago
-    days_ago = arrow.get(arrow.utcnow().shift(days=-20).date()).timestamp
+    days_ago = arrow.get(arrow.utcnow().shift(days=-20).date()).int_timestamp
     assert dl_mock.call_args_list[0][1]['timerange'].startts == days_ago

     dl_mock.reset_mock()
@@ -614,7 +614,8 @@ def test_download_data_timerange(mocker, caplog, markets):
     start_download_data(get_args(args))
     assert dl_mock.call_count == 1

-    assert dl_mock.call_args_list[0][1]['timerange'].startts == arrow.Arrow(2020, 1, 1).timestamp
+    assert dl_mock.call_args_list[0][1]['timerange'].startts == arrow.Arrow(
+        2020, 1, 1).int_timestamp


 def test_download_data_no_markets(mocker, caplog):

View File

@@ -792,7 +792,7 @@ def limit_buy_order_open():
         'side': 'buy',
         'symbol': 'mocked',
         'datetime': arrow.utcnow().isoformat(),
-        'timestamp': arrow.utcnow().timestamp,
+        'timestamp': arrow.utcnow().int_timestamp,
         'price': 0.00001099,
         'amount': 90.99181073,
         'filled': 0.0,
@@ -911,7 +911,7 @@ def limit_buy_order_canceled_empty(request):
             'info': {},
             'id': '1234512345',
             'clientOrderId': None,
-            'timestamp': arrow.utcnow().shift(minutes=-601).timestamp,
+            'timestamp': arrow.utcnow().shift(minutes=-601).int_timestamp,
             'datetime': arrow.utcnow().shift(minutes=-601).isoformat(),
             'lastTradeTimestamp': None,
             'symbol': 'LTC/USDT',
@@ -932,7 +932,7 @@ def limit_buy_order_canceled_empty(request):
             'info': {},
             'id': 'AZNPFF-4AC4N-7MKTAT',
             'clientOrderId': None,
-            'timestamp': arrow.utcnow().shift(minutes=-601).timestamp,
+            'timestamp': arrow.utcnow().shift(minutes=-601).int_timestamp,
             'datetime': arrow.utcnow().shift(minutes=-601).isoformat(),
             'lastTradeTimestamp': None,
             'status': 'canceled',
@@ -953,7 +953,7 @@ def limit_buy_order_canceled_empty(request):
             'info': {},
             'id': '1234512345',
             'clientOrderId': 'alb1234123',
-            'timestamp': arrow.utcnow().shift(minutes=-601).timestamp,
+            'timestamp': arrow.utcnow().shift(minutes=-601).int_timestamp,
             'datetime': arrow.utcnow().shift(minutes=-601).isoformat(),
             'lastTradeTimestamp': None,
             'symbol': 'LTC/USDT',
@@ -974,7 +974,7 @@ def limit_buy_order_canceled_empty(request):
             'info': {},
             'id': '1234512345',
             'clientOrderId': 'alb1234123',
-            'timestamp': arrow.utcnow().shift(minutes=-601).timestamp,
+            'timestamp': arrow.utcnow().shift(minutes=-601).int_timestamp,
             'datetime': arrow.utcnow().shift(minutes=-601).isoformat(),
             'lastTradeTimestamp': None,
             'symbol': 'LTC/USDT',
@@ -1000,7 +1000,7 @@ def limit_sell_order_open():
         'side': 'sell',
         'pair': 'mocked',
         'datetime': arrow.utcnow().isoformat(),
-        'timestamp': arrow.utcnow().timestamp,
+        'timestamp': arrow.utcnow().int_timestamp,
         'price': 0.00001173,
         'amount': 90.99181073,
         'filled': 0.0,

View File

@@ -323,7 +323,7 @@ def test_load_partial_missing(testdatadir, caplog) -> None:
     start = arrow.get('2018-01-01T00:00:00')
     end = arrow.get('2018-01-11T00:00:00')
     data = load_data(testdatadir, '5m', ['UNITTEST/BTC'], startup_candles=20,
-                     timerange=TimeRange('date', 'date', start.timestamp, end.timestamp))
+                     timerange=TimeRange('date', 'date', start.int_timestamp, end.int_timestamp))
     assert log_has(
         'Using indicator startup period: 20 ...', caplog
     )
@@ -339,7 +339,7 @@ def test_load_partial_missing(testdatadir, caplog) -> None:
     start = arrow.get('2018-01-10T00:00:00')
     end = arrow.get('2018-02-20T00:00:00')
     data = load_data(datadir=testdatadir, timeframe='5m', pairs=['UNITTEST/BTC'],
-                     timerange=TimeRange('date', 'date', start.timestamp, end.timestamp))
+                     timerange=TimeRange('date', 'date', start.int_timestamp, end.int_timestamp))
     # timedifference in 5 minutes
     td = ((end - start).total_seconds() // 60 // 5) + 1
     assert td != len(data['UNITTEST/BTC'])

View File

@@ -50,7 +50,7 @@ def _build_dataframe(buy_ohlc_sell_matrice):
             'date': tests_start_time.shift(
                 minutes=(
                     ohlc[0] *
-                    timeframe_in_minute)).timestamp *
+                    timeframe_in_minute)).int_timestamp *
             1000,
             'buy': ohlc[1],
             'open': ohlc[2],
@@ -71,7 +71,7 @@ def _build_dataframe(buy_ohlc_sell_matrice):

 def _time_on_candle(number):
     return np.datetime64(tests_start_time.shift(
-        minutes=(number * timeframe_in_minute)).timestamp * 1000, 'ms')
+        minutes=(number * timeframe_in_minute)).int_timestamp * 1000, 'ms')


 # End helper functions
@@ -251,7 +251,7 @@ def test_edge_heartbeat_calculate(mocker, edge_conf):
     heartbeat = edge_conf['edge']['process_throttle_secs']

     # should not recalculate if heartbeat not reached
-    edge._last_updated = arrow.utcnow().timestamp - heartbeat + 1
+    edge._last_updated = arrow.utcnow().int_timestamp - heartbeat + 1

     assert edge.calculate() is False
@@ -263,7 +263,7 @@ def mocked_load_data(datadir, pairs=[], timeframe='0m',
     NEOBTC = [
         [
-            tests_start_time.shift(minutes=(x * timeframe_in_minute)).timestamp * 1000,
+            tests_start_time.shift(minutes=(x * timeframe_in_minute)).int_timestamp * 1000,
             math.sin(x * hz) / 1000 + base,
             math.sin(x * hz) / 1000 + base + 0.0001,
             math.sin(x * hz) / 1000 + base - 0.0001,
@@ -275,7 +275,7 @@ def mocked_load_data(datadir, pairs=[], timeframe='0m',
     base = 0.002
     LTCBTC = [
         [
-            tests_start_time.shift(minutes=(x * timeframe_in_minute)).timestamp * 1000,
+            tests_start_time.shift(minutes=(x * timeframe_in_minute)).int_timestamp * 1000,
             math.sin(x * hz) / 1000 + base,
             math.sin(x * hz) / 1000 + base + 0.0001,
             math.sin(x * hz) / 1000 + base - 0.0001,
@@ -299,7 +299,7 @@ def test_edge_process_downloaded_data(mocker, edge_conf):
     assert edge.calculate()
     assert len(edge._cached_pairs) == 2
-    assert edge._last_updated <= arrow.utcnow().timestamp + 2
+    assert edge._last_updated <= arrow.utcnow().int_timestamp + 2


 def test_edge_process_no_data(mocker, edge_conf, caplog):

View File

@@ -393,7 +393,7 @@ def test_reload_markets(default_conf, mocker, caplog):
     exchange = get_patched_exchange(mocker, default_conf, api_mock, id="binance",
                                     mock_markets=False)
     exchange._load_async_markets = MagicMock()
-    exchange._last_markets_refresh = arrow.utcnow().timestamp
+    exchange._last_markets_refresh = arrow.utcnow().int_timestamp
     updated_markets = {'ETH/BTC': {}, "LTC/BTC": {}}

     assert exchange.markets == initial_markets
@@ -404,7 +404,7 @@ def test_reload_markets(default_conf, mocker, caplog):
     assert exchange._load_async_markets.call_count == 0

     # more than 10 minutes have passed, reload is executed
-    exchange._last_markets_refresh = arrow.utcnow().timestamp - 15 * 60
+    exchange._last_markets_refresh = arrow.utcnow().int_timestamp - 15 * 60
     exchange.reload_markets()
     assert exchange.markets == updated_markets
     assert exchange._load_async_markets.call_count == 1
@@ -1272,7 +1272,7 @@ def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name):
     exchange = get_patched_exchange(mocker, default_conf, id=exchange_name)
     ohlcv = [
         [
-            arrow.utcnow().timestamp * 1000,  # unix timestamp ms
+            arrow.utcnow().int_timestamp * 1000,  # unix timestamp ms
             1,  # open
             2,  # high
             3,  # low
@@ -1289,7 +1289,8 @@ def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name):
     # one_call calculation * 1.8 should do 2 calls
     since = 5 * 60 * exchange._ft_has['ohlcv_candle_limit'] * 1.8
-    ret = exchange.get_historic_ohlcv(pair, "5m", int((arrow.utcnow().timestamp - since) * 1000))
+    ret = exchange.get_historic_ohlcv(pair, "5m", int((
+        arrow.utcnow().int_timestamp - since) * 1000))

     assert exchange._async_get_candle_history.call_count == 2
     # Returns twice the above OHLCV data
@@ -1308,7 +1309,7 @@ def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name):
 def test_refresh_latest_ohlcv(mocker, default_conf, caplog) -> None:
     ohlcv = [
         [
-            (arrow.utcnow().timestamp - 1) * 1000,  # unix timestamp ms
+            (arrow.utcnow().int_timestamp - 1) * 1000,  # unix timestamp ms
             1,  # open
             2,  # high
             3,  # low
@@ -1316,7 +1317,7 @@ def test_refresh_latest_ohlcv(mocker, default_conf, caplog) -> None:
             5,  # volume (in quote currency)
         ],
         [
-            arrow.utcnow().timestamp * 1000,  # unix timestamp ms
+            arrow.utcnow().int_timestamp * 1000,  # unix timestamp ms
             3,  # open
             1,  # high
             4,  # low
@@ -1362,7 +1363,7 @@
 async def test__async_get_candle_history(default_conf, mocker, caplog, exchange_name):
     ohlcv = [
         [
-            arrow.utcnow().timestamp * 1000,  # unix timestamp ms
+            arrow.utcnow().int_timestamp * 1000,  # unix timestamp ms
             1,  # open
             2,  # high
             3,  # low
@@ -1397,14 +1398,14 @@ async def test__async_get_candle_history(default_conf, mocker, caplog, exchange_name):
         api_mock.fetch_ohlcv = MagicMock(side_effect=ccxt.BaseError("Unknown error"))
         exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
         await exchange._async_get_candle_history(pair, "5m",
-                                                 (arrow.utcnow().timestamp - 2000) * 1000)
+                                                 (arrow.utcnow().int_timestamp - 2000) * 1000)

     with pytest.raises(OperationalException, match=r'Exchange.* does not support fetching '
                                                    r'historical candle \(OHLCV\) data\..*'):
         api_mock.fetch_ohlcv = MagicMock(side_effect=ccxt.NotSupported("Not supported"))
         exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
         await exchange._async_get_candle_history(pair, "5m",
-                                                 (arrow.utcnow().timestamp - 2000) * 1000)
+                                                 (arrow.utcnow().int_timestamp - 2000) * 1000)


 @pytest.mark.asyncio
@@ -1650,13 +1651,13 @@ async def test__async_fetch_trades(default_conf, mocker, caplog, exchange_name,
     with pytest.raises(OperationalException, match=r'Could not fetch trade data*'):
         api_mock.fetch_trades = MagicMock(side_effect=ccxt.BaseError("Unknown error"))
         exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
-        await exchange._async_fetch_trades(pair, since=(arrow.utcnow().timestamp - 2000) * 1000)
+        await exchange._async_fetch_trades(pair, since=(arrow.utcnow().int_timestamp - 2000) * 1000)

     with pytest.raises(OperationalException, match=r'Exchange.* does not support fetching '
                                                    r'historical trade data\..*'):
         api_mock.fetch_trades = MagicMock(side_effect=ccxt.NotSupported("Not supported"))
         exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
-        await exchange._async_fetch_trades(pair, since=(arrow.utcnow().timestamp - 2000) * 1000)
+        await exchange._async_fetch_trades(pair, since=(arrow.utcnow().int_timestamp - 2000) * 1000)


 @pytest.mark.asyncio