Merge pull request #3129 from freqtrade/trades_to_list

Convert dl-trades data download to list
Matthias 2020-05-13 09:41:26 +02:00 committed by GitHub
commit d86855f2f3
12 changed files with 221 additions and 90 deletions


@@ -24,6 +24,9 @@ AVAILABLE_DATAHANDLERS = ['json', 'jsongz']
 DRY_RUN_WALLET = 1000
 MATH_CLOSE_PREC = 1e-14  # Precision used for float comparisons
 DEFAULT_DATAFRAME_COLUMNS = ['date', 'open', 'high', 'low', 'close', 'volume']
+# Don't modify sequence of DEFAULT_TRADES_COLUMNS
+# it has wide consequences for stored trades files
+DEFAULT_TRADES_COLUMNS = ['timestamp', 'id', 'type', 'side', 'price', 'amount', 'cost']
 USERPATH_HYPEROPTS = 'hyperopts'
 USERPATH_STRATEGIES = 'strategies'
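
A trade row in the new format lines up positionally with DEFAULT_TRADES_COLUMNS. A minimal sketch, using one of the fixture rows added further down (the dict round-trip is purely illustrative):

    # Column order must match DEFAULT_TRADES_COLUMNS exactly; stored files depend on it.
    DEFAULT_TRADES_COLUMNS = ['timestamp', 'id', 'type', 'side', 'price', 'amount', 'cost']

    trade_row = [1565798399463, '126181329', None, 'buy', 0.019627, 0.04, 0.00078508]

    # Recover a dict view when field names are more convenient than positions.
    trade = dict(zip(DEFAULT_TRADES_COLUMNS, trade_row))
    assert trade['side'] == 'buy' and trade['cost'] == 0.00078508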


@@ -1,14 +1,17 @@
 """
 Functions to convert data from one format to another
 """
+import itertools
 import logging
 from datetime import datetime, timezone
-from typing import Any, Dict
+from operator import itemgetter
+from typing import Any, Dict, List
 
 import pandas as pd
 from pandas import DataFrame, to_datetime
 
-from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS
+from freqtrade.constants import (DEFAULT_DATAFRAME_COLUMNS,
+                                 DEFAULT_TRADES_COLUMNS)
 
 logger = logging.getLogger(__name__)
@@ -154,7 +157,27 @@ def order_book_to_dataframe(bids: list, asks: list) -> DataFrame:
     return frame
 
 
-def trades_to_ohlcv(trades: list, timeframe: str) -> DataFrame:
+def trades_remove_duplicates(trades: List[List]) -> List[List]:
+    """
+    Removes duplicates from the trades list.
+    Uses itertools.groupby to avoid converting to pandas.
+    Tests show it as being pretty efficient on lists of 4M Lists.
+    :param trades: List of Lists with constants.DEFAULT_TRADES_COLUMNS as columns
+    :return: same format as above, but with duplicates removed
+    """
+    return [i for i, _ in itertools.groupby(sorted(trades, key=itemgetter(0)))]
+
+
+def trades_dict_to_list(trades: List[Dict]) -> List[List]:
+    """
+    Convert fetch_trades result into a List (to be more memory efficient).
+    :param trades: List of trades, as returned by ccxt.fetch_trades.
+    :return: List of Lists, with constants.DEFAULT_TRADES_COLUMNS as columns
+    """
+    return [[t[col] for col in DEFAULT_TRADES_COLUMNS] for t in trades]
+
+
+def trades_to_ohlcv(trades: List, timeframe: str) -> DataFrame:
     """
     Converts trades list to OHLCV list
     TODO: This should get a dedicated test
@@ -164,9 +187,10 @@ def trades_to_ohlcv(trades: list, timeframe: str) -> DataFrame:
     """
     from freqtrade.exchange import timeframe_to_minutes
     timeframe_minutes = timeframe_to_minutes(timeframe)
-    df = pd.DataFrame(trades)
-    df['datetime'] = pd.to_datetime(df['datetime'])
-    df = df.set_index('datetime')
+    df = pd.DataFrame(trades, columns=DEFAULT_TRADES_COLUMNS)
+    df['timestamp'] = pd.to_datetime(df['timestamp'], unit='ms', utc=True)
+    df = df.set_index('timestamp')
 
     df_new = df['price'].resample(f'{timeframe_minutes}min').ohlc()
     df_new['volume'] = df['amount'].resample(f'{timeframe_minutes}min').sum()
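
Taken together, the two new helpers convert ccxt's dict output into the compact list format and drop duplicate rows. A usage sketch, assuming freqtrade with this change is importable (only the relevant ccxt keys are shown):

    from freqtrade.data.converter import trades_dict_to_list, trades_remove_duplicates

    raw = [{'timestamp': 1565798399463, 'id': '126181329', 'type': None, 'side': 'buy',
            'price': 0.019627, 'amount': 0.04, 'cost': 0.00078508}]
    rows = trades_dict_to_list(raw)
    # -> [[1565798399463, '126181329', None, 'buy', 0.019627, 0.04, 0.00078508]]
    deduped = trades_remove_duplicates(rows + rows)
    assert deduped == rows  # exact duplicates collapse; output is sorted by timestamp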


@@ -9,10 +9,13 @@ from pandas import DataFrame
 from freqtrade.configuration import TimeRange
 from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS
-from freqtrade.data.converter import ohlcv_to_dataframe, trades_to_ohlcv
+from freqtrade.data.converter import (ohlcv_to_dataframe,
+                                      trades_remove_duplicates,
+                                      trades_to_ohlcv)
 from freqtrade.data.history.idatahandler import IDataHandler, get_datahandler
 from freqtrade.exceptions import OperationalException
 from freqtrade.exchange import Exchange
+from freqtrade.misc import format_ms_time
 
 logger = logging.getLogger(__name__)
@@ -257,27 +260,40 @@ def _download_trades_history(exchange: Exchange,
     """
     try:
 
-        since = timerange.startts * 1000 if timerange and timerange.starttype == 'date' else None
+        since = timerange.startts * 1000 if \
+            (timerange and timerange.starttype == 'date') else int(arrow.utcnow().shift(
+                days=-30).float_timestamp) * 1000
 
         trades = data_handler.trades_load(pair)
 
-        from_id = trades[-1]['id'] if trades else None
-        logger.debug("Current Start: %s", trades[0]['datetime'] if trades else 'None')
-        logger.debug("Current End: %s", trades[-1]['datetime'] if trades else 'None')
+        # TradesList columns are defined in constants.DEFAULT_TRADES_COLUMNS
+        # DEFAULT_TRADES_COLUMNS: 0 -> timestamp
+        # DEFAULT_TRADES_COLUMNS: 1 -> id
+        from_id = trades[-1][1] if trades else None
+        if trades and since < trades[-1][0]:
+            # Reset since to the last available point
+            # - 5 seconds (to ensure we're getting all trades)
+            since = trades[-1][0] - (5 * 1000)
+            logger.info(f"Using last trade date -5s - Downloading trades for {pair} "
+                        f"since: {format_ms_time(since)}.")
+
+        logger.debug(f"Current Start: {format_ms_time(trades[0][0]) if trades else 'None'}")
+        logger.debug(f"Current End: {format_ms_time(trades[-1][0]) if trades else 'None'}")
+        logger.info(f"Current Amount of trades: {len(trades)}")
 
         # Default since_ms to 30 days if nothing is given
         new_trades = exchange.get_historic_trades(pair=pair,
-                                                  since=since if since else
-                                                  int(arrow.utcnow().shift(
-                                                      days=-30).float_timestamp) * 1000,
+                                                  since=since,
                                                   from_id=from_id,
                                                   )
         trades.extend(new_trades[1])
+        # Remove duplicates to make sure we're not storing data we don't need
+        trades = trades_remove_duplicates(trades)
         data_handler.trades_store(pair, data=trades)
 
-        logger.debug("New Start: %s", trades[0]['datetime'])
-        logger.debug("New End: %s", trades[-1]['datetime'])
+        logger.debug(f"New Start: {format_ms_time(trades[0][0])}")
+        logger.debug(f"New End: {format_ms_time(trades[-1][0])}")
         logger.info(f"New Amount of trades: {len(trades)}")
         return True


@@ -8,16 +8,20 @@ from abc import ABC, abstractclassmethod, abstractmethod
 from copy import deepcopy
 from datetime import datetime, timezone
 from pathlib import Path
-from typing import Dict, List, Optional, Type
+from typing import List, Optional, Type
 
 from pandas import DataFrame
 
 from freqtrade.configuration import TimeRange
-from freqtrade.data.converter import clean_ohlcv_dataframe, trim_dataframe
+from freqtrade.data.converter import (clean_ohlcv_dataframe,
+                                      trades_remove_duplicates, trim_dataframe)
 from freqtrade.exchange import timeframe_to_seconds
 
 logger = logging.getLogger(__name__)
 
+# Type for trades list
+TradeList = List[List]
+
 
 class IDataHandler(ABC):
@@ -89,23 +93,25 @@ class IDataHandler(ABC):
         """
 
     @abstractmethod
-    def trades_store(self, pair: str, data: List[Dict]) -> None:
+    def trades_store(self, pair: str, data: TradeList) -> None:
         """
         Store trades data (list of Dicts) to file
         :param pair: Pair - used for filename
-        :param data: List of Dicts containing trade data
+        :param data: List of Lists containing trade data,
+                     column sequence as in DEFAULT_TRADES_COLUMNS
         """
 
     @abstractmethod
-    def trades_append(self, pair: str, data: List[Dict]):
+    def trades_append(self, pair: str, data: TradeList):
         """
         Append data to existing files
         :param pair: Pair - used for filename
-        :param data: List of Dicts containing trade data
+        :param data: List of Lists containing trade data,
+                     column sequence as in DEFAULT_TRADES_COLUMNS
         """
 
     @abstractmethod
-    def trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> List[Dict]:
+    def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> TradeList:
         """
         Load a pair from file, either .json.gz or .json
         :param pair: Load trades for this pair
@@ -121,6 +127,16 @@ class IDataHandler(ABC):
         :return: True when deleted, false if file did not exist.
         """
 
+    def trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> TradeList:
+        """
+        Load a pair from file, either .json.gz or .json
+        Removes duplicates in the process.
+        :param pair: Load trades for this pair
+        :param timerange: Timerange to load trades for - currently not implemented
+        :return: List of trades
+        """
+        return trades_remove_duplicates(self._trades_load(pair, timerange=timerange))
+
     def ohlcv_load(self, pair, timeframe: str,
                    timerange: Optional[TimeRange] = None,
                    fill_missing: bool = True,
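
Callers keep using trades_load(); the public method now wraps the handler-specific _trades_load() and removes duplicates on the way out. A caller-side sketch (the path and the data_format keyword are illustrative assumptions):

    from pathlib import Path
    from freqtrade.data.history.idatahandler import get_datahandler

    dh = get_datahandler(Path('user_data/data/binance'), data_format='jsongz')
    trades = dh.trades_load('XRP/ETH')  # TradeList: rows in DEFAULT_TRADES_COLUMNS order, deduplicated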


@@ -1,6 +1,7 @@
+import logging
 import re
 from pathlib import Path
-from typing import Dict, List, Optional
+from typing import List, Optional
 
 import numpy as np
 from pandas import DataFrame, read_json, to_datetime
@@ -8,8 +9,11 @@ from pandas import DataFrame, read_json, to_datetime
 from freqtrade import misc
 from freqtrade.configuration import TimeRange
 from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS
+from freqtrade.data.converter import trades_dict_to_list
 
-from .idatahandler import IDataHandler
+from .idatahandler import IDataHandler, TradeList
+
+logger = logging.getLogger(__name__)
 
 
 class JsonDataHandler(IDataHandler):
@@ -113,24 +117,26 @@ class JsonDataHandler(IDataHandler):
         # Check if regex found something and only return these results to avoid exceptions.
         return [match[0].replace('_', '/') for match in _tmp if match]
 
-    def trades_store(self, pair: str, data: List[Dict]) -> None:
+    def trades_store(self, pair: str, data: TradeList) -> None:
         """
         Store trades data (list of Dicts) to file
         :param pair: Pair - used for filename
-        :param data: List of Dicts containing trade data
+        :param data: List of Lists containing trade data,
+                     column sequence as in DEFAULT_TRADES_COLUMNS
        """
         filename = self._pair_trades_filename(self._datadir, pair)
         misc.file_dump_json(filename, data, is_zip=self._use_zip)
 
-    def trades_append(self, pair: str, data: List[Dict]):
+    def trades_append(self, pair: str, data: TradeList):
         """
         Append data to existing files
         :param pair: Pair - used for filename
-        :param data: List of Dicts containing trade data
+        :param data: List of Lists containing trade data,
+                     column sequence as in DEFAULT_TRADES_COLUMNS
         """
         raise NotImplementedError()
 
-    def trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> List[Dict]:
+    def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> TradeList:
         """
         Load a pair from file, either .json.gz or .json
         # TODO: respect timerange ...
@@ -140,9 +146,15 @@ class JsonDataHandler(IDataHandler):
         """
         filename = self._pair_trades_filename(self._datadir, pair)
         tradesdata = misc.file_load_json(filename)
 
         if not tradesdata:
             return []
 
+        if isinstance(tradesdata[0], dict):
+            # Convert trades dict to list
+            logger.info("Old trades format detected - converting")
+            tradesdata = trades_dict_to_list(tradesdata)
+            pass
+
         return tradesdata
 
     def trades_purge(self, pair: str) -> bool:
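
On disk, old files contain a list of dicts while new files contain a list of lists; _trades_load() inspects the first element to decide whether conversion is needed. A small illustration (values are example data, not read from a real file):

    old_format = [{"timestamp": 1565798399463, "id": "126181329", "type": None, "side": "buy",
                   "price": 0.019627, "amount": 0.04, "cost": 0.00078508}]
    new_format = [[1565798399463, "126181329", None, "buy", 0.019627, 0.04, 0.00078508]]

    for payload in (old_format, new_format):
        needs_conversion = isinstance(payload[0], dict)  # dict -> old format, triggers trades_dict_to_list()
        print(payload[0], '-> convert' if needs_conversion else '-> already list format')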


@@ -18,13 +18,12 @@ from ccxt.base.decimal_to_precision import (ROUND_DOWN, ROUND_UP, TICK_SIZE,
                                             TRUNCATE, decimal_to_precision)
 from pandas import DataFrame
 
-from freqtrade.data.converter import ohlcv_to_dataframe
+from freqtrade.data.converter import ohlcv_to_dataframe, trades_dict_to_list
 from freqtrade.exceptions import (DependencyException, InvalidOrderException,
                                   OperationalException, TemporaryError)
 from freqtrade.exchange.common import BAD_EXCHANGES, retrier, retrier_async
 from freqtrade.misc import deep_merge_dicts
 
 CcxtModuleType = Any
@@ -769,7 +768,7 @@ class Exchange:
     @retrier_async
     async def _async_fetch_trades(self, pair: str,
                                   since: Optional[int] = None,
-                                  params: Optional[dict] = None) -> List[Dict]:
+                                  params: Optional[dict] = None) -> List[List]:
         """
         Asyncronously gets trade history using fetch_trades.
         Handles exchange errors, does one call to the exchange.
@@ -789,7 +788,7 @@ class Exchange:
                 '(' + arrow.get(since // 1000).isoformat() + ') ' if since is not None else ''
             )
             trades = await self._api_async.fetch_trades(pair, since=since, limit=1000)
-            return trades
+            return trades_dict_to_list(trades)
         except ccxt.NotSupported as e:
             raise OperationalException(
                 f'Exchange {self._api.name} does not support fetching historical trade data.'
@@ -803,7 +802,7 @@ class Exchange:
     async def _async_get_trade_history_id(self, pair: str,
                                           until: int,
                                           since: Optional[int] = None,
-                                          from_id: Optional[str] = None) -> Tuple[str, List[Dict]]:
+                                          from_id: Optional[str] = None) -> Tuple[str, List[List]]:
         """
         Asyncronously gets trade history using fetch_trades
         use this when exchange uses id-based iteration (check `self._trades_pagination`)
@@ -814,7 +813,7 @@ class Exchange:
         returns tuple: (pair, trades-list)
         """
 
-        trades: List[Dict] = []
+        trades: List[List] = []
 
         if not from_id:
             # Fetch first elements using timebased method to get an ID to paginate on
@@ -823,7 +822,9 @@ class Exchange:
             # e.g. Binance returns the "last 1000" candles within a 1h time interval
             # - so we will miss the first trades.
             t = await self._async_fetch_trades(pair, since=since)
-            from_id = t[-1]['id']
+            # DEFAULT_TRADES_COLUMNS: 0 -> timestamp
+            # DEFAULT_TRADES_COLUMNS: 1 -> id
+            from_id = t[-1][1]
             trades.extend(t[:-1])
         while True:
             t = await self._async_fetch_trades(pair,
@@ -831,21 +832,21 @@ class Exchange:
             if len(t):
                 # Skip last id since its the key for the next call
                 trades.extend(t[:-1])
-                if from_id == t[-1]['id'] or t[-1]['timestamp'] > until:
+                if from_id == t[-1][1] or t[-1][0] > until:
                     logger.debug(f"Stopping because from_id did not change. "
-                                 f"Reached {t[-1]['timestamp']} > {until}")
+                                 f"Reached {t[-1][0]} > {until}")
                     # Reached the end of the defined-download period - add last trade as well.
                     trades.extend(t[-1:])
                     break
 
-                from_id = t[-1]['id']
+                from_id = t[-1][1]
             else:
                 break
 
         return (pair, trades)
 
     async def _async_get_trade_history_time(self, pair: str, until: int,
-                                            since: Optional[int] = None) -> Tuple[str, List]:
+                                            since: Optional[int] = None) -> Tuple[str, List[List]]:
         """
         Asyncronously gets trade history using fetch_trades,
         when the exchange uses time-based iteration (check `self._trades_pagination`)
@@ -855,16 +856,18 @@ class Exchange:
         returns tuple: (pair, trades-list)
         """
 
-        trades: List[Dict] = []
+        trades: List[List] = []
+        # DEFAULT_TRADES_COLUMNS: 0 -> timestamp
+        # DEFAULT_TRADES_COLUMNS: 1 -> id
         while True:
             t = await self._async_fetch_trades(pair, since=since)
             if len(t):
-                since = t[-1]['timestamp']
+                since = t[-1][1]
                 trades.extend(t)
                 # Reached the end of the defined-download period
-                if until and t[-1]['timestamp'] > until:
+                if until and t[-1][0] > until:
                     logger.debug(
-                        f"Stopping because until was reached. {t[-1]['timestamp']} > {until}")
+                        f"Stopping because until was reached. {t[-1][0]} > {until}")
                     break
             else:
                 break
@@ -874,7 +877,7 @@ class Exchange:
     async def _async_get_trade_history(self, pair: str,
                                        since: Optional[int] = None,
                                        until: Optional[int] = None,
-                                       from_id: Optional[str] = None) -> Tuple[str, List[Dict]]:
+                                       from_id: Optional[str] = None) -> Tuple[str, List[List]]:
         """
         Async wrapper handling downloading trades using either time or id based methods.
         """


@@ -1422,6 +1422,15 @@ def trades_for_order():
 
 @pytest.fixture(scope="function")
 def trades_history():
+    return [[1565798399463, '126181329', None, 'buy', 0.019627, 0.04, 0.00078508],
+            [1565798399629, '126181330', None, 'buy', 0.019627, 0.244, 0.004788987999999999],
+            [1565798399752, '126181331', None, 'sell', 0.019626, 0.011, 0.00021588599999999999],
+            [1565798399862, '126181332', None, 'sell', 0.019626, 0.011, 0.00021588599999999999],
+            [1565798399872, '126181333', None, 'sell', 0.019626, 0.011, 0.00021588599999999999]]
+
+
+@pytest.fixture(scope="function")
+def fetch_trades_result():
     return [{'info': {'a': 126181329,
                       'p': '0.01962700',
                       'q': '0.04000000',


@@ -5,12 +5,10 @@ from freqtrade.configuration.timerange import TimeRange
 from freqtrade.data.converter import (convert_ohlcv_format,
                                       convert_trades_format,
                                       ohlcv_fill_up_missing_data,
-                                      ohlcv_to_dataframe,
-                                      trim_dataframe)
-from freqtrade.data.history import (get_timerange,
-                                    load_data,
-                                    load_pair_history,
-                                    validate_backtest_data)
+                                      ohlcv_to_dataframe, trades_dict_to_list,
+                                      trades_remove_duplicates, trim_dataframe)
+from freqtrade.data.history import (get_timerange, load_data,
+                                    load_pair_history, validate_backtest_data)
 from tests.conftest import log_has
 from tests.data.test_history import _backup_file, _clean_test_file
@@ -197,32 +195,60 @@ def test_trim_dataframe(testdatadir) -> None:
     assert all(data_modify.iloc[0] == data.iloc[25])
 
 
-def test_convert_trades_format(mocker, default_conf, testdatadir):
-    file = testdatadir / "XRP_ETH-trades.json.gz"
-    file_new = testdatadir / "XRP_ETH-trades.json"
-    _backup_file(file, copy_file=True)
-    default_conf['datadir'] = testdatadir
-
-    assert not file_new.exists()
+def test_trades_remove_duplicates(trades_history):
+    trades_history1 = trades_history * 3
+    assert len(trades_history1) == len(trades_history) * 3
+    res = trades_remove_duplicates(trades_history1)
+    assert len(res) == len(trades_history)
+    for i, t in enumerate(res):
+        assert t == trades_history[i]
+
+
+def test_trades_dict_to_list(fetch_trades_result):
+    res = trades_dict_to_list(fetch_trades_result)
+    assert isinstance(res, list)
+    assert isinstance(res[0], list)
+    for i, t in enumerate(res):
+        assert t[0] == fetch_trades_result[i]['timestamp']
+        assert t[1] == fetch_trades_result[i]['id']
+        assert t[2] == fetch_trades_result[i]['type']
+        assert t[3] == fetch_trades_result[i]['side']
+        assert t[4] == fetch_trades_result[i]['price']
+        assert t[5] == fetch_trades_result[i]['amount']
+        assert t[6] == fetch_trades_result[i]['cost']
+
+
+def test_convert_trades_format(mocker, default_conf, testdatadir):
+    files = [{'old': testdatadir / "XRP_ETH-trades.json.gz",
+              'new': testdatadir / "XRP_ETH-trades.json"},
+             {'old': testdatadir / "XRP_OLD-trades.json.gz",
+              'new': testdatadir / "XRP_OLD-trades.json"},
+             ]
+    for file in files:
+        _backup_file(file['old'], copy_file=True)
+        assert not file['new'].exists()
+    default_conf['datadir'] = testdatadir
 
     convert_trades_format(default_conf, convert_from='jsongz',
                           convert_to='json', erase=False)
-    assert file_new.exists()
-    assert file.exists()
+    for file in files:
+        assert file['new'].exists()
+        assert file['old'].exists()
 
-    # Remove original file
-    file.unlink()
+        # Remove original file
+        file['old'].unlink()
     # Convert back
     convert_trades_format(default_conf, convert_from='json',
                           convert_to='jsongz', erase=True)
+    for file in files:
+        assert file['old'].exists()
+        assert not file['new'].exists()
 
-    assert file.exists()
-    assert not file_new.exists()
-
-    _clean_test_file(file)
-    if file_new.exists():
-        file_new.unlink()
+        _clean_test_file(file['old'])
+        if file['new'].exists():
+            file['new'].unlink()
 
 
 def test_convert_ohlcv_format(mocker, default_conf, testdatadir):


@@ -547,6 +547,17 @@ def test_download_trades_history(trades_history, mocker, default_conf, testdatad
     assert log_has("New Amount of trades: 5", caplog)
     assert file1.is_file()
 
+    ght_mock.reset_mock()
+    since_time = int(trades_history[-3][0] // 1000)
+    since_time2 = int(trades_history[-1][0] // 1000)
+    timerange = TimeRange('date', None, since_time, 0)
+    assert _download_trades_history(data_handler=data_handler, exchange=exchange,
+                                    pair='ETH/BTC', timerange=timerange)
+
+    assert ght_mock.call_count == 1
+    # Check this in seconds - since we had to convert to seconds above too.
+    assert int(ght_mock.call_args_list[0][1]['since'] // 1000) == since_time2 - 5
+
     # clean files freshly downloaded
     _clean_test_file(file1)
@@ -601,7 +612,7 @@ def test_jsondatahandler_ohlcv_get_pairs(testdatadir):
 
 def test_jsondatahandler_trades_get_pairs(testdatadir):
     pairs = JsonGzDataHandler.trades_get_pairs(testdatadir)
     # Convert to set to avoid failures due to sorting
-    assert set(pairs) == {'XRP/ETH'}
+    assert set(pairs) == {'XRP/ETH', 'XRP/OLD'}
 
 
 def test_jsondatahandler_ohlcv_purge(mocker, testdatadir):
@@ -614,6 +625,17 @@ def test_jsondatahandler_ohlcv_purge(mocker, testdatadir):
     assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m')
 
 
+def test_jsondatahandler_trades_load(mocker, testdatadir, caplog):
+    dh = JsonGzDataHandler(testdatadir)
+    logmsg = "Old trades format detected - converting"
+    dh.trades_load('XRP/ETH')
+    assert not log_has(logmsg, caplog)
+
+    # Test conversion is happening
+    dh.trades_load('XRP/OLD')
+    assert log_has(logmsg, caplog)
+
+
 def test_jsondatahandler_trades_purge(mocker, testdatadir):
     mocker.patch.object(Path, "exists", MagicMock(return_value=False))
     mocker.patch.object(Path, "unlink", MagicMock())


@@ -1537,18 +1537,18 @@ async def test___async_get_candle_history_sort(default_conf, mocker, exchange_na
 @pytest.mark.asyncio
 @pytest.mark.parametrize("exchange_name", EXCHANGES)
 async def test__async_fetch_trades(default_conf, mocker, caplog, exchange_name,
-                                   trades_history):
+                                   fetch_trades_result):
 
     caplog.set_level(logging.DEBUG)
     exchange = get_patched_exchange(mocker, default_conf, id=exchange_name)
     # Monkey-patch async function
-    exchange._api_async.fetch_trades = get_mock_coro(trades_history)
+    exchange._api_async.fetch_trades = get_mock_coro(fetch_trades_result)
 
     pair = 'ETH/BTC'
     res = await exchange._async_fetch_trades(pair, since=None, params=None)
     assert type(res) is list
-    assert isinstance(res[0], dict)
-    assert isinstance(res[1], dict)
+    assert isinstance(res[0], list)
+    assert isinstance(res[1], list)
 
     assert exchange._api_async.fetch_trades.call_count == 1
     assert exchange._api_async.fetch_trades.call_args[0][0] == pair
@@ -1594,7 +1594,7 @@ async def test__async_get_trade_history_id(default_conf, mocker, caplog, exchang
         if 'since' in kwargs:
             # Return first 3
             return trades_history[:-2]
-        elif kwargs.get('params', {}).get(pagination_arg) == trades_history[-3]['id']:
+        elif kwargs.get('params', {}).get(pagination_arg) == trades_history[-3][1]:
             # Return 2
             return trades_history[-3:-1]
         else:
@@ -1604,8 +1604,8 @@ async def test__async_get_trade_history_id(default_conf, mocker, caplog, exchang
     exchange._async_fetch_trades = MagicMock(side_effect=mock_get_trade_hist)
 
     pair = 'ETH/BTC'
-    ret = await exchange._async_get_trade_history_id(pair, since=trades_history[0]["timestamp"],
-                                                     until=trades_history[-1]["timestamp"]-1)
+    ret = await exchange._async_get_trade_history_id(pair, since=trades_history[0][0],
+                                                     until=trades_history[-1][0]-1)
     assert type(ret) is tuple
     assert ret[0] == pair
     assert type(ret[1]) is list
@@ -1614,7 +1614,7 @@ async def test__async_get_trade_history_id(default_conf, mocker, caplog, exchang
     fetch_trades_cal = exchange._async_fetch_trades.call_args_list
     # first call (using since, not fromId)
     assert fetch_trades_cal[0][0][0] == pair
-    assert fetch_trades_cal[0][1]['since'] == trades_history[0]["timestamp"]
+    assert fetch_trades_cal[0][1]['since'] == trades_history[0][0]
 
     # 2nd call
     assert fetch_trades_cal[1][0][0] == pair
@@ -1630,7 +1630,7 @@ async def test__async_get_trade_history_time(default_conf, mocker, caplog, excha
     caplog.set_level(logging.DEBUG)
 
     async def mock_get_trade_hist(pair, *args, **kwargs):
-        if kwargs['since'] == trades_history[0]["timestamp"]:
+        if kwargs['since'] == trades_history[0][0]:
             return trades_history[:-1]
         else:
             return trades_history[-1:]
@@ -1640,8 +1640,8 @@ async def test__async_get_trade_history_time(default_conf, mocker, caplog, excha
     # Monkey-patch async function
     exchange._async_fetch_trades = MagicMock(side_effect=mock_get_trade_hist)
     pair = 'ETH/BTC'
-    ret = await exchange._async_get_trade_history_time(pair, since=trades_history[0]["timestamp"],
-                                                       until=trades_history[-1]["timestamp"]-1)
+    ret = await exchange._async_get_trade_history_time(pair, since=trades_history[0][0],
+                                                       until=trades_history[-1][0]-1)
     assert type(ret) is tuple
     assert ret[0] == pair
     assert type(ret[1]) is list
@@ -1650,11 +1650,11 @@ async def test__async_get_trade_history_time(default_conf, mocker, caplog, excha
     fetch_trades_cal = exchange._async_fetch_trades.call_args_list
     # first call (using since, not fromId)
     assert fetch_trades_cal[0][0][0] == pair
-    assert fetch_trades_cal[0][1]['since'] == trades_history[0]["timestamp"]
+    assert fetch_trades_cal[0][1]['since'] == trades_history[0][0]
 
     # 2nd call
     assert fetch_trades_cal[1][0][0] == pair
-    assert fetch_trades_cal[0][1]['since'] == trades_history[0]["timestamp"]
+    assert fetch_trades_cal[0][1]['since'] == trades_history[0][0]
     assert log_has_re(r"Stopping because until was reached.*", caplog)
@@ -1666,7 +1666,7 @@ async def test__async_get_trade_history_time_empty(default_conf, mocker, caplog,
     caplog.set_level(logging.DEBUG)
 
     async def mock_get_trade_hist(pair, *args, **kwargs):
-        if kwargs['since'] == trades_history[0]["timestamp"]:
+        if kwargs['since'] == trades_history[0][0]:
             return trades_history[:-1]
         else:
             return []
@@ -1676,8 +1676,8 @@ async def test__async_get_trade_history_time_empty(default_conf, mocker, caplog,
     # Monkey-patch async function
     exchange._async_fetch_trades = MagicMock(side_effect=mock_get_trade_hist)
     pair = 'ETH/BTC'
-    ret = await exchange._async_get_trade_history_time(pair, since=trades_history[0]["timestamp"],
-                                                       until=trades_history[-1]["timestamp"]-1)
+    ret = await exchange._async_get_trade_history_time(pair, since=trades_history[0][0],
+                                                       until=trades_history[-1][0]-1)
     assert type(ret) is tuple
     assert ret[0] == pair
     assert type(ret[1]) is list
@@ -1686,7 +1686,7 @@ async def test__async_get_trade_history_time_empty(default_conf, mocker, caplog,
     fetch_trades_cal = exchange._async_fetch_trades.call_args_list
     # first call (using since, not fromId)
     assert fetch_trades_cal[0][0][0] == pair
-    assert fetch_trades_cal[0][1]['since'] == trades_history[0]["timestamp"]
+    assert fetch_trades_cal[0][1]['since'] == trades_history[0][0]
 
 
 @pytest.mark.parametrize("exchange_name", EXCHANGES)
@@ -1698,8 +1698,8 @@ def test_get_historic_trades(default_conf, mocker, caplog, exchange_name, trades
     exchange._async_get_trade_history_id = get_mock_coro((pair, trades_history))
     exchange._async_get_trade_history_time = get_mock_coro((pair, trades_history))
 
-    ret = exchange.get_historic_trades(pair, since=trades_history[0]["timestamp"],
-                                       until=trades_history[-1]["timestamp"])
+    ret = exchange.get_historic_trades(pair, since=trades_history[0][0],
+                                       until=trades_history[-1][0])
 
     # Depending on the exchange, one or the other method should be called
     assert sum([exchange._async_get_trade_history_id.call_count,
@@ -1720,8 +1720,8 @@ def test_get_historic_trades_notsupported(default_conf, mocker, caplog, exchange
     with pytest.raises(OperationalException,
                        match="This exchange does not suport downloading Trades."):
-        exchange.get_historic_trades(pair, since=trades_history[0]["timestamp"],
-                                     until=trades_history[-1]["timestamp"])
+        exchange.get_historic_trades(pair, since=trades_history[0][0],
+                                     until=trades_history[-1][0])
 
 
 @pytest.mark.parametrize("exchange_name", EXCHANGES)

Binary file not shown.

BIN tests/testdata/XRP_OLD-trades.json.gz (vendored, new file): binary file not shown.