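"""
JSON data handlers.

Provides JsonDataHandler and JsonGzDataHandler, IDataHandler implementations that
store OHLCV and trades data as plain ``.json`` or gzip-compressed ``.json.gz`` files.
"""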
import logging
import re
from pathlib import Path
from typing import List, Optional

import numpy as np
from pandas import DataFrame, read_json, to_datetime

from freqtrade import misc
from freqtrade.configuration import TimeRange
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, ListPairsWithTimeframes, TradeList
from freqtrade.data.converter import trades_dict_to_list

from .idatahandler import IDataHandler


logger = logging.getLogger(__name__)


class JsonDataHandler(IDataHandler):
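    """
    Data handler storing OHLCV and trades data as plain JSON files.

    OHLCV data is stored as ``<pair>-<timeframe>[-<candle_type>].json`` and trades
    data as ``<pair>-trades.json``, with ``/`` in the pair name replaced by ``_``.
    """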

    _use_zip = False
    _columns = DEFAULT_DATAFRAME_COLUMNS

    @classmethod
    def ohlcv_get_available_data(cls, datadir: Path) -> ListPairsWithTimeframes:
        """
        Returns a list of all pairs with ohlcv data available in this datadir
        :param datadir: Directory to search for ohlcv files
        :return: List of Tuples of (pair, timeframe)
        """
        _tmp = [re.search(r'^([a-zA-Z_]+)\-(\d+\S+)(?=.json)', p.name)
                for p in datadir.glob(f"*.{cls._get_file_extension()}")]
        return [(match[1].replace('_', '/'), match[2]) for match in _tmp
                if match and len(match.groups()) > 1]

    @classmethod
    def ohlcv_get_pairs(
        cls,
        datadir: Path,
        timeframe: str,
        candle_type: Optional[str] = ""
    ) -> List[str]:
        """
        Returns a list of all pairs with ohlcv data available in this datadir
        for the specified timeframe
        :param datadir: Directory to search for ohlcv files
        :param timeframe: Timeframe to search pairs for
        :param candle_type: Candle type to filter for ('' for the default candle files)
        :return: List of Pairs
        """
        if candle_type:
            candle_type = f"-{candle_type}"
        else:
            candle_type = ""

        _tmp = [re.search(r'^(\S+)(?=\-' + timeframe + candle_type + '.json)', p.name)
                for p in datadir.glob(f"*{timeframe}{candle_type}.{cls._get_file_extension()}")]
        # Check if regex found something and only return these results
        return [match[0].replace('_', '/') for match in _tmp if match]

    def ohlcv_store(
        self,
        pair: str,
        timeframe: str,
        data: DataFrame,
        candle_type: Optional[str] = ""
    ) -> None:
        """
        Store data in json format "values".
            format looks as follows:
            [[<date>,<open>,<high>,<low>,<close>,<volume>]]
        :param pair: Pair - used to generate filename
        :param timeframe: Timeframe - used to generate filename
        :param data: Dataframe containing OHLCV data
        :param candle_type: Candle type - used as part of the filename when set
        :return: None
        """
        filename = self._pair_data_filename(
            self._datadir,
            pair,
            timeframe,
            candle_type
        )
        _data = data.copy()
        # Convert date to int (milliseconds since epoch)
        _data['date'] = _data['date'].view(np.int64) // 1000 // 1000

        # Reset index, select only appropriate columns and save as json
        _data.reset_index(drop=True).loc[:, self._columns].to_json(
            filename, orient="values",
            compression='gzip' if self._use_zip else None)

    def _ohlcv_load(self, pair: str, timeframe: str,
                    timerange: Optional[TimeRange] = None,
                    candle_type: Optional[str] = ""
                    ) -> DataFrame:
        """
        Internal method used to load data for one pair from disk.
        Implements the loading and conversion to a Pandas dataframe.
        Timerange trimming and dataframe validation happens outside of this method.
        :param pair: Pair to load data
        :param timeframe: Timeframe (e.g. "5m")
        :param timerange: Limit data to be loaded to this timerange.
                          Optionally implemented by subclasses to avoid loading
                          all data where possible.
        :param candle_type: Candle type - used as part of the filename when set
        :return: DataFrame with ohlcv data, or empty DataFrame
        """
        filename = self._pair_data_filename(self._datadir, pair, timeframe, candle_type=candle_type)
        if not filename.exists():
            return DataFrame(columns=self._columns)
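        # pandas.read_json raises ValueError if the file content is not valid JSON.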
        try:
            pairdata = read_json(filename, orient='values')
            pairdata.columns = self._columns
        except ValueError:
            logger.error(f"Could not load data for {pair}.")
            return DataFrame(columns=self._columns)
        pairdata = pairdata.astype(dtype={'open': 'float', 'high': 'float',
                                          'low': 'float', 'close': 'float', 'volume': 'float'})
        pairdata['date'] = to_datetime(pairdata['date'],
                                       unit='ms',
                                       utc=True,
                                       infer_datetime_format=True)
        return pairdata

    def ohlcv_purge(self, pair: str, timeframe: str, candle_type: Optional[str] = "") -> bool:
        """
        Remove data for this pair
        :param pair: Delete data for this pair.
        :param timeframe: Timeframe (e.g. "5m")
        :return: True when deleted, false if file did not exist.
        """
        filename = self._pair_data_filename(self._datadir, pair, timeframe, candle_type=candle_type)
        if filename.exists():
            filename.unlink()
            return True
        return False

    def ohlcv_append(
        self,
        pair: str,
        timeframe: str,
        data: DataFrame,
        candle_type: Optional[str] = ""
    ) -> None:
        """
        Append data to existing data structures
        :param pair: Pair
        :param timeframe: Timeframe this ohlcv data is for
        :param data: Data to append.
        """
        raise NotImplementedError()

    @classmethod
    def trades_get_pairs(cls, datadir: Path) -> List[str]:
        """
        Returns a list of all pairs for which trade data is available in this datadir
        :param datadir: Directory to search for trades files
        :return: List of Pairs
        """
        _tmp = [re.search(r'^(\S+)(?=\-trades.json)', p.name)
                for p in datadir.glob(f"*trades.{cls._get_file_extension()}")]
        # Check if regex found something and only return these results to avoid exceptions.
        return [match[0].replace('_', '/') for match in _tmp if match]

    def trades_store(self, pair: str, data: TradeList) -> None:
        """
        Store trades data (list of Lists) to file
        :param pair: Pair - used for filename
        :param data: List of Lists containing trade data,
                     column sequence as in DEFAULT_TRADES_COLUMNS
        """
        filename = self._pair_trades_filename(self._datadir, pair)
        misc.file_dump_json(filename, data, is_zip=self._use_zip)

    def trades_append(self, pair: str, data: TradeList):
        """
        Append data to existing files
        :param pair: Pair - used for filename
        :param data: List of Lists containing trade data,
                     column sequence as in DEFAULT_TRADES_COLUMNS
        """
        raise NotImplementedError()

    def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> TradeList:
        """
        Load a pair from file, either .json.gz or .json
        # TODO: respect timerange ...
        :param pair: Load trades for this pair
        :param timerange: Timerange to load trades for - currently not implemented
        :return: List of trades
        """
        filename = self._pair_trades_filename(self._datadir, pair)
        tradesdata = misc.file_load_json(filename)

        if not tradesdata:
            return []

        if isinstance(tradesdata[0], dict):
            # Convert trades dict to list
            logger.info("Old trades format detected - converting")
            tradesdata = trades_dict_to_list(tradesdata)
        return tradesdata

    def trades_purge(self, pair: str) -> bool:
        """
        Remove data for this pair
        :param pair: Delete data for this pair.
        :return: True when deleted, false if file did not exist.
        """
        filename = self._pair_trades_filename(self._datadir, pair)
        if filename.exists():
            filename.unlink()
            return True
        return False

    @classmethod
    def _pair_data_filename(
        cls,
        datadir: Path,
        pair: str,
        timeframe: str,
        candle_type: Optional[str] = ""
    ) -> Path:
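        # Illustrative example: _pair_data_filename(Path("user_data/data"), "ETH/BTC", "5m")
        #   -> user_data/data/ETH_BTC-5m.json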
        pair_s = misc.pair_to_filename(pair)
        if candle_type:
            candle_type = f"-{candle_type}"
        else:
            # Guard against None leaking into the filename (candle_type is Optional).
            candle_type = ""
        filename = datadir.joinpath(
            f'{pair_s}-{timeframe}{candle_type}.{cls._get_file_extension()}')
        return filename

    @classmethod
    def _get_file_extension(cls):
        return "json.gz" if cls._use_zip else "json"

    @classmethod
    def _pair_trades_filename(cls, datadir: Path, pair: str) -> Path:
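        # Illustrative example: _pair_trades_filename(Path("user_data/data"), "ETH/BTC")
        #   -> user_data/data/ETH_BTC-trades.json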
        pair_s = misc.pair_to_filename(pair)
        filename = datadir.joinpath(f'{pair_s}-trades.{cls._get_file_extension()}')
        return filename


class JsonGzDataHandler(JsonDataHandler):
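    """
    Same as JsonDataHandler, but stores data gzip-compressed in ``.json.gz`` files.
    """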

    _use_zip = True