Merge pull request #2372 from xmatthias/kraken_ohlcv_emulate
download tick-based data to emulate candles
@@ -35,7 +35,8 @@ ARGS_LIST_TIMEFRAMES = ["exchange", "print_one_column"]

 ARGS_CREATE_USERDIR = ["user_data_dir"]

-ARGS_DOWNLOAD_DATA = ["pairs", "pairs_file", "days", "exchange", "timeframes", "erase"]
+ARGS_DOWNLOAD_DATA = ["pairs", "pairs_file", "days", "download_trades", "exchange",
+                      "timeframes", "erase"]

 ARGS_PLOT_DATAFRAME = ["pairs", "indicators1", "indicators2", "plot_limit", "db_url",
                        "trade_source", "export", "exportfilename", "timerange", "ticker_interval"]
@@ -273,6 +273,12 @@ AVAILABLE_CLI_OPTIONS = {
         type=check_int_positive,
         metavar='INT',
     ),
+    "download_trades": Arg(
+        '--dl-trades',
+        help='Download trades instead of OHLCV data. The bot will resample trades to the '
+             'desired timeframe as specified with --timeframes/-t.',
+        action='store_true',
+    ),
     "exchange": Arg(
         '--exchange',
         help=f'Exchange name (default: `{constants.DEFAULT_EXCHANGE}`). '
@@ -312,6 +312,8 @@ class Configuration:

         self._args_to_config(config, argname='days',
                              logstring='Detected --days: {}')
+        self._args_to_config(config, argname='download_trades',
+                             logstring='Detected --dl-trades: {}')

     def _process_runmode(self, config: Dict[str, Any]) -> None:

@@ -114,3 +114,25 @@ def order_book_to_dataframe(bids: list, asks: list) -> DataFrame:
                       keys=['b_sum', 'b_size', 'bids', 'asks', 'a_size', 'a_sum'])
     # logger.info('order book %s', frame )
     return frame
+
+
+def trades_to_ohlcv(trades: list, timeframe: str) -> list:
+    """
+    Converts a list of trades to an ohlcv list.
+    :param trades: List of trades, as returned by ccxt.fetch_trades.
+    :param timeframe: Ticker timeframe to resample data to
+    :return: ohlcv data as list (in the format returned by ccxt.fetch_ohlcv)
+    """
+    from freqtrade.exchange import timeframe_to_minutes
+    ticker_minutes = timeframe_to_minutes(timeframe)
+    df = pd.DataFrame(trades)
+    df['datetime'] = pd.to_datetime(df['datetime'])
+    df = df.set_index('datetime')
+
+    df_new = df['price'].resample(f'{ticker_minutes}min').ohlc()
+    df_new['volume'] = df['amount'].resample(f'{ticker_minutes}min').sum()
+    df_new['date'] = df_new.index.astype("int64") // 10 ** 6
+    # Drop empty candles (intervals without trades resample to NaN)
+    df_new = df_new.dropna()
+    columns = ["date", "open", "high", "low", "close", "volume"]
+    return list(zip(*[df_new[x].values.tolist() for x in columns]))
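For orientation, a minimal sketch of how `trades_to_ohlcv` could be exercised. The trade dicts mimic the shape of `ccxt.fetch_trades` output, but only the keys this function actually reads (`datetime`, `price`, `amount`) are filled in, and all values are made up:

```python
from freqtrade.data.converter import trades_to_ohlcv

# Made-up trades: two inside the first minute, one in the next.
trades = [
    {'datetime': '2019-10-01T00:00:05.000Z', 'price': 100.0, 'amount': 1.0},
    {'datetime': '2019-10-01T00:00:30.000Z', 'price': 101.0, 'amount': 0.5},
    {'datetime': '2019-10-01T00:01:10.000Z', 'price': 99.5, 'amount': 2.0},
]

ohlcv = trades_to_ohlcv(trades, '1m')
# Each entry follows the ccxt.fetch_ohlcv layout:
#   (date_ms, open, high, low, close, volume)
# so the first minute aggregates to
#   (1569888000000, 100.0, 101.0, 100.0, 101.0, 1.5)
```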
@@ -17,7 +17,7 @@ from pandas import DataFrame

 from freqtrade import OperationalException, misc
 from freqtrade.configuration import TimeRange
-from freqtrade.data.converter import parse_ticker_dataframe
+from freqtrade.data.converter import parse_ticker_dataframe, trades_to_ohlcv
 from freqtrade.exchange import Exchange, timeframe_to_minutes

 logger = logging.getLogger(__name__)
@@ -82,6 +82,29 @@ def store_tickerdata_file(datadir: Path, pair: str,
     misc.file_dump_json(filename, data, is_zip=is_zip)


+def load_trades_file(datadir: Path, pair: str,
+                     timerange: Optional[TimeRange] = None) -> List[Dict]:
+    """
+    Load a pair from file, either .json.gz or .json
+    :return: tradelist or empty list if unsuccessful
+    """
+    filename = pair_trades_filename(datadir, pair)
+    tradesdata = misc.file_load_json(filename)
+    if not tradesdata:
+        return []
+
+    return tradesdata
+
+
+def store_trades_file(datadir: Path, pair: str,
+                      data: list, is_zip: bool = True):
+    """
+    Stores trades data to file
+    """
+    filename = pair_trades_filename(datadir, pair)
+    misc.file_dump_json(filename, data, is_zip=is_zip)
+
+
 def _validate_pairdata(pair, pairdata, timerange: TimeRange):
     if timerange.starttype == 'date' and pairdata[0][0] > timerange.startts * 1000:
         logger.warning('Missing data at start for pair %s, data starts at %s',
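A quick round-trip of the two helpers above, as a sketch (the datadir path and trade payload are illustrative, and the directory is assumed to exist):

```python
from pathlib import Path

from freqtrade.data.history import load_trades_file, store_trades_file

datadir = Path('user_data/data/kraken')  # illustrative location

# Writes ETH_BTC-trades.json.gz (gzipped by default) ...
store_trades_file(datadir, 'ETH/BTC', [{'id': '1', 'price': 0.02, 'amount': 1.0}])

# ... and reads it back; a missing or empty file yields [].
trades = load_trades_file(datadir, 'ETH/BTC')
assert trades[0]['id'] == '1'
```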
@@ -173,6 +196,12 @@ def pair_data_filename(datadir: Path, pair: str, ticker_interval: str) -> Path:
     return filename


+def pair_trades_filename(datadir: Path, pair: str) -> Path:
+    pair_s = pair.replace("/", "_")
+    filename = datadir.joinpath(f'{pair_s}-trades.json.gz')
+    return filename
+
+
 def _load_cached_data_for_updating(datadir: Path, pair: str, ticker_interval: str,
                                    timerange: Optional[TimeRange]) -> Tuple[List[Any],
                                                                             Optional[int]]:
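For example, `pair_trades_filename(Path('user_data/data/kraken'), 'ETH/BTC')` resolves to `user_data/data/kraken/ETH_BTC-trades.json.gz` (the datadir shown is illustrative); this is the path convention that `load_trades_file` and `store_trades_file` above rely on.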
@@ -299,6 +328,92 @@ def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes
     return pairs_not_available


+def download_trades_history(datadir: Path,
+                            exchange: Exchange,
+                            pair: str,
+                            timerange: Optional[TimeRange] = None) -> bool:
+    """
+    Download trade history from the exchange.
+    Appends to previously downloaded trades data.
+    """
+    try:
+
+        since = timerange.startts * 1000 if timerange and timerange.starttype == 'date' else None
+
+        trades = load_trades_file(datadir, pair)
+
+        from_id = trades[-1]['id'] if trades else None
+
+        logger.debug("Current Start: %s", trades[0]['datetime'] if trades else 'None')
+        logger.debug("Current End: %s", trades[-1]['datetime'] if trades else 'None')
+
+        new_trades = exchange.get_historic_trades(pair=pair,
+                                                  since=since if since else
+                                                  int(arrow.utcnow().shift(
+                                                      days=-30).float_timestamp) * 1000,
+                                                  # until=xxx,
+                                                  from_id=from_id,
+                                                  )
+        trades.extend(new_trades[1])
+        store_trades_file(datadir, pair, trades)
+
+        logger.debug("New Start: %s", trades[0]['datetime'])
+        logger.debug("New End: %s", trades[-1]['datetime'])
+        logger.info(f"New amount of trades: {len(trades)}")
+        return True
+
+    except Exception as e:
+        logger.error(
+            f'Failed to download historic trades for pair: "{pair}". '
+            f'Error: {e}'
+        )
+        return False
+
+
+def refresh_backtest_trades_data(exchange: Exchange, pairs: List[str], datadir: Path,
+                                 timerange: TimeRange, erase=False) -> List[str]:
+    """
+    Refresh stored trades data.
+    Used by freqtrade download-data.
+    :return: List of pairs that are not available.
+    """
+    pairs_not_available = []
+    for pair in pairs:
+        if pair not in exchange.markets:
+            pairs_not_available.append(pair)
+            logger.info(f"Skipping pair {pair}...")
+            continue
+
+        dl_file = pair_trades_filename(datadir, pair)
+        if erase and dl_file.exists():
+            logger.info(
+                f'Deleting existing data for pair {pair}.')
+            dl_file.unlink()
+
+        logger.info(f'Downloading trades for pair {pair}.')
+        download_trades_history(datadir=datadir, exchange=exchange,
+                                pair=pair,
+                                timerange=timerange)
+    return pairs_not_available
+
+
+def convert_trades_to_ohlcv(pairs: List[str], timeframes: List[str],
+                            datadir: Path, timerange: TimeRange, erase=False) -> None:
+    """
+    Convert stored trades data to ohlcv data.
+    """
+    for pair in pairs:
+        trades = load_trades_file(datadir, pair)
+        for timeframe in timeframes:
+            ohlcv_file = pair_data_filename(datadir, pair, timeframe)
+            if erase and ohlcv_file.exists():
+                logger.info(f'Deleting existing data for pair {pair}, interval {timeframe}.')
+                ohlcv_file.unlink()
+            ohlcv = trades_to_ohlcv(trades, timeframe)
+            # Store ohlcv
+            store_tickerdata_file(datadir, pair, timeframe, data=ohlcv)
+
+
 def get_timeframe(data: Dict[str, DataFrame]) -> Tuple[arrow.Arrow, arrow.Arrow]:
     """
     Get the maximum timeframe for the given backtest data
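Taken together, these helpers form the `--dl-trades` pipeline: download raw trades incrementally per pair, persist them, then resample into candles. A condensed sketch of the call order (the `exchange` and `timerange` objects are elided; pair and timeframe lists are illustrative):

```python
from pathlib import Path

from freqtrade.data.history import (convert_trades_to_ohlcv,
                                    refresh_backtest_trades_data)


def download_and_resample(exchange, timerange, datadir: Path):
    """Sketch: `exchange` is a freqtrade Exchange, `timerange` a parsed TimeRange."""
    pairs = ['ETH/BTC']        # illustrative
    timeframes = ['1m', '5m']  # candles to emulate from the raw trades
    # 1) Fetch/append raw trades into <pair>-trades.json.gz, one file per pair.
    not_available = refresh_backtest_trades_data(
        exchange, pairs=pairs, datadir=datadir, timerange=timerange)
    # 2) Resample the stored trades into one OHLCV file per timeframe.
    convert_trades_to_ohlcv(pairs=pairs, timeframes=timeframes,
                            datadir=datadir, timerange=timerange)
    return not_available
```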
@@ -16,6 +16,8 @@ class Binance(Exchange):
     _ft_has: Dict = {
         "stoploss_on_exchange": True,
         "order_time_in_force": ['gtc', 'fok', 'ioc'],
+        "trades_pagination": "id",
+        "trades_pagination_arg": "fromId",
     }

     def get_order_book(self, pair: str, limit: int = 100) -> dict:
@@ -147,6 +147,8 @@ def retrier(f):
 class Exchange:

+    _config: Dict = {}
+
     # Parameters to add directly to buy/sell calls (like agreeing to trading agreement)
     _params: Dict = {}

     # Dict to specify which options each exchange implements
@@ -157,6 +159,9 @@ class Exchange:
         "order_time_in_force": ["gtc"],
         "ohlcv_candle_limit": 500,
         "ohlcv_partial_candle": True,
+        "trades_pagination": "time",  # Possible are "time" or "id"
+        "trades_pagination_arg": "since",
+
     }
     _ft_has: Dict = {}

@@ -200,6 +205,9 @@ class Exchange:
         self._ohlcv_candle_limit = self._ft_has['ohlcv_candle_limit']
         self._ohlcv_partial_candle = self._ft_has['ohlcv_partial_candle']

+        self._trades_pagination = self._ft_has['trades_pagination']
+        self._trades_pagination_arg = self._ft_has['trades_pagination_arg']
+
         # Initialize ccxt objects
         self._api = self._init_ccxt(
             exchange_config, ccxt_kwargs=exchange_config.get('ccxt_config'))
@@ -742,6 +750,154 @@ class Exchange:
         except ccxt.BaseError as e:
             raise OperationalException(f'Could not fetch ticker data. Msg: {e}') from e

+    @retrier_async
+    async def _async_fetch_trades(self, pair: str,
+                                  since: Optional[int] = None,
+                                  params: Optional[dict] = None) -> List[Dict]:
+        """
+        Asynchronously gets trade history using fetch_trades.
+        Handles exchange errors, does one call to the exchange.
+        :param pair: Pair to fetch trade data for
+        :param since: Since as integer timestamp in milliseconds
+        :returns: List of dicts containing trades
+        """
+        try:
+            # fetch trades asynchronously
+            if params:
+                logger.debug("Fetching trades for pair %s, params: %s ", pair, params)
+                trades = await self._api_async.fetch_trades(pair, params=params, limit=1000)
+            else:
+                logger.debug(
+                    "Fetching trades for pair %s, since %s %s...",
+                    pair, since,
+                    '(' + arrow.get(since // 1000).isoformat() + ') ' if since is not None else ''
+                )
+                trades = await self._api_async.fetch_trades(pair, since=since, limit=1000)
+            return trades
+        except ccxt.NotSupported as e:
+            raise OperationalException(
+                f'Exchange {self._api.name} does not support fetching historical trade data. '
+                f'Message: {e}') from e
+        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
+            raise TemporaryError(f'Could not load trade history due to {e.__class__.__name__}. '
+                                 f'Message: {e}') from e
+        except ccxt.BaseError as e:
+            raise OperationalException(f'Could not fetch trade data. Msg: {e}') from e

+    async def _async_get_trade_history_id(self, pair: str,
+                                          until: int,
+                                          since: Optional[int] = None,
+                                          from_id: Optional[str] = None) -> Tuple[str, List[Dict]]:
+        """
+        Asynchronously gets trade history using fetch_trades.
+        Use this when the exchange uses id-based iteration (check `self._trades_pagination`).
+        :param pair: Pair to fetch trade data for
+        :param since: Since as integer timestamp in milliseconds
+        :param until: Until as integer timestamp in milliseconds
+        :param from_id: Download data starting with ID (if id is known). Ignores "since" if set.
+        :returns tuple: (pair, trades-list)
+        """
+
+        trades: List[Dict] = []
+
+        if not from_id:
+            # Fetch first elements using timebased method to get an ID to paginate on
+            # Depending on the Exchange, this can introduce a drift at the start of the interval
+            # of up to an hour.
+            # e.g. Binance returns the "last 1000" candles within a 1h time interval
+            # - so we will miss the first trades.
+            t = await self._async_fetch_trades(pair, since=since)
+            from_id = t[-1]['id']
+            trades.extend(t[:-1])
+        while True:
+            t = await self._async_fetch_trades(pair,
+                                               params={self._trades_pagination_arg: from_id})
+            if len(t):
+                # Skip last id since it's the key for the next call
+                trades.extend(t[:-1])
+                if from_id == t[-1]['id'] or t[-1]['timestamp'] > until:
+                    logger.debug(f"Stopping because from_id did not change. "
+                                 f"Reached {t[-1]['timestamp']} > {until}")
+                    # Reached the end of the defined-download period - add last trade as well.
+                    trades.extend(t[-1:])
+                    break
+
+                from_id = t[-1]['id']
+            else:
+                break
+
+        return (pair, trades)
+
+    async def _async_get_trade_history_time(self, pair: str, until: int,
+                                            since: Optional[int] = None) -> Tuple[str, List]:
+        """
+        Asynchronously gets trade history using fetch_trades,
+        when the exchange uses time-based iteration (check `self._trades_pagination`).
+        :param pair: Pair to fetch trade data for
+        :param since: Since as integer timestamp in milliseconds
+        :param until: Until as integer timestamp in milliseconds
+        :returns tuple: (pair, trades-list)
+        """
+
+        trades: List[Dict] = []
+        while True:
+            t = await self._async_fetch_trades(pair, since=since)
+            if len(t):
+                since = t[-1]['timestamp']
+                trades.extend(t)
+                # Reached the end of the defined-download period
+                if until and t[-1]['timestamp'] > until:
+                    logger.debug(
+                        f"Stopping because until was reached. {t[-1]['timestamp']} > {until}")
+                    break
+            else:
+                break
+
+        return (pair, trades)
+
+    async def _async_get_trade_history(self, pair: str,
+                                       since: Optional[int] = None,
+                                       until: Optional[int] = None,
+                                       from_id: Optional[str] = None) -> Tuple[str, List[Dict]]:
+        """
+        Async wrapper handling downloading trades using either time or id based methods.
+        """
+
+        if self._trades_pagination == 'time':
+            return await self._async_get_trade_history_time(
+                pair=pair, since=since,
+                until=until or ccxt.Exchange.milliseconds())
+        elif self._trades_pagination == 'id':
+            return await self._async_get_trade_history_id(
+                pair=pair, since=since,
+                until=until or ccxt.Exchange.milliseconds(), from_id=from_id
+            )
+        else:
+            raise OperationalException(f"Exchange {self.name} uses neither time, "
+                                       f"nor id based pagination")
+
+    def get_historic_trades(self, pair: str,
+                            since: Optional[int] = None,
+                            until: Optional[int] = None,
+                            from_id: Optional[str] = None) -> Tuple[str, List]:
+        """
+        Gets trade history using asyncio and returns the list of trades.
+        Handles all async work for this.
+        Async over one pair, assuming we get `_ohlcv_candle_limit` trades per call.
+        :param pair: Pair to download
+        :param since: Timestamp in milliseconds to get history from
+        :param until: Timestamp in milliseconds. Defaults to current timestamp if not defined.
+        :param from_id: Download data starting with ID (if id is known)
+        :returns: List of trades
+        """
+        if not self.exchange_has("fetchTrades"):
+            raise OperationalException("This exchange does not support downloading Trades.")
+
+        return asyncio.get_event_loop().run_until_complete(
+            self._async_get_trade_history(pair=pair, since=since,
+                                          until=until, from_id=from_id))

     @retrier
     def cancel_order(self, order_id: str, pair: str) -> None:
         if self._config['dry_run']:
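The two pagination strategies differ only in what is threaded between calls: the id-based loop feeds the last trade id into the exchange-specific pagination argument (and therefore holds the last element back until the loop terminates), while the time-based loop reuses the last timestamp as the next `since`. A standalone toy illustration of the difference, with `fetch_page` standing in for `_async_fetch_trades` (no ccxt involved):

```python
from typing import Callable, Dict, List


def paginate_by_id(fetch_page: Callable[..., List[Dict]], from_id: str,
                   until: int) -> List[Dict]:
    """Toy version of the id-based loop: the last trade's id keys the next page,
    so it is held back until the loop terminates."""
    trades: List[Dict] = []
    while True:
        page = fetch_page(from_id=from_id)
        if not page:
            break
        trades.extend(page[:-1])  # last id is the cursor for the next call
        if from_id == page[-1]['id'] or page[-1]['timestamp'] > until:
            trades.extend(page[-1:])  # end reached - keep the final trade
            break
        from_id = page[-1]['id']
    return trades


def paginate_by_time(fetch_page: Callable[..., List[Dict]], since: int,
                     until: int) -> List[Dict]:
    """Toy version of the time-based loop: the last timestamp becomes `since`."""
    trades: List[Dict] = []
    while True:
        page = fetch_page(since=since)
        if not page:
            break
        trades.extend(page)
        if page[-1]['timestamp'] > until:
            break
        since = page[-1]['timestamp']
    return trades
```

Note that the time-based variant re-requests from the last timestamp, so an exchange may return the boundary trade twice; the real methods inherit that behaviour and stop once `until` is crossed or an empty page comes back.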
@@ -14,6 +14,10 @@ logger = logging.getLogger(__name__)
 class Kraken(Exchange):

     _params: Dict = {"trading_agreement": "agree"}
+    _ft_has: Dict = {
+        "trades_pagination": "id",
+        "trades_pagination_arg": "since",
+    }

     @retrier
     def get_balances(self) -> dict:
@@ -72,8 +72,10 @@ def json_load(datafile: IO):

 def file_load_json(file):

-    gzipfile = file.with_suffix(file.suffix + '.gz')
-
+    if file.suffix != ".gz":
+        gzipfile = file.with_suffix(file.suffix + '.gz')
+    else:
+        gzipfile = file
     # Try gzip file first, otherwise regular json file.
     if gzipfile.is_file():
         logger.debug('Loading ticker data from file %s', gzipfile)
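The effect of this change: callers may now pass the `.json.gz` path directly (as `load_trades_file` does via `pair_trades_filename`), where previously the `.gz` variant was always derived from the bare `.json` path. A small illustration with hypothetical paths:

```python
from pathlib import Path

from freqtrade import misc

# Derives and prefers ETH_BTC-1m.json.gz, falling back to the .json file.
data = misc.file_load_json(Path('user_data/data/kraken/ETH_BTC-1m.json'))

# New: a path that already ends in .gz is used as-is.
trades = misc.file_load_json(Path('user_data/data/kraken/ETH_BTC-trades.json.gz'))
```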
@@ -8,7 +8,9 @@ import arrow
 from freqtrade import OperationalException
 from freqtrade.configuration import Configuration, TimeRange
 from freqtrade.configuration.directory_operations import create_userdata_dir
-from freqtrade.data.history import refresh_backtest_ohlcv_data
+from freqtrade.data.history import (convert_trades_to_ohlcv,
+                                    refresh_backtest_ohlcv_data,
+                                    refresh_backtest_trades_data)
 from freqtrade.exchange import available_exchanges, ccxt_exchanges
 from freqtrade.resolvers import ExchangeResolver
 from freqtrade.state import RunMode
@@ -88,9 +90,19 @@ def start_download_data(args: Dict[str, Any]) -> None:
         # Init exchange
         exchange = ExchangeResolver(config['exchange']['name'], config).exchange

-        pairs_not_available = refresh_backtest_ohlcv_data(
-            exchange, pairs=config["pairs"], timeframes=config["timeframes"],
-            dl_path=Path(config['datadir']), timerange=timerange, erase=config.get("erase"))
+        if config.get('download_trades'):
+            pairs_not_available = refresh_backtest_trades_data(
+                exchange, pairs=config["pairs"], datadir=Path(config['datadir']),
+                timerange=timerange, erase=config.get("erase"))
+
+            # Convert downloaded trade data to different timeframes
+            convert_trades_to_ohlcv(
+                pairs=config["pairs"], timeframes=config["timeframes"],
+                datadir=Path(config['datadir']), timerange=timerange, erase=config.get("erase"))
+        else:
+            pairs_not_available = refresh_backtest_ohlcv_data(
+                exchange, pairs=config["pairs"], timeframes=config["timeframes"],
+                dl_path=Path(config['datadir']), timerange=timerange, erase=config.get("erase"))

     except KeyboardInterrupt:
         sys.exit("SIGINT received, aborting ...")
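With all the pieces wired up, a plausible invocation (exchange, pairs, and day count are illustrative) is `freqtrade download-data --exchange kraken --pairs ETH/BTC XRP/BTC --days 3 --dl-trades -t 1m 5m`: raw trades are fetched and stored once per pair, then resampled into each requested timeframe.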