2017-11-18 07:52:28 +00:00
|
|
|
# pragma pylint: disable=W0603
|
2019-04-09 09:27:35 +00:00
|
|
|
"""
|
|
|
|
Cryptocurrency Exchanges support
|
|
|
|
"""
|
2019-06-09 12:05:36 +00:00
|
|
|
import asyncio
|
2021-02-14 06:22:08 +00:00
|
|
|
import http
|
2018-08-14 17:51:49 +00:00
|
|
|
import inspect
|
2019-06-09 12:05:36 +00:00
|
|
|
import logging
|
|
|
|
from copy import deepcopy
|
2021-09-08 19:41:32 +00:00
|
|
|
from datetime import datetime, time, timezone
|
2020-01-14 19:16:20 +00:00
|
|
|
from math import ceil
|
2021-09-08 19:46:52 +00:00
|
|
|
from typing import Any, Dict, List, Optional, Tuple, Union
|
2017-09-01 19:11:46 +00:00
|
|
|
|
2018-12-11 18:47:48 +00:00
|
|
|
import arrow
|
2018-04-06 07:57:08 +00:00
|
|
|
import ccxt
|
2018-07-31 10:47:32 +00:00
|
|
|
import ccxt.async_support as ccxt_async
|
2021-04-13 18:09:22 +00:00
|
|
|
from cachetools import TTLCache
|
2020-09-28 17:39:41 +00:00
|
|
|
from ccxt.base.decimal_to_precision import (ROUND_DOWN, ROUND_UP, TICK_SIZE, TRUNCATE,
|
|
|
|
decimal_to_precision)
|
2018-12-11 18:47:48 +00:00
|
|
|
from pandas import DataFrame
|
2018-08-03 16:10:03 +00:00
|
|
|
|
2021-08-27 17:54:53 +00:00
|
|
|
from freqtrade.constants import (DEFAULT_AMOUNT_RESERVE_PERCENT, NON_OPEN_EXCHANGE_STATES,
|
|
|
|
ListPairsWithTimeframes)
|
2020-03-31 18:20:10 +00:00
|
|
|
from freqtrade.data.converter import ohlcv_to_dataframe, trades_dict_to_list
|
2020-09-28 17:39:41 +00:00
|
|
|
from freqtrade.exceptions import (DDosProtection, ExchangeError, InsufficientFundsError,
|
2021-06-02 09:30:19 +00:00
|
|
|
InvalidOrderException, OperationalException, PricingError,
|
|
|
|
RetryableOrderError, TemporaryError)
|
2021-04-06 05:47:44 +00:00
|
|
|
from freqtrade.exchange.common import (API_FETCH_ORDER_RETRY_COUNT, BAD_EXCHANGES,
|
|
|
|
EXCHANGE_HAS_OPTIONAL, EXCHANGE_HAS_REQUIRED, retrier,
|
2020-09-28 17:39:41 +00:00
|
|
|
retrier_async)
|
2020-07-15 17:49:51 +00:00
|
|
|
from freqtrade.misc import deep_merge_dicts, safe_value_fallback2
|
2021-01-12 00:13:58 +00:00
|
|
|
from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
|
2017-05-12 17:11:56 +00:00
|
|
|
|
2020-09-28 17:39:41 +00:00
|
|
|
|
2020-02-02 04:00:40 +00:00
|
|
|
# Type alias: accepts either the synchronous ``ccxt`` module or
# ``ccxt.async_support`` - both expose the same exchange classes.
CcxtModuleType = Any


logger = logging.getLogger(__name__)


# Workaround for adding samesite support to pre 3.8 python
# Only applies to python3.7, and only on certain exchanges (kraken)
# Replicates the fix from starlette (which is actually causing this problem)
http.cookies.Morsel._reserved["samesite"] = "SameSite"  # type: ignore
|
|
|
|
|
|
|
|
|
2019-09-12 01:39:52 +00:00
|
|
|
class Exchange:
|
2018-04-22 07:57:48 +00:00
|
|
|
|
2019-02-28 23:13:16 +00:00
|
|
|
    # NOTE(review): class-level mutable dict - shared across instances and
    # updated in-place via ``self._config.update(config)`` in __init__.
    _config: Dict = {}

    # Parameters to add directly to ccxt sync/async initialization.
    _ccxt_config: Dict = {}

    # Parameters to add directly to buy/sell calls (like agreeing to trading agreement)
    _params: Dict = {}

    # Dict to specify which options each exchange implements
    # This defines defaults, which can be selectively overridden by subclasses using _ft_has
    # or by specifying them in the configuration.
    _ft_has_default: Dict = {
        "stoploss_on_exchange": False,
        "order_time_in_force": ["gtc"],
        "ohlcv_params": {},
        "ohlcv_candle_limit": 500,
        "ohlcv_partial_candle": True,
        "trades_pagination": "time",  # Possible are "time" or "id"
        "trades_pagination_arg": "since",
        "l2_limit_range": None,
        "l2_limit_range_required": True,  # Allow Empty L2 limit (kucoin)
    }
    # Per-exchange overrides; merged over _ft_has_default in __init__.
    _ft_has: Dict = {}

    # Times of day at which funding fees are applied (futures exchanges).
    funding_fee_times: List[time] = []
|
2019-02-24 18:35:29 +00:00
|
|
|
|
2020-02-02 04:00:40 +00:00
|
|
|
    def __init__(self, config: Dict[str, Any], validate: bool = True) -> None:
        """
        Initializes this module with the given config,
        it does basic validation whether the specified exchange and pairs are valid.
        :param config: Full bot configuration dict (requires 'dry_run', 'exchange',
                       'stake_currency' keys).
        :param validate: When True, load markets and validate timeframe, pairs,
                         order-types and startup-candle settings against the exchange.
        :return: None
        """
        self._api: ccxt.Exchange = None
        self._api_async: ccxt_async.Exchange = None
        self._markets: Dict = {}

        # _config is a class-level dict - updated in-place (shared state).
        self._config.update(config)

        # Holds last candle refreshed time of each pair
        self._pairs_last_refresh_time: Dict[Tuple[str, str], int] = {}
        # Timestamp of last markets refresh
        self._last_markets_refresh: int = 0

        # Cache for 10 minutes ...
        self._fetch_tickers_cache: TTLCache = TTLCache(maxsize=1, ttl=60 * 10)
        # Cache values for 1800 to avoid frequent polling of the exchange for prices
        # Caching only applies to RPC methods, so prices for open trades are still
        # refreshed once every iteration.
        self._sell_rate_cache: TTLCache = TTLCache(maxsize=100, ttl=1800)
        self._buy_rate_cache: TTLCache = TTLCache(maxsize=100, ttl=1800)

        # Holds candles
        self._klines: Dict[Tuple[str, str], DataFrame] = {}

        # Holds all open sell orders for dry_run
        self._dry_run_open_orders: Dict[str, Any] = {}

        if config['dry_run']:
            logger.info('Instance is running with dry_run enabled')
        logger.info(f"Using CCXT {ccxt.__version__}")
        exchange_config = config['exchange']
        self.log_responses = exchange_config.get('log_responses', False)

        # Deep merge ft_has with default ft_has options
        self._ft_has = deep_merge_dicts(self._ft_has, deepcopy(self._ft_has_default))
        if exchange_config.get('_ft_has_params'):
            self._ft_has = deep_merge_dicts(exchange_config.get('_ft_has_params'),
                                            self._ft_has)
            logger.info("Overriding exchange._ft_has with config params, result: %s", self._ft_has)

        # Assign this directly for easy access
        self._ohlcv_partial_candle = self._ft_has['ohlcv_partial_candle']

        self._trades_pagination = self._ft_has['trades_pagination']
        self._trades_pagination_arg = self._ft_has['trades_pagination_arg']

        # Initialize ccxt objects
        # sync config = _ccxt_config <- ccxt_config <- ccxt_sync_config (later wins)
        ccxt_config = self._ccxt_config.copy()
        ccxt_config = deep_merge_dicts(exchange_config.get('ccxt_config', {}), ccxt_config)
        ccxt_config = deep_merge_dicts(exchange_config.get('ccxt_sync_config', {}), ccxt_config)

        self._api = self._init_ccxt(exchange_config, ccxt_kwargs=ccxt_config)

        # async config = _ccxt_config <- ccxt_config <- ccxt_async_config (later wins)
        ccxt_async_config = self._ccxt_config.copy()
        ccxt_async_config = deep_merge_dicts(exchange_config.get('ccxt_config', {}),
                                             ccxt_async_config)
        ccxt_async_config = deep_merge_dicts(exchange_config.get('ccxt_async_config', {}),
                                             ccxt_async_config)
        self._api_async = self._init_ccxt(
            exchange_config, ccxt_async, ccxt_kwargs=ccxt_async_config)

        logger.info('Using Exchange "%s"', self.name)

        if validate:
            # Check if timeframe is available
            self.validate_timeframes(config.get('timeframe'))

            # Initial markets load
            self._load_markets()

            # Check if all pairs are available
            self.validate_stakecurrency(config['stake_currency'])
            if not exchange_config.get('skip_pair_validation'):
                self.validate_pairs(config['exchange']['pair_whitelist'])
            self.validate_ordertypes(config.get('order_types', {}))
            self.validate_order_time_in_force(config.get('order_time_in_force', {}))
            self.validate_required_startup_candles(config.get('startup_candle_count', 0),
                                                   config.get('timeframe', ''))

        # Converts the interval provided in minutes in config to seconds
        self.markets_refresh_interval: int = exchange_config.get(
            "markets_refresh_interval", 60) * 60
|
|
|
|
|
2018-08-14 17:51:49 +00:00
|
|
|
    def __del__(self):
        """
        Destructor - clean up async stuff
        """
        # Delegates to close() so the async ccxt session is shut down cleanly.
        self.close()
|
|
|
|
|
|
|
|
    def close(self):
        """Shut down the async ccxt session (also invoked from __del__)."""
        logger.debug("Exchange object destroyed, closing async loop")
        # ccxt async exchanges expose a coroutine close(); only await it when
        # the async api was actually created.
        if self._api_async and inspect.iscoroutinefunction(self._api_async.close):
            asyncio.get_event_loop().run_until_complete(self._api_async.close())
|
|
|
|
|
2020-02-02 04:00:40 +00:00
|
|
|
    def _init_ccxt(self, exchange_config: Dict[str, Any], ccxt_module: CcxtModuleType = ccxt,
                   ccxt_kwargs: Optional[dict] = None) -> ccxt.Exchange:
        """
        Initialize ccxt with given config and return valid
        ccxt instance.
        :param exchange_config: 'exchange' section of the configuration
        :param ccxt_module: ccxt or ccxt.async_support module to instantiate from
        :param ccxt_kwargs: extra kwargs merged into the ccxt constructor config
        :raises OperationalException: if the exchange is unknown to ccxt or
            ccxt initialization fails
        """
        # Find matching class for the given exchange name
        name = exchange_config['name']

        if not is_exchange_known_ccxt(name, ccxt_module):
            raise OperationalException(f'Exchange {name} is not supported by ccxt')

        ex_config = {
            'apiKey': exchange_config.get('key'),
            'secret': exchange_config.get('secret'),
            'password': exchange_config.get('password'),
            'uid': exchange_config.get('uid', ''),
        }
        if ccxt_kwargs:
            logger.info('Applying additional ccxt config: %s', ccxt_kwargs)
            ex_config.update(ccxt_kwargs)
        try:
            # ccxt exposes one class per exchange, named after its lowercase id.
            api = getattr(ccxt_module, name.lower())(ex_config)
        except (KeyError, AttributeError) as e:
            raise OperationalException(f'Exchange {name} is not supported') from e
        except ccxt.BaseError as e:
            raise OperationalException(f"Initialization of ccxt failed. Reason: {e}") from e

        self.set_sandbox(api, exchange_config, name)

        return api
|
|
|
|
|
2018-06-18 20:20:50 +00:00
|
|
|
@property
|
|
|
|
def name(self) -> str:
|
|
|
|
"""exchange Name (from ccxt)"""
|
2018-06-18 20:07:15 +00:00
|
|
|
return self._api.name
|
2017-09-08 13:51:00 +00:00
|
|
|
|
2018-06-18 20:20:50 +00:00
|
|
|
@property
|
|
|
|
def id(self) -> str:
|
|
|
|
"""exchange ccxt id"""
|
2018-06-18 20:07:15 +00:00
|
|
|
return self._api.id
|
2017-10-06 10:22:04 +00:00
|
|
|
|
2019-09-29 20:08:11 +00:00
|
|
|
@property
|
|
|
|
def timeframes(self) -> List[str]:
|
|
|
|
return list((self._api.timeframes or {}).keys())
|
|
|
|
|
2019-03-04 22:59:08 +00:00
|
|
|
    @property
    def markets(self) -> Dict:
        """exchange ccxt markets - lazily loaded on first access"""
        if not self._markets:
            logger.info("Markets were not loaded. Loading them now..")
            self._load_markets()
        return self._markets
|
2019-03-04 22:59:08 +00:00
|
|
|
|
2020-01-12 13:37:45 +00:00
|
|
|
    @property
    def precisionMode(self) -> int:
        """exchange ccxt precisionMode (DECIMAL_PLACES / SIGNIFICANT_DIGITS / TICK_SIZE)"""
        # NOTE: annotation corrected from str - ccxt precisionMode is an int constant.
        return self._api.precisionMode
|
|
|
|
|
2021-06-10 18:09:25 +00:00
|
|
|
def _log_exchange_response(self, endpoint, response) -> None:
|
|
|
|
""" Log exchange responses """
|
|
|
|
if self.log_responses:
|
|
|
|
logger.info(f"API {endpoint}: {response}")
|
|
|
|
|
2021-02-14 09:29:45 +00:00
|
|
|
def ohlcv_candle_limit(self, timeframe: str) -> int:
|
|
|
|
"""
|
|
|
|
Exchange ohlcv candle limit
|
2021-06-25 13:45:49 +00:00
|
|
|
Uses ohlcv_candle_limit_per_timeframe if the exchange has different limits
|
2021-02-14 09:29:45 +00:00
|
|
|
per timeframe (e.g. bittrex), otherwise falls back to ohlcv_candle_limit
|
|
|
|
:param timeframe: Timeframe to check
|
|
|
|
:return: Candle limit as integer
|
|
|
|
"""
|
|
|
|
return int(self._ft_has.get('ohlcv_candle_limit_per_timeframe', {}).get(
|
|
|
|
timeframe, self._ft_has.get('ohlcv_candle_limit')))
|
|
|
|
|
2019-10-16 23:09:19 +00:00
|
|
|
def get_markets(self, base_currencies: List[str] = None, quote_currencies: List[str] = None,
|
2020-12-30 08:55:44 +00:00
|
|
|
pairs_only: bool = False, active_only: bool = False) -> Dict[str, Any]:
|
2019-10-13 10:12:20 +00:00
|
|
|
"""
|
|
|
|
Return exchange ccxt markets, filtered out by base currency and quote currency
|
|
|
|
if this was requested in parameters.
|
|
|
|
|
|
|
|
TODO: consider moving it to the Dataprovider
|
|
|
|
"""
|
|
|
|
markets = self.markets
|
2019-10-14 10:32:39 +00:00
|
|
|
if not markets:
|
|
|
|
raise OperationalException("Markets were not loaded.")
|
|
|
|
|
2019-10-16 23:09:19 +00:00
|
|
|
if base_currencies:
|
|
|
|
markets = {k: v for k, v in markets.items() if v['base'] in base_currencies}
|
|
|
|
if quote_currencies:
|
|
|
|
markets = {k: v for k, v in markets.items() if v['quote'] in quote_currencies}
|
2019-10-13 10:12:20 +00:00
|
|
|
if pairs_only:
|
2020-06-02 18:41:29 +00:00
|
|
|
markets = {k: v for k, v in markets.items() if self.market_is_tradable(v)}
|
2019-10-13 10:12:20 +00:00
|
|
|
if active_only:
|
2019-10-14 10:32:39 +00:00
|
|
|
markets = {k: v for k, v in markets.items() if market_is_active(v)}
|
2019-10-13 10:12:20 +00:00
|
|
|
return markets
|
|
|
|
|
2020-01-11 10:53:44 +00:00
|
|
|
def get_quote_currencies(self) -> List[str]:
|
|
|
|
"""
|
|
|
|
Return a list of supported quote currencies
|
|
|
|
"""
|
|
|
|
markets = self.markets
|
2020-01-12 11:48:29 +00:00
|
|
|
return sorted(set([x['quote'] for _, x in markets.items()]))
|
2020-01-11 10:53:44 +00:00
|
|
|
|
2020-02-24 20:50:27 +00:00
|
|
|
def get_pair_quote_currency(self, pair: str) -> str:
|
|
|
|
"""
|
|
|
|
Return a pair's quote currency
|
|
|
|
"""
|
2020-02-26 06:09:54 +00:00
|
|
|
return self.markets.get(pair, {}).get('quote', '')
|
2020-02-24 20:50:27 +00:00
|
|
|
|
|
|
|
    def get_pair_base_currency(self, pair: str) -> str:
        """
        Return a pair's base currency ('' if the pair is not in markets)
        """
        return self.markets.get(pair, {}).get('base', '')
|
2020-02-24 20:50:27 +00:00
|
|
|
|
2020-06-02 18:29:48 +00:00
|
|
|
def market_is_tradable(self, market: Dict[str, Any]) -> bool:
|
|
|
|
"""
|
|
|
|
Check if the market symbol is tradable by Freqtrade.
|
|
|
|
By default, checks if it's splittable by `/` and both sides correspond to base / quote
|
|
|
|
"""
|
|
|
|
symbol_parts = market['symbol'].split('/')
|
|
|
|
return (len(symbol_parts) == 2 and
|
|
|
|
len(symbol_parts[0]) > 0 and
|
|
|
|
len(symbol_parts[1]) > 0 and
|
|
|
|
symbol_parts[0] == market.get('base') and
|
|
|
|
symbol_parts[1] == market.get('quote')
|
|
|
|
)
|
|
|
|
|
2020-02-02 04:00:40 +00:00
|
|
|
def klines(self, pair_interval: Tuple[str, str], copy: bool = True) -> DataFrame:
|
2018-12-30 06:15:21 +00:00
|
|
|
if pair_interval in self._klines:
|
|
|
|
return self._klines[pair_interval].copy() if copy else self._klines[pair_interval]
|
2018-12-11 18:47:48 +00:00
|
|
|
else:
|
2018-12-29 12:00:50 +00:00
|
|
|
return DataFrame()
|
2018-12-11 18:47:48 +00:00
|
|
|
|
2020-02-02 04:00:40 +00:00
|
|
|
def set_sandbox(self, api: ccxt.Exchange, exchange_config: dict, name: str) -> None:
|
2018-07-28 20:32:10 +00:00
|
|
|
if exchange_config.get('sandbox'):
|
|
|
|
if api.urls.get('test'):
|
|
|
|
api.urls['api'] = api.urls['test']
|
2018-07-29 09:15:13 +00:00
|
|
|
logger.info("Enabled Sandbox API on %s", name)
|
2018-07-28 20:32:10 +00:00
|
|
|
else:
|
2020-08-03 17:28:57 +00:00
|
|
|
logger.warning(
|
|
|
|
f"No Sandbox URL in CCXT for {name}, exiting. Please check your config.json")
|
2018-07-28 20:32:10 +00:00
|
|
|
raise OperationalException(f'Exchange {name} does not provide a sandbox api')
|
|
|
|
|
2020-02-02 04:00:40 +00:00
|
|
|
    def _load_async_markets(self, reload: bool = False) -> None:
        """
        Load (or reload) markets on the async ccxt instance.
        Failures are logged and swallowed - the sync market load is authoritative.
        :param reload: force a refresh even if markets are already loaded
        """
        try:
            if self._api_async:
                asyncio.get_event_loop().run_until_complete(
                    self._api_async.load_markets(reload=reload))

        except (asyncio.TimeoutError, ccxt.BaseError) as e:
            logger.warning('Could not load async markets. Reason: %s', e)
            return
|
|
|
|
|
2019-03-12 15:35:32 +00:00
|
|
|
    def _load_markets(self) -> None:
        """ Initialize markets both sync and async """
        try:
            self._markets = self._api.load_markets()
            self._load_async_markets()
            self._last_markets_refresh = arrow.utcnow().int_timestamp
        except ccxt.BaseError:
            # Deliberately swallowed: callers inspect self._markets and react to
            # an empty dict (e.g. validate_stakecurrency raises on startup).
            logger.exception('Unable to initialize markets.')
|
2018-09-10 18:19:12 +00:00
|
|
|
|
2020-06-09 22:39:23 +00:00
|
|
|
    def reload_markets(self) -> None:
        """Reload markets both sync and async if refresh interval has passed """
        # Check whether markets have to be reloaded
        if (self._last_markets_refresh > 0) and (
                self._last_markets_refresh + self.markets_refresh_interval
                > arrow.utcnow().int_timestamp):
            return None
        logger.debug("Performing scheduled market reload..")
        try:
            self._markets = self._api.load_markets(reload=True)
            # Also reload async markets to avoid issues with newly listed pairs
            self._load_async_markets(reload=True)
            self._last_markets_refresh = arrow.utcnow().int_timestamp
        except ccxt.BaseError:
            # Keep running on stale markets rather than crashing the bot loop.
            logger.exception("Could not reload markets.")
|
2018-09-10 18:19:12 +00:00
|
|
|
|
2020-02-02 04:00:40 +00:00
|
|
|
    def validate_stakecurrency(self, stake_currency: str) -> None:
        """
        Checks stake-currency against available currencies on the exchange.
        Only runs on startup. If markets have not been loaded, there's been a problem with
        the connection to the exchange.
        :param stake_currency: Stake-currency to validate
        :raise: OperationalException if stake-currency is not available.
        """
        if not self._markets:
            raise OperationalException(
                'Could not load markets, therefore cannot start. '
                'Please investigate the above error for more details.'
            )
        quote_currencies = self.get_quote_currencies()
        if stake_currency not in quote_currencies:
            raise OperationalException(
                f"{stake_currency} is not available as stake on {self.name}. "
                f"Available currencies are: {', '.join(quote_currencies)}")
|
2020-01-11 10:53:44 +00:00
|
|
|
|
2018-06-17 10:41:33 +00:00
|
|
|
    def validate_pairs(self, pairs: List[str]) -> None:
        """
        Checks if all given pairs are tradable on the current exchange.
        :param pairs: list of pairs
        :raise: OperationalException if one pair is not available
        :return: None
        """

        if not self.markets:
            # Best-effort: without markets we cannot validate, so assume the list is fine.
            logger.warning('Unable to validate pairs (assuming they are correct).')
            return
        # Expand wildcard entries (e.g. '.*'/regex patterns) against known markets.
        extended_pairs = expand_pairlist(pairs, list(self.markets), keep_invalid=True)
        invalid_pairs = []
        for pair in extended_pairs:
            # Note: ccxt has BaseCurrency/QuoteCurrency format for pairs
            if self.markets and pair not in self.markets:
                raise OperationalException(
                    f'Pair {pair} is not available on {self.name}. '
                    f'Please remove {pair} from your whitelist.')

            # From ccxt Documentation:
            # markets.info: An associative array of non-common market properties,
            # including fees, rates, limits and other general market information.
            # The internal info array is different for each particular market,
            # its contents depend on the exchange.
            # It can also be a string or similar ... so we need to verify that first.
            elif (isinstance(self.markets[pair].get('info', None), dict)
                  and self.markets[pair].get('info', {}).get('prohibitedIn', False)):
                # Warn users about restricted pairs in whitelist.
                # We cannot determine reliably if Users are affected.
                logger.warning(f"Pair {pair} is restricted for some users on this exchange."
                               f"Please check if you are impacted by this restriction "
                               f"on the exchange and eventually remove {pair} from your whitelist.")
            # Collect pairs whose quote currency does not match the configured stake.
            if (self._config['stake_currency'] and
                    self.get_pair_quote_currency(pair) != self._config['stake_currency']):
                invalid_pairs.append(pair)
        if invalid_pairs:
            raise OperationalException(
                f"Stake-currency '{self._config['stake_currency']}' not compatible with "
                f"pair-whitelist. Please remove the following pairs: {invalid_pairs}")
|
2018-06-17 10:41:33 +00:00
|
|
|
|
2020-02-02 04:00:40 +00:00
|
|
|
def get_valid_pair_combination(self, curr_1: str, curr_2: str) -> str:
|
2019-07-03 18:06:50 +00:00
|
|
|
"""
|
2019-07-07 04:36:35 +00:00
|
|
|
Get valid pair combination of curr_1 and curr_2 by trying both combinations.
|
2019-07-03 18:06:50 +00:00
|
|
|
"""
|
2019-07-07 04:36:35 +00:00
|
|
|
for pair in [f"{curr_1}/{curr_2}", f"{curr_2}/{curr_1}"]:
|
2019-07-03 18:20:12 +00:00
|
|
|
if pair in self.markets and self.markets[pair].get('active'):
|
2019-07-03 18:06:50 +00:00
|
|
|
return pair
|
2020-06-28 14:01:40 +00:00
|
|
|
raise ExchangeError(f"Could not combine {curr_1} and {curr_2} to get a valid pair.")
|
2019-07-03 18:06:50 +00:00
|
|
|
|
2019-09-29 20:08:11 +00:00
|
|
|
    def validate_timeframes(self, timeframe: Optional[str]) -> None:
        """
        Check if timeframe from config is a supported timeframe on the exchange
        :param timeframe: timeframe from the configuration (may be None, e.g. for utils)
        :raises OperationalException: if the exchange lists no timeframes at all,
            the timeframe is not supported, or it is below 1 minute.
        """
        if not hasattr(self._api, "timeframes") or self._api.timeframes is None:
            # If timeframes attribute is missing (or is None), the exchange probably
            # has no fetchOHLCV method.
            # Therefore we also show that.
            raise OperationalException(
                f"The ccxt library does not provide the list of timeframes "
                f"for the exchange \"{self.name}\" and this exchange "
                f"is therefore not supported. ccxt fetchOHLCV: {self.exchange_has('fetchOHLCV')}")

        if timeframe and (timeframe not in self.timeframes):
            raise OperationalException(
                f"Invalid timeframe '{timeframe}'. This exchange supports: {self.timeframes}")

        if timeframe and timeframe_to_minutes(timeframe) < 1:
            raise OperationalException("Timeframes < 1m are currently not supported by Freqtrade.")
|
2020-01-11 10:36:28 +00:00
|
|
|
|
2018-11-17 18:54:55 +00:00
|
|
|
def validate_ordertypes(self, order_types: Dict) -> None:
|
|
|
|
"""
|
|
|
|
Checks if order-types configured in strategy/config are supported
|
|
|
|
"""
|
|
|
|
if any(v == 'market' for k, v in order_types.items()):
|
|
|
|
if not self.exchange_has('createMarketOrder'):
|
|
|
|
raise OperationalException(
|
|
|
|
f'Exchange {self.name} does not support market orders.')
|
|
|
|
|
2019-02-24 18:35:29 +00:00
|
|
|
if (order_types.get("stoploss_on_exchange")
|
|
|
|
and not self._ft_has.get("stoploss_on_exchange", False)):
|
2019-02-24 19:18:41 +00:00
|
|
|
raise OperationalException(
|
2019-08-24 17:41:11 +00:00
|
|
|
f'On exchange stoploss is not supported for {self.name}.'
|
2019-02-24 19:18:41 +00:00
|
|
|
)
|
2018-11-25 16:22:56 +00:00
|
|
|
|
2018-11-25 20:09:35 +00:00
|
|
|
def validate_order_time_in_force(self, order_time_in_force: Dict) -> None:
|
|
|
|
"""
|
|
|
|
Checks if order time in force configured in strategy/config are supported
|
|
|
|
"""
|
2019-03-25 23:49:39 +00:00
|
|
|
if any(v not in self._ft_has["order_time_in_force"]
|
|
|
|
for k, v in order_time_in_force.items()):
|
2019-03-21 18:12:15 +00:00
|
|
|
raise OperationalException(
|
2019-03-27 19:51:55 +00:00
|
|
|
f'Time in force policies are not supported for {self.name} yet.')
|
2018-11-25 20:09:35 +00:00
|
|
|
|
2021-02-14 09:29:45 +00:00
|
|
|
def validate_required_startup_candles(self, startup_candles: int, timeframe: str) -> None:
|
2019-10-27 09:38:21 +00:00
|
|
|
"""
|
2021-02-14 09:29:45 +00:00
|
|
|
Checks if required startup_candles is more than ohlcv_candle_limit().
|
2019-10-27 09:56:38 +00:00
|
|
|
Requires a grace-period of 5 candles - so a startup-period up to 494 is allowed by default.
|
2019-10-27 09:38:21 +00:00
|
|
|
"""
|
2021-02-14 09:29:45 +00:00
|
|
|
candle_limit = self.ohlcv_candle_limit(timeframe)
|
|
|
|
if startup_candles + 5 > candle_limit:
|
2019-10-27 09:38:21 +00:00
|
|
|
raise OperationalException(
|
|
|
|
f"This strategy requires {startup_candles} candles to start. "
|
2021-02-14 09:29:45 +00:00
|
|
|
f"{self.name} only provides {candle_limit} for {timeframe}.")
|
2019-10-27 09:38:21 +00:00
|
|
|
|
2018-06-17 10:41:33 +00:00
|
|
|
def exchange_has(self, endpoint: str) -> bool:
|
|
|
|
"""
|
|
|
|
Checks if exchange implements a specific API endpoint.
|
|
|
|
Wrapper around ccxt 'has' attribute
|
|
|
|
:param endpoint: Name of endpoint (e.g. 'fetchOHLCV', 'fetchTickers')
|
|
|
|
:return: bool
|
|
|
|
"""
|
2018-06-18 20:07:15 +00:00
|
|
|
return endpoint in self._api.has and self._api.has[endpoint]
|
2018-06-17 10:41:33 +00:00
|
|
|
|
2020-02-02 04:00:40 +00:00
|
|
|
    def amount_to_precision(self, pair: str, amount: float) -> float:
        """
        Returns the amount to buy or sell to a precision the Exchange accepts
        Re-implementation of ccxt internal methods - ensuring we can test the result is correct
        based on our definitions.
        :param pair: Pair used to look up the market's amount precision
        :param amount: Amount to adjust
        :return: Truncated amount (unchanged if the market defines no amount precision)
        """
        if self.markets[pair]['precision']['amount']:
            # TRUNCATE (round towards zero) so we never order more than intended.
            amount = float(decimal_to_precision(amount, rounding_mode=TRUNCATE,
                                                precision=self.markets[pair]['precision']['amount'],
                                                counting_mode=self.precisionMode,
                                                ))

        return amount
|
|
|
|
|
2020-02-02 04:00:40 +00:00
|
|
|
def price_to_precision(self, pair: str, price: float) -> float:
    """
    Returns the price rounded up to the precision the Exchange accepts.
    Partial Re-implementation of ccxt internal method decimal_to_precision(),
    which does not support rounding up.
    TODO: If ccxt supports ROUND_UP for decimal_to_precision(), we could remove this and
    align with amount_to_precision().
    Rounds up
    """
    price_precision = self.markets[pair]['precision']['price']
    if not price_precision:
        return price
    if self.precisionMode == TICK_SIZE:
        # Tick-size mode: snap the price up to the next multiple of the tick.
        remainder = price % price_precision
        if remainder != 0:
            price = price - remainder + price_precision
    else:
        # Decimal-places mode: shift, ceil, shift back.
        factor = pow(10, price_precision)
        price = ceil(price * factor) / factor
    return price
|
|
|
|
|
2020-04-15 05:19:27 +00:00
|
|
|
def price_get_one_pip(self, pair: str, price: float) -> float:
    """
    Get's the "1 pip" value for this pair.
    Used in PriceFilter to calculate the 1pip movements.
    """
    precision = self.markets[pair]['precision']['price']
    # TICK_SIZE exchanges store the tick value directly;
    # other modes store a decimal-place count.
    return precision if self.precisionMode == TICK_SIZE else 1 / pow(10, precision)
|
|
|
|
|
2021-02-02 18:47:21 +00:00
|
|
|
def get_min_pair_stake_amount(self, pair: str, price: float,
                              stoploss: float) -> Optional[float]:
    """
    Minimum stake (quote currency) the exchange accepts for this pair,
    scaled up by a reserve covering the configured stoploss distance.
    :param pair: Pair to look up in self.markets
    :param price: Price the order would be placed at
    :param stoploss: Stoploss ratio (e.g. -0.05)
    :return: Minimal stake amount, or None if the exchange defines no limits
    :raises ValueError: if the pair is unknown to self.markets
    """
    try:
        market = self.markets[pair]
    except KeyError:
        raise ValueError(f"Can't get market information for symbol {pair}")

    if 'limits' not in market:
        return None

    limits = market['limits']
    min_stake_amounts = []
    cost_min = limits.get('cost', {}).get('min')
    if cost_min is not None:
        min_stake_amounts.append(cost_min)
    amount_min = limits.get('amount', {}).get('min')
    if amount_min is not None:
        # Amount limit is in base currency - convert to stake via price.
        min_stake_amounts.append(amount_min * price)

    if not min_stake_amounts:
        return None

    # reserve some percent defined in config (5% default) + stoploss
    amount_reserve_percent = 1.0 + self._config.get('amount_reserve_percent',
                                                    DEFAULT_AMOUNT_RESERVE_PERCENT)
    amount_reserve_percent = (
        amount_reserve_percent / (1 - abs(stoploss)) if abs(stoploss) != 1 else 1.5
    )
    # it should not be more than 50%
    amount_reserve_percent = max(min(amount_reserve_percent, 1.5), 1)

    # The value returned should satisfy both limits: for amount (base currency) and
    # for cost (quote, stake currency), so max() is used here.
    # See also #2575 at github.
    return max(min_stake_amounts) * amount_reserve_percent
|
2021-02-02 18:47:21 +00:00
|
|
|
|
2021-06-02 09:17:50 +00:00
|
|
|
# Dry-run methods
|
|
|
|
|
2021-04-10 11:50:56 +00:00
|
|
|
def create_dry_run_order(self, pair: str, ordertype: str, side: str, amount: float,
                         rate: float, params: Dict = {}) -> Dict[str, Any]:
    # NOTE(review): mutable default `params={}` - harmless while unused, but
    # should be `params: Optional[Dict] = None`; confirm no caller relies on it.
    """
    Simulate an order without contacting the exchange (dry-run mode).
    Builds a ccxt-style order dict, estimates market-order fill price from
    the orderbook, attaches a simulated fee, checks limit-order fill status
    and stores the order in self._dry_run_open_orders.
    :param pair: Pair the order is for
    :param ordertype: e.g. 'limit', 'market' or a stoploss type
    :param side: 'buy' or 'sell'
    :param amount: Order amount - reduced to exchange amount-precision
    :param rate: Order price (also used as stopPrice for stoploss types)
    :return: the simulated order dict
    """
    order_id = f'dry_run_{side}_{datetime.now().timestamp()}'
    _amount = self.amount_to_precision(pair, amount)
    dry_order: Dict[str, Any] = {
        'id': order_id,
        'symbol': pair,
        'price': rate,
        'average': rate,
        'amount': _amount,
        'cost': _amount * rate,
        'type': ordertype,
        'side': side,
        'remaining': _amount,
        'datetime': arrow.utcnow().isoformat(),
        'timestamp': arrow.utcnow().int_timestamp * 1000,
        # Market orders are assumed to fill immediately.
        'status': "closed" if ordertype == "market" else "open",
        'fee': None,
        'info': {}
    }
    if dry_order["type"] in ["stop_loss_limit", "stop-loss-limit"]:
        # Mimic exchanges that report the trigger price in 'info'.
        dry_order["info"] = {"stopPrice": dry_order["price"]}

    if dry_order["type"] == "market":
        # Update market order pricing - estimate average fill from the orderbook.
        average = self.get_dry_market_fill_price(pair, side, amount, rate)
        dry_order.update({
            'average': average,
            'cost': dry_order['amount'] * average,
        })

    dry_order = self.add_dry_order_fee(pair, dry_order)

    # Limit orders may fill immediately if the book already crosses the limit.
    dry_order = self.check_dry_limit_order_filled(dry_order)

    self._dry_run_open_orders[dry_order["id"]] = dry_order
    # Returned order stays open unless it was a market order or filled above.
    return dry_order
|
2018-04-06 07:57:08 +00:00
|
|
|
|
2021-06-05 13:22:52 +00:00
|
|
|
def add_dry_order_fee(self, pair: str, dry_order: Dict[str, Any]) -> Dict[str, Any]:
    """
    Attach a simulated, ccxt-style fee to a dry-run order.
    :param pair: Pair the order is for - the fee is charged in its quote currency
    :param dry_order: Dry-run order dict containing at least 'cost'
    :return: the same order dict (mutated in place) with a 'fee' substructure
    """
    # Fetch the fee rate only once - get_fee is retrier-wrapped and may hit the
    # exchange API; a single value also keeps 'cost' and 'rate' consistent.
    fee_rate = self.get_fee(pair)
    dry_order.update({
        'fee': {
            'currency': self.get_pair_quote_currency(pair),
            'cost': dry_order['cost'] * fee_rate,
            'rate': fee_rate
        }
    })
    return dry_order
|
2021-06-03 18:55:18 +00:00
|
|
|
|
|
|
|
def get_dry_market_fill_price(self, pair: str, side: str, amount: float, rate: float) -> float:
    """
    Get the market order fill price based on orderbook interpolation
    Walks the top 20 orderbook levels, consuming volume until the requested
    amount is covered, and returns the volume-weighted average price, capped
    at 5% slippage from the requested rate.
    :param pair: Pair the simulated market order is for
    :param side: 'buy' or 'sell'
    :param amount: Order amount in base currency
    :param rate: Requested rate - also the fallback when no orderbook is available
    :return: Estimated average fill price, rounded to exchange price precision
    """
    if self.exchange_has('fetchL2OrderBook'):
        ob = self.fetch_l2_order_book(pair, 20)
        # Buys consume asks, sells consume bids.
        ob_type = 'asks' if side == 'buy' else 'bids'
        # Cap simulated slippage at 5% away from the requested rate.
        slippage = 0.05
        max_slippage_val = rate * ((1 + slippage) if side == 'buy' else (1 - slippage))

        remaining_amount = amount
        filled_amount = 0
        # NOTE(review): if the orderbook side is empty, the for-else below reads
        # an unbound book_entry_price - presumably a non-empty book is guaranteed
        # upstream; confirm.
        for book_entry in ob[ob_type]:
            book_entry_price = book_entry[0]
            book_entry_coin_volume = book_entry[1]
            if remaining_amount > 0:
                if remaining_amount < book_entry_coin_volume:
                    # Orderbook at this slot bigger than remaining amount
                    filled_amount += remaining_amount * book_entry_price
                    break
                else:
                    filled_amount += book_entry_coin_volume * book_entry_price
                    remaining_amount -= book_entry_coin_volume
            else:
                break
        else:
            # If remaining_amount wasn't consumed completely (break was not called)
            # fill the leftover at the last (deepest) seen price.
            filled_amount += remaining_amount * book_entry_price
        forecast_avg_filled_price = max(filled_amount, 0) / amount
        # Limit max. slippage to specified value
        if side == 'buy':
            forecast_avg_filled_price = min(forecast_avg_filled_price, max_slippage_val)

        else:
            forecast_avg_filled_price = max(forecast_avg_filled_price, max_slippage_val)

        return self.price_to_precision(pair, forecast_avg_filled_price)

    # No orderbook support - assume a perfect fill at the requested rate.
    return rate
|
|
|
|
|
2021-06-05 13:22:52 +00:00
|
|
|
def _is_dry_limit_order_filled(self, pair: str, side: str, limit: float) -> bool:
    """
    Decide whether a simulated limit order would fill, by comparing the
    limit price against the current top of the orderbook.
    Without orderbook support we optimistically assume an immediate fill.
    """
    if not self.exchange_has('fetchL2OrderBook'):
        return True
    ob = self.fetch_l2_order_book(pair, 1)
    if side == 'buy':
        # A buy fills once the best ask is at or below our limit.
        price = ob['asks'][0][0]
        logger.debug(f"{pair} checking dry buy-order: price={price}, limit={limit}")
        return limit >= price
    # A sell fills once the best bid is at or above our limit.
    price = ob['bids'][0][0]
    logger.debug(f"{pair} checking dry sell-order: price={price}, limit={limit}")
    return limit <= price
|
2019-02-23 15:03:15 +00:00
|
|
|
|
2021-06-05 13:22:52 +00:00
|
|
|
def check_dry_limit_order_filled(self, order: Dict[str, Any]) -> Dict[str, Any]:
    """
    Check dry-run limit order fill and update fee (if it filled).
    :param order: Dry-run order dict - mutated in place when it fills
    :return: the (possibly updated) order
    """
    still_open_limit = order['status'] != "closed" and order['type'] in ["limit"]
    if still_open_limit:
        pair = order['symbol']
        if self._is_dry_limit_order_filled(pair, order['side'], order['price']):
            # Mark as fully filled and attach the simulated fee.
            order.update({
                'status': 'closed',
                'filled': order['amount'],
                'remaining': 0,
            })
            self.add_dry_order_fee(pair, order)
    return order
|
2019-02-23 15:03:15 +00:00
|
|
|
|
2021-06-02 09:06:32 +00:00
|
|
|
def fetch_dry_run_order(self, order_id) -> Dict[str, Any]:
    """
    Return dry-run order
    Only call if running in dry-run mode.
    """
    try:
        order = self._dry_run_open_orders[order_id]
        # Re-check limit-order fill status before handing the order out.
        return self.check_dry_limit_order_filled(order)
    except KeyError as e:
        # Gracefully handle errors with dry-run orders.
        raise InvalidOrderException(
            f'Tried to get an invalid dry-run-order (id: {order_id}). Message: {e}') from e
|
|
|
|
|
2021-06-02 09:17:50 +00:00
|
|
|
# Order handling
|
|
|
|
|
2019-02-22 18:02:31 +00:00
|
|
|
def create_order(self, pair: str, ordertype: str, side: str, amount: float,
                 rate: float, time_in_force: str = 'gtc') -> Dict:
    """
    Place an order on the exchange, or simulate it when dry_run is enabled.
    :param pair: Pair to trade
    :param ordertype: Order type (e.g. 'limit', 'market')
    :param side: 'buy' or 'sell'
    :param amount: Order amount - reduced to exchange amount-precision
    :param rate: Order price - reduced to exchange price-precision
    :param time_in_force: forwarded as 'timeInForce' for non-market orders unless 'gtc'
    :return: ccxt order dict (or simulated dry-run order)
    :raises InsufficientFundsError, ExchangeError, DDosProtection, TemporaryError,
        OperationalException: mapped from the corresponding ccxt exceptions
    """
    if self._config['dry_run']:
        dry_order = self.create_dry_run_order(pair, ordertype, side, amount, rate)
        return dry_order

    params = self._params.copy()
    if time_in_force != 'gtc' and ordertype != 'market':
        params.update({'timeInForce': time_in_force})

    try:
        # Set the precision for amount and price(rate) as accepted by the exchange
        amount = self.amount_to_precision(pair, amount)
        # Some exchanges require a price even for market buys.
        needs_price = (ordertype != 'market'
                       or self._api.options.get("createMarketBuyOrderRequiresPrice", False))
        rate_for_order = self.price_to_precision(pair, rate) if needs_price else None

        order = self._api.create_order(pair, ordertype, side,
                                       amount, rate_for_order, params)
        self._log_exchange_response('create_order', order)
        return order

    except ccxt.InsufficientFunds as e:
        raise InsufficientFundsError(
            f'Insufficient funds to create {ordertype} {side} order on market {pair}. '
            f'Tried to {side} amount {amount} at rate {rate}.'
            f'Message: {e}') from e
    except ccxt.InvalidOrder as e:
        raise ExchangeError(
            f'Could not create {ordertype} {side} order on market {pair}. '
            f'Tried to {side} amount {amount} at rate {rate}. '
            f'Message: {e}') from e
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.NetworkError, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f'Could not place {side} order due to {e.__class__.__name__}. Message: {e}') from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
2017-10-06 10:22:04 +00:00
|
|
|
|
2020-01-19 18:54:30 +00:00
|
|
|
def stoploss_adjust(self, stop_loss: float, order: Dict) -> bool:
    """
    Verify stop_loss against stoploss-order value (limit or price)
    Returns True if adjustment is necessary.
    """
    # Base implementation: stoploss handling is exchange-specific and must
    # be provided by the exchange subclass.
    raise OperationalException(f"stoploss is not implemented for {self.name}.")
|
|
|
|
|
2020-01-19 12:30:56 +00:00
|
|
|
def stoploss(self, pair: str, amount: float, stop_price: float, order_types: Dict) -> Dict:
    """
    creates a stoploss order.
    The precise ordertype is determined by the order_types dict or exchange default.
    Since ccxt does not unify stoploss-limit orders yet, this needs to be implemented in each
    exchange's subclass.
    The exception below should never raise, since we disallow
    starting the bot in validate_ordertypes()
    Note: Changes to this interface need to be applied to all sub-classes too.
    """

    raise OperationalException(f"stoploss is not implemented for {self.name}.")
|
2018-11-26 17:46:59 +00:00
|
|
|
|
2021-06-02 09:17:50 +00:00
|
|
|
@retrier(retries=API_FETCH_ORDER_RETRY_COUNT)
def fetch_order(self, order_id: str, pair: str) -> Dict:
    """
    Fetch an order from the exchange (or the dry-run order store).
    :param order_id: Exchange order-id
    :param pair: Pair the order belongs to
    :return: ccxt order dict
    :raises RetryableOrderError: order not (yet) found - the retrier decorator retries
    :raises InvalidOrderException: the order-id is invalid on this exchange
    """
    if self._config['dry_run']:
        return self.fetch_dry_run_order(order_id)
    try:
        order = self._api.fetch_order(order_id, pair)
        self._log_exchange_response('fetch_order', order)
        return order
    except ccxt.OrderNotFound as e:
        raise RetryableOrderError(
            f'Order not found (pair: {pair} id: {order_id}). Message: {e}') from e
    except ccxt.InvalidOrder as e:
        raise InvalidOrderException(
            f'Tried to get an invalid order (pair: {pair} id: {order_id}). Message: {e}') from e
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.NetworkError, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f'Could not get order due to {e.__class__.__name__}. Message: {e}') from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e

# Assign method to fetch_stoploss_order to allow easy overriding in other classes
fetch_stoploss_order = fetch_order
|
|
|
|
|
|
|
|
def fetch_order_or_stoploss_order(self, order_id: str, pair: str,
                                  stoploss_order: bool = False) -> Dict:
    """
    Simple wrapper calling either fetch_order or fetch_stoploss_order depending on
    the stoploss_order parameter
    :param order_id: OrderId to fetch order
    :param pair: Pair corresponding to order_id
    :param stoploss_order: If true, uses fetch_stoploss_order, otherwise fetch_order.
    """
    fetcher = self.fetch_stoploss_order if stoploss_order else self.fetch_order
    return fetcher(order_id, pair)
|
|
|
|
|
|
|
|
def check_order_canceled_empty(self, order: Dict) -> bool:
    """
    Verify if an order has been cancelled without being partially filled
    :param order: Order dict as returned from fetch_order()
    :return: True if order has been cancelled without being filled, False otherwise.
    """
    order_closed = order.get('status') in NON_OPEN_EXCHANGE_STATES
    return order_closed and order.get('filled') == 0.0
|
|
|
|
|
|
|
|
@retrier
def cancel_order(self, order_id: str, pair: str) -> Dict:
    """
    Cancel an order on the exchange (or in the dry-run order store).
    :param order_id: id of the order to cancel
    :param pair: Pair the order belongs to
    :return: ccxt cancel result; an empty dict for unknown dry-run orders
    :raises InvalidOrderException: if the exchange rejects the cancellation
    """
    if self._config['dry_run']:
        try:
            order = self.fetch_dry_run_order(order_id)

            order.update({'status': 'canceled', 'filled': 0.0, 'remaining': order['amount']})
            return order
        except InvalidOrderException:
            # Unknown dry-run order - nothing to cancel.
            return {}

    try:
        order = self._api.cancel_order(order_id, pair)
        self._log_exchange_response('cancel_order', order)
        return order
    except ccxt.InvalidOrder as e:
        raise InvalidOrderException(
            f'Could not cancel order. Message: {e}') from e
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.NetworkError, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f'Could not cancel order due to {e.__class__.__name__}. Message: {e}') from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e

# Assign method to cancel_stoploss_order to allow easy overriding in other classes
cancel_stoploss_order = cancel_order
|
|
|
|
|
|
|
|
def is_cancel_order_result_suitable(self, corder) -> bool:
    """
    Check whether a cancel_order result carries the keys ('fee', 'status',
    'amount') needed to use it as an order dict.
    """
    if not isinstance(corder, dict):
        return False
    return all(key in corder for key in ('fee', 'status', 'amount'))
|
|
|
|
|
|
|
|
def cancel_order_with_result(self, order_id: str, pair: str, amount: float) -> Dict:
    """
    Cancel order returning a result.
    Creates a fake result if cancel order returns a non-usable result
    and fetch_order does not work (certain exchanges don't return cancelled orders)
    :param order_id: Orderid to cancel
    :param pair: Pair corresponding to order_id
    :param amount: Amount to use for fake response
    :return: Result from either cancel_order if usable, or fetch_order
    """
    try:
        cancel_result = self.cancel_order(order_id, pair)
        if self.is_cancel_order_result_suitable(cancel_result):
            return cancel_result
    except InvalidOrderException:
        logger.warning(f"Could not cancel order {order_id} for {pair}.")
    # Cancel result unusable - fall back to fetching the order.
    try:
        return self.fetch_order(order_id, pair)
    except InvalidOrderException:
        logger.warning(f"Could not fetch cancelled order {order_id}.")
        # Last resort: synthesize a minimal cancelled-order dict.
        return {'fee': {}, 'status': 'canceled', 'amount': amount, 'info': {}}
|
|
|
|
|
|
|
|
def cancel_stoploss_order_with_result(self, order_id: str, pair: str, amount: float) -> Dict:
    """
    Cancel stoploss order returning a result.
    Creates a fake result if cancel order returns a non-usable result
    and fetch_order does not work (certain exchanges don't return cancelled orders)
    :param order_id: stoploss-order-id to cancel
    :param pair: Pair corresponding to order_id
    :param amount: Amount to use for fake response
    :return: Result from either cancel_order if usable, or fetch_order
    """
    cancel_result = self.cancel_stoploss_order(order_id, pair)
    if self.is_cancel_order_result_suitable(cancel_result):
        return cancel_result
    # Cancel result unusable - fall back to fetching the stoploss order.
    try:
        return self.fetch_stoploss_order(order_id, pair)
    except InvalidOrderException:
        logger.warning(f"Could not fetch cancelled stoploss order {order_id}.")
        # Last resort: synthesize a minimal cancelled-order dict.
        return {'fee': {}, 'status': 'canceled', 'amount': amount, 'info': {}}
|
|
|
|
|
2018-06-17 10:41:33 +00:00
|
|
|
@retrier
def get_balances(self) -> dict:
    """
    Fetch account balances from the exchange.
    :return: ccxt balance dict keyed by currency, with the aggregate
        'info'/'free'/'total'/'used' entries removed.
    """

    try:
        balances = self._api.fetch_balance()
        # Remove additional info from ccxt results
        balances.pop("info", None)
        balances.pop("free", None)
        balances.pop("total", None)
        balances.pop("used", None)

        return balances
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.NetworkError, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f'Could not get balance due to {e.__class__.__name__}. Message: {e}') from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
2017-09-08 13:51:00 +00:00
|
|
|
|
2018-06-17 10:41:33 +00:00
|
|
|
@retrier
def get_tickers(self, cached: bool = False) -> Dict:
    """
    :param cached: Allow cached result
    :return: fetch_tickers result
    """
    if cached:
        # Serve from the TTL cache while it still holds a result.
        tickers = self._fetch_tickers_cache.get('fetch_tickers')
        if tickers:
            return tickers
    try:
        tickers = self._api.fetch_tickers()
        self._fetch_tickers_cache['fetch_tickers'] = tickers
        return tickers
    except ccxt.NotSupported as e:
        raise OperationalException(
            f'Exchange {self._api.name} does not support fetching tickers in batch. '
            f'Message: {e}') from e
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.NetworkError, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f'Could not load tickers due to {e.__class__.__name__}. Message: {e}') from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
2017-09-08 13:51:00 +00:00
|
|
|
|
2021-06-02 09:17:50 +00:00
|
|
|
# Pricing info
|
|
|
|
|
2018-06-17 10:41:33 +00:00
|
|
|
@retrier
def fetch_ticker(self, pair: str) -> dict:
    """
    Fetch the ticker for a single pair.
    :param pair: Pair to fetch - must be listed and active in self.markets
    :raises ExchangeError: if the pair is unavailable or inactive
    """
    try:
        if (pair not in self.markets or
                self.markets[pair].get('active', False) is False):
            raise ExchangeError(f"Pair {pair} not available")
        data = self._api.fetch_ticker(pair)
        return data
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.NetworkError, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f'Could not load ticker due to {e.__class__.__name__}. Message: {e}') from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
2018-06-17 10:41:33 +00:00
|
|
|
|
2021-06-02 09:17:50 +00:00
|
|
|
@staticmethod
|
|
|
|
def get_next_limit_in_list(limit: int, limit_range: Optional[List[int]],
|
|
|
|
range_required: bool = True):
|
|
|
|
"""
|
|
|
|
Get next greater value in the list.
|
|
|
|
Used by fetch_l2_order_book if the api only supports a limited range
|
|
|
|
"""
|
|
|
|
if not limit_range:
|
|
|
|
return limit
|
|
|
|
|
|
|
|
result = min([x for x in limit_range if limit <= x] + [max(limit_range)])
|
|
|
|
if not range_required and limit > result:
|
|
|
|
# Range is not required - we can use None as parameter.
|
|
|
|
return None
|
|
|
|
return result
|
|
|
|
|
|
|
|
@retrier
def fetch_l2_order_book(self, pair: str, limit: int = 100) -> dict:
    """
    Get L2 order book from exchange.
    Can be limited to a certain amount (if supported).
    Returns a dict in the format
    {'asks': [price, volume], 'bids': [price, volume]}
    """
    # Snap the requested limit to the closest value the exchange supports.
    limit1 = self.get_next_limit_in_list(limit, self._ft_has['l2_limit_range'],
                                         self._ft_has['l2_limit_range_required'])
    try:

        return self._api.fetch_l2_order_book(pair, limit1)
    except ccxt.NotSupported as e:
        raise OperationalException(
            f'Exchange {self._api.name} does not support fetching order book.'
            f'Message: {e}') from e
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.NetworkError, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f'Could not get order book due to {e.__class__.__name__}. Message: {e}') from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
|
|
|
|
2021-07-19 17:37:52 +00:00
|
|
|
def get_rate(self, pair: str, refresh: bool, side: str) -> float:
    """
    Calculates bid/ask target
    bid rate - between current ask price and last price
    ask rate - either using ticker bid or first bid based on orderbook
    or remain static in any other case since it's not updating.
    :param pair: Pair to get rate for
    :param refresh: allow cached data
    :param side: "buy" or "sell"
    :return: float: Price
    :raises PricingError if orderbook price could not be determined.
    """
    cache_rate: TTLCache = self._buy_rate_cache if side == "buy" else self._sell_rate_cache
    strat_name, name = ('bid_strategy', 'Buy') if side == "buy" else ('ask_strategy', 'Sell')

    if not refresh:
        # Stale entries are evicted by the TTLCache itself - a hit is fresh enough.
        rate = cache_rate.get(pair)
        if rate:
            logger.debug(f"Using cached {side} rate for {pair}.")
            return rate

    conf_strategy = self._config.get(strat_name, {})

    # .get() already yields False for a missing key, so the former additional
    # "'use_order_book' in conf_strategy" check was redundant and is dropped.
    if conf_strategy.get('use_order_book', False):

        order_book_top = conf_strategy.get('order_book_top', 1)
        order_book = self.fetch_l2_order_book(pair, order_book_top)
        logger.debug('order_book %s', order_book)
        # top 1 = index 0
        try:
            rate = order_book[f"{conf_strategy['price_side']}s"][order_book_top - 1][0]
        except (IndexError, KeyError) as e:
            logger.warning(
                f"{name} Price at location {order_book_top} from orderbook could not be "
                f"determined. Orderbook: {order_book}"
            )
            raise PricingError from e
        # Fixed: price_side was previously wrapped in a set literal, making the
        # debug line render as e.g. "{'Ask'}side" instead of "Askside".
        price_side = conf_strategy['price_side'].capitalize()
        logger.debug(f"{name} price from orderbook {price_side}"
                     f"side - top {order_book_top} order book {side} rate {rate:.8f}")
    else:
        logger.debug(f"Using Last {conf_strategy['price_side'].capitalize()} / Last Price")
        ticker = self.fetch_ticker(pair)
        ticker_rate = ticker[conf_strategy['price_side']]
        if ticker['last'] and ticker_rate:
            if side == 'buy' and ticker_rate > ticker['last']:
                # Interpolate between ask and last price, weighted by ask_last_balance.
                balance = conf_strategy['ask_last_balance']
                ticker_rate = ticker_rate + balance * (ticker['last'] - ticker_rate)
            elif side == 'sell' and ticker_rate < ticker['last']:
                balance = conf_strategy.get('bid_last_balance', 0.0)
                ticker_rate = ticker_rate - balance * (ticker_rate - ticker['last'])
        rate = ticker_rate

    if rate is None:
        raise PricingError(f"{name}-Rate for {pair} was empty.")
    cache_rate[pair] = rate

    return rate
|
|
|
|
|
2021-06-02 09:20:26 +00:00
|
|
|
# Fee handling
|
|
|
|
|
|
|
|
@retrier
def get_trades_for_order(self, order_id: str, pair: str, since: datetime) -> List:
    """
    Fetch Orders using the "fetch_my_trades" endpoint and filter them by order-id.
    The "since" argument passed in is coming from the database and is in UTC,
    as timezone-native datetime object.
    From the python documentation:
        > Naive datetime instances are assumed to represent local time
    Therefore, calling "since.timestamp()" will get the UTC timestamp, after applying the
    transformation from local timezone to UTC.
    This works for timezones UTC+ since then the result will contain trades from a few hours
    instead of from the last 5 seconds, however fails for UTC- timezones,
    since we're then asking for trades with a "since" argument in the future.

    :param order_id order_id: Order-id as given when creating the order
    :param pair: Pair the order is for
    :param since: datetime object of the order creation time. Assumes object is in UTC.
    """
    if self._config['dry_run']:
        # Dry-run orders never produce real trades.
        return []
    if not self.exchange_has('fetchMyTrades'):
        return []
    try:
        # Allow 5s offset to catch slight time offsets (discovered in #1185)
        # since needs to be int in milliseconds
        my_trades = self._api.fetch_my_trades(
            pair, int((since.replace(tzinfo=timezone.utc).timestamp() - 5) * 1000))
        matched_trades = [trade for trade in my_trades if trade['order'] == order_id]

        self._log_exchange_response('get_trades_for_order', matched_trades)
        return matched_trades
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.NetworkError, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f'Could not get trades due to {e.__class__.__name__}. Message: {e}') from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
|
|
|
|
|
|
|
def get_order_id_conditional(self, order: Dict[str, Any]) -> str:
    """
    Extract the order-id from a ccxt order dict.
    Kept as a method so exchange subclasses can presumably use a
    different id field - confirm against subclass overrides.
    """
    return order['id']
|
|
|
|
|
|
|
|
@retrier
def get_fee(self, symbol: str, type: str = '', side: str = '', amount: float = 1,
            price: float = 1, taker_or_maker: str = 'maker') -> float:
    """
    Return the trading fee rate for a symbol.
    In dry-run mode with a configured 'fee', that static value is returned instead.
    :param symbol: Pair the fee applies to
    :param type: Order type - forwarded to ccxt's calculate_fee
    :param side: 'buy' or 'sell' - forwarded to ccxt's calculate_fee
    :param amount: Order amount used for the fee calculation
    :param price: Order price used for the fee calculation
    :param taker_or_maker: 'taker' or 'maker' fee schedule
    :return: Fee rate as a fraction (e.g. 0.001 for 0.1%)
    """
    try:
        if self._config['dry_run'] and self._config.get('fee', None) is not None:
            return self._config['fee']
        # validate that markets are loaded before trying to get fee
        if self._api.markets is None or len(self._api.markets) == 0:
            self._api.load_markets()

        return self._api.calculate_fee(symbol=symbol, type=type, side=side, amount=amount,
                                       price=price, takerOrMaker=taker_or_maker)['rate']
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.NetworkError, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f'Could not get fee info due to {e.__class__.__name__}. Message: {e}') from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def order_has_fee(order: Dict) -> bool:
|
|
|
|
"""
|
|
|
|
Verifies if the passed in order dict has the needed keys to extract fees,
|
|
|
|
and that these keys (currency, cost) are not empty.
|
|
|
|
:param order: Order or trade (one trade) dict
|
|
|
|
:return: True if the fee substructure contains currency and cost, false otherwise
|
|
|
|
"""
|
|
|
|
if not isinstance(order, dict):
|
|
|
|
return False
|
|
|
|
return ('fee' in order and order['fee'] is not None
|
|
|
|
and (order['fee'].keys() >= {'currency', 'cost'})
|
|
|
|
and order['fee']['currency'] is not None
|
|
|
|
and order['fee']['cost'] is not None
|
|
|
|
)
|
|
|
|
|
|
|
|
    def calculate_fee_rate(self, order: Dict) -> Optional[float]:
        """
        Calculate fee rate if it's not given by the exchange.
        :param order: Order or trade (one trade) dict.
            Expects the 'fee' substructure to be present (check `order_has_fee` first).
        :return: Fee rate as fraction of cost, or None if it cannot be determined.
        """
        if order['fee'].get('rate') is not None:
            # Exchange provided the rate directly - nothing to compute.
            return order['fee'].get('rate')
        fee_curr = order['fee']['currency']
        # Calculate fee based on order details
        # NOTE(review): `in` performs a substring check on the currency string
        # (e.g. 'TC' in 'BTC' is True) - presumably `==` is intended; confirm.
        if fee_curr in self.get_pair_base_currency(order['symbol']):
            # Base currency - divide by amount
            return round(
                order['fee']['cost'] / safe_value_fallback2(order, order, 'filled', 'amount'), 8)
        elif fee_curr in self.get_pair_quote_currency(order['symbol']):
            # Quote currency - divide by cost
            return round(order['fee']['cost'] / order['cost'], 8) if order['cost'] else None
        else:
            # If Fee currency is a different currency
            if not order['cost']:
                # If cost is None or 0.0 -> falsy, return None
                return None
            try:
                # Convert the fee into order-quote terms via a ticker for the
                # fee-currency / stake-currency pair.
                comb = self.get_valid_pair_combination(fee_curr, self._config['stake_currency'])
                tick = self.fetch_ticker(comb)

                fee_to_quote_rate = safe_value_fallback2(tick, tick, 'last', 'ask')
                return round((order['fee']['cost'] * fee_to_quote_rate) / order['cost'], 8)
            except ExchangeError:
                # No usable conversion pair available - rate cannot be determined.
                return None
|
|
|
|
|
|
|
|
def extract_cost_curr_rate(self, order: Dict) -> Tuple[float, str, Optional[float]]:
|
|
|
|
"""
|
|
|
|
Extract tuple of cost, currency, rate.
|
|
|
|
Requires order_has_fee to run first!
|
|
|
|
:param order: Order or trade (one trade) dict
|
|
|
|
:return: Tuple with cost, currency, rate of the given fee dict
|
|
|
|
"""
|
|
|
|
return (order['fee']['cost'],
|
|
|
|
order['fee']['currency'],
|
|
|
|
self.calculate_fee_rate(order))
|
|
|
|
|
|
|
|
# Historic data
|
|
|
|
|
2019-11-02 19:25:18 +00:00
|
|
|
def get_historic_ohlcv(self, pair: str, timeframe: str,
|
2019-08-14 08:14:54 +00:00
|
|
|
since_ms: int) -> List:
|
2018-08-10 09:08:28 +00:00
|
|
|
"""
|
2020-03-08 10:35:31 +00:00
|
|
|
Get candle history using asyncio and returns the list of candles.
|
|
|
|
Handles all async work for this.
|
2021-02-14 09:29:45 +00:00
|
|
|
Async over one pair, assuming we get `self.ohlcv_candle_limit()` candles per call.
|
2019-08-14 07:36:28 +00:00
|
|
|
:param pair: Pair to download
|
2020-03-08 10:35:31 +00:00
|
|
|
:param timeframe: Timeframe to get data for
|
2019-08-14 07:36:28 +00:00
|
|
|
:param since_ms: Timestamp in milliseconds to get history from
|
2020-11-21 14:28:50 +00:00
|
|
|
:return: List with candle (OHLCV) data
|
2018-08-10 09:08:28 +00:00
|
|
|
"""
|
|
|
|
return asyncio.get_event_loop().run_until_complete(
|
2019-11-02 19:25:18 +00:00
|
|
|
self._async_get_historic_ohlcv(pair=pair, timeframe=timeframe,
|
2019-08-14 08:14:54 +00:00
|
|
|
since_ms=since_ms))
|
2018-08-10 09:08:28 +00:00
|
|
|
|
2020-11-21 14:28:50 +00:00
|
|
|
def get_historic_ohlcv_as_df(self, pair: str, timeframe: str,
|
|
|
|
since_ms: int) -> DataFrame:
|
|
|
|
"""
|
|
|
|
Minimal wrapper around get_historic_ohlcv - converting the result into a dataframe
|
2020-11-22 18:59:18 +00:00
|
|
|
:param pair: Pair to download
|
2020-11-21 14:28:50 +00:00
|
|
|
:param timeframe: Timeframe to get data for
|
|
|
|
:param since_ms: Timestamp in milliseconds to get history from
|
|
|
|
:return: OHLCV DataFrame
|
|
|
|
"""
|
|
|
|
ticks = self.get_historic_ohlcv(pair, timeframe, since_ms=since_ms)
|
|
|
|
return ohlcv_to_dataframe(ticks, timeframe, pair=pair, fill_missing=True,
|
|
|
|
drop_incomplete=self._ohlcv_partial_candle)
|
|
|
|
|
2019-08-14 08:14:54 +00:00
|
|
|
async def _async_get_historic_ohlcv(self, pair: str,
|
2019-11-02 19:25:18 +00:00
|
|
|
timeframe: str,
|
2019-08-14 08:14:54 +00:00
|
|
|
since_ms: int) -> List:
|
2020-10-23 05:45:11 +00:00
|
|
|
"""
|
|
|
|
Download historic ohlcv
|
|
|
|
"""
|
2018-08-10 09:08:28 +00:00
|
|
|
|
2021-02-14 09:29:45 +00:00
|
|
|
one_call = timeframe_to_msecs(timeframe) * self.ohlcv_candle_limit(timeframe)
|
2019-05-25 19:42:17 +00:00
|
|
|
logger.debug(
|
|
|
|
"one_call: %s msecs (%s)",
|
|
|
|
one_call,
|
|
|
|
arrow.utcnow().shift(seconds=one_call // 1000).humanize(only_distance=True)
|
|
|
|
)
|
2018-08-14 18:33:03 +00:00
|
|
|
input_coroutines = [self._async_get_candle_history(
|
2019-11-02 19:25:18 +00:00
|
|
|
pair, timeframe, since) for since in
|
2020-10-12 17:58:04 +00:00
|
|
|
range(since_ms, arrow.utcnow().int_timestamp * 1000, one_call)]
|
2018-12-29 18:30:47 +00:00
|
|
|
|
2020-03-08 10:35:31 +00:00
|
|
|
results = await asyncio.gather(*input_coroutines, return_exceptions=True)
|
2018-08-10 09:08:28 +00:00
|
|
|
|
2020-03-08 10:35:31 +00:00
|
|
|
# Combine gathered results
|
2018-08-10 09:15:02 +00:00
|
|
|
data: List = []
|
2020-10-23 05:45:11 +00:00
|
|
|
for res in results:
|
|
|
|
if isinstance(res, Exception):
|
|
|
|
logger.warning("Async code raised an exception: %s", res.__class__.__name__)
|
|
|
|
continue
|
|
|
|
# Deconstruct tuple if it's not an exception
|
|
|
|
p, _, new_data = res
|
2018-12-10 19:22:21 +00:00
|
|
|
if p == pair:
|
2020-10-23 05:45:11 +00:00
|
|
|
data.extend(new_data)
|
2018-12-29 13:18:49 +00:00
|
|
|
# Sort data again after extending the result - above calls return in "async order"
|
2018-08-18 19:08:59 +00:00
|
|
|
data = sorted(data, key=lambda x: x[0])
|
2020-03-08 10:35:31 +00:00
|
|
|
logger.info("Downloaded data for %s with length %s.", pair, len(data))
|
2018-08-10 09:08:28 +00:00
|
|
|
return data
|
|
|
|
|
2020-12-15 07:22:45 +00:00
|
|
|
    def refresh_latest_ohlcv(self, pair_list: ListPairsWithTimeframes, *,
                             since_ms: Optional[int] = None, cache: bool = True
                             ) -> Dict[Tuple[str, str], DataFrame]:
        """
        Refresh in-memory OHLCV asynchronously and set `_klines` with the result
        Loops asynchronously over pair_list and downloads all pairs async (semi-parallel).
        Only used in the dataprovider.refresh() method.
        :param pair_list: List of 2 element tuples containing pair, interval to refresh
        :param since_ms: time since when to download, in milliseconds
        :param cache: Assign result to _klines. Usefull for one-off downloads like for pairlists
        :return: Dict of [{(pair, timeframe): Dataframe}]
        """
        logger.debug("Refreshing candle (OHLCV) data for %d pairs", len(pair_list))

        input_coroutines = []
        cached_pairs = []
        # Gather coroutines to run
        # set() deduplicates (pair, timeframe) entries so each is fetched at most once.
        for pair, timeframe in set(pair_list):
            if (((pair, timeframe) not in self._klines)
                    or self._now_is_time_to_refresh(pair, timeframe)):
                input_coroutines.append(self._async_get_candle_history(pair, timeframe,
                                                                       since_ms=since_ms))
            else:
                logger.debug(
                    "Using cached candle (OHLCV) data for pair %s, timeframe %s ...",
                    pair, timeframe
                )
                cached_pairs.append((pair, timeframe))

        # return_exceptions=True: a failing pair must not abort the whole refresh.
        results = asyncio.get_event_loop().run_until_complete(
            asyncio.gather(*input_coroutines, return_exceptions=True))

        results_df = {}
        # handle caching
        for res in results:
            if isinstance(res, Exception):
                logger.warning("Async code raised an exception: %s", res.__class__.__name__)
                continue
            # Deconstruct tuple (has 3 elements)
            pair, timeframe, ticks = res
            # keeping last candle time as last refreshed time of the pair
            if ticks:
                self._pairs_last_refresh_time[(pair, timeframe)] = ticks[-1][0] // 1000
            # keeping parsed dataframe in cache
            ohlcv_df = ohlcv_to_dataframe(
                ticks, timeframe, pair=pair, fill_missing=True,
                drop_incomplete=self._ohlcv_partial_candle)
            results_df[(pair, timeframe)] = ohlcv_df
            if cache:
                self._klines[(pair, timeframe)] = ohlcv_df
        # Return cached klines
        for pair, timeframe in cached_pairs:
            results_df[(pair, timeframe)] = self.klines((pair, timeframe), copy=False)

        return results_df
|
2018-07-31 10:47:32 +00:00
|
|
|
|
2019-11-02 19:25:18 +00:00
|
|
|
def _now_is_time_to_refresh(self, pair: str, timeframe: str) -> bool:
|
2020-03-08 10:35:31 +00:00
|
|
|
# Timeframe in seconds
|
2019-11-02 19:25:18 +00:00
|
|
|
interval_in_sec = timeframe_to_seconds(timeframe)
|
2019-02-20 22:20:24 +00:00
|
|
|
|
2019-11-02 19:25:18 +00:00
|
|
|
return not ((self._pairs_last_refresh_time.get((pair, timeframe), 0)
|
2020-10-12 17:58:04 +00:00
|
|
|
+ interval_in_sec) >= arrow.utcnow().int_timestamp)
|
2019-02-20 22:20:24 +00:00
|
|
|
|
2018-08-18 19:05:38 +00:00
|
|
|
    @retrier_async
    async def _async_get_candle_history(self, pair: str, timeframe: str,
                                        since_ms: Optional[int] = None) -> Tuple[str, str, List]:
        """
        Asynchronously get candle history data using fetch_ohlcv
        returns tuple: (pair, timeframe, ohlcv_list)
        :param pair: Pair to fetch candles for
        :param timeframe: Timeframe (e.g. "5m")
        :param since_ms: Timestamp in milliseconds to fetch from, or None for exchange default
        :raises OperationalException: if the exchange does not support OHLCV, or on fatal errors
        :raises DDosProtection, TemporaryError: mapped from recoverable ccxt errors
        """
        try:
            # Fetch OHLCV asynchronously
            s = '(' + arrow.get(since_ms // 1000).isoformat() + ') ' if since_ms is not None else ''
            logger.debug(
                "Fetching pair %s, interval %s, since %s %s...",
                pair, timeframe, since_ms, s
            )
            # Exchange-specific extra parameters, if configured for this exchange.
            params = self._ft_has.get('ohlcv_params', {})
            data = await self._api_async.fetch_ohlcv(pair, timeframe=timeframe,
                                                     since=since_ms,
                                                     limit=self.ohlcv_candle_limit(timeframe),
                                                     params=params)

            # Some exchanges sort OHLCV in ASC order and others in DESC.
            # Ex: Bittrex returns the list of OHLCV in ASC order (oldest first, newest last)
            # while GDAX returns the list of OHLCV in DESC order (newest first, oldest last)
            # Only sort if necessary to save computing time
            try:
                if data and data[0][0] > data[-1][0]:
                    data = sorted(data, key=lambda x: x[0])
            except IndexError:
                # Malformed candle rows (missing columns) - log and return empty data.
                logger.exception("Error loading %s. Result was %s.", pair, data)
                return pair, timeframe, []
            logger.debug("Done fetching pair %s, interval %s ...", pair, timeframe)
            return pair, timeframe, data

        except ccxt.NotSupported as e:
            raise OperationalException(
                f'Exchange {self._api.name} does not support fetching historical '
                f'candle (OHLCV) data. Message: {e}') from e
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
            raise TemporaryError(f'Could not fetch historical candle (OHLCV) data '
                                 f'for pair {pair} due to {e.__class__.__name__}. '
                                 f'Message: {e}') from e
        except ccxt.BaseError as e:
            raise OperationalException(f'Could not fetch historical candle (OHLCV) data '
                                       f'for pair {pair}. Message: {e}') from e
|
2017-10-31 23:12:18 +00:00
|
|
|
|
2021-06-02 09:20:26 +00:00
|
|
|
# Fetch historic trades
|
|
|
|
|
2019-08-14 18:30:29 +00:00
|
|
|
@retrier_async
|
|
|
|
async def _async_fetch_trades(self, pair: str,
|
|
|
|
since: Optional[int] = None,
|
2020-03-31 18:20:10 +00:00
|
|
|
params: Optional[dict] = None) -> List[List]:
|
2019-08-14 18:30:29 +00:00
|
|
|
"""
|
|
|
|
Asyncronously gets trade history using fetch_trades.
|
2019-08-29 10:56:10 +00:00
|
|
|
Handles exchange errors, does one call to the exchange.
|
2019-08-14 18:30:29 +00:00
|
|
|
:param pair: Pair to fetch trade data for
|
|
|
|
:param since: Since as integer timestamp in milliseconds
|
2019-10-19 08:05:30 +00:00
|
|
|
returns: List of dicts containing trades
|
2019-08-14 18:30:29 +00:00
|
|
|
"""
|
|
|
|
try:
|
|
|
|
# fetch trades asynchronously
|
|
|
|
if params:
|
|
|
|
logger.debug("Fetching trades for pair %s, params: %s ", pair, params)
|
|
|
|
trades = await self._api_async.fetch_trades(pair, params=params, limit=1000)
|
|
|
|
else:
|
|
|
|
logger.debug(
|
|
|
|
"Fetching trades for pair %s, since %s %s...",
|
|
|
|
pair, since,
|
|
|
|
'(' + arrow.get(since // 1000).isoformat() + ') ' if since is not None else ''
|
|
|
|
)
|
|
|
|
trades = await self._api_async.fetch_trades(pair, since=since, limit=1000)
|
2020-03-31 18:20:10 +00:00
|
|
|
return trades_dict_to_list(trades)
|
2019-08-14 18:30:29 +00:00
|
|
|
except ccxt.NotSupported as e:
|
|
|
|
raise OperationalException(
|
|
|
|
f'Exchange {self._api.name} does not support fetching historical trade data.'
|
|
|
|
f'Message: {e}') from e
|
2020-06-28 09:17:06 +00:00
|
|
|
except ccxt.DDoSProtection as e:
|
|
|
|
raise DDosProtection(e) from e
|
2019-08-14 18:30:29 +00:00
|
|
|
except (ccxt.NetworkError, ccxt.ExchangeError) as e:
|
|
|
|
raise TemporaryError(f'Could not load trade history due to {e.__class__.__name__}. '
|
|
|
|
f'Message: {e}') from e
|
|
|
|
except ccxt.BaseError as e:
|
|
|
|
raise OperationalException(f'Could not fetch trade data. Msg: {e}') from e
|
|
|
|
|
|
|
|
    async def _async_get_trade_history_id(self, pair: str,
                                          until: int,
                                          since: Optional[int] = None,
                                          from_id: Optional[str] = None) -> Tuple[str, List[List]]:
        """
        Asyncronously gets trade history using fetch_trades
        use this when exchange uses id-based iteration (check `self._trades_pagination`)
        :param pair: Pair to fetch trade data for
        :param since: Since as integer timestamp in milliseconds
        :param until: Until as integer timestamp in milliseconds
        :param from_id: Download data starting with ID (if id is known). Ignores "since" if set.
        returns tuple: (pair, trades-list)
        """

        trades: List[List] = []

        if not from_id:
            # Fetch first elements using timebased method to get an ID to paginate on
            # Depending on the Exchange, this can introduce a drift at the start of the interval
            # of up to an hour.
            # e.g. Binance returns the "last 1000" candles within a 1h time interval
            # - so we will miss the first trades.
            t = await self._async_fetch_trades(pair, since=since)
            # DEFAULT_TRADES_COLUMNS: 0 -> timestamp
            # DEFAULT_TRADES_COLUMNS: 1 -> id
            from_id = t[-1][1]
            trades.extend(t[:-1])
        while True:
            t = await self._async_fetch_trades(pair,
                                               params={self._trades_pagination_arg: from_id})
            if t:
                # Skip last id since its the key for the next call
                trades.extend(t[:-1])
                # Stop when the id stopped advancing (no newer data) or the
                # last trade's timestamp passed the requested end of the window.
                if from_id == t[-1][1] or t[-1][0] > until:
                    logger.debug(f"Stopping because from_id did not change. "
                                 f"Reached {t[-1][0]} > {until}")
                    # Reached the end of the defined-download period - add last trade as well.
                    trades.extend(t[-1:])
                    break

                from_id = t[-1][1]
            else:
                break

        return (pair, trades)
|
2019-08-14 18:30:29 +00:00
|
|
|
|
2019-08-29 10:56:10 +00:00
|
|
|
async def _async_get_trade_history_time(self, pair: str, until: int,
|
2020-03-31 18:20:10 +00:00
|
|
|
since: Optional[int] = None) -> Tuple[str, List[List]]:
|
2019-08-14 18:30:29 +00:00
|
|
|
"""
|
2019-08-27 05:13:32 +00:00
|
|
|
Asyncronously gets trade history using fetch_trades,
|
2019-09-28 08:56:43 +00:00
|
|
|
when the exchange uses time-based iteration (check `self._trades_pagination`)
|
2019-08-14 18:30:29 +00:00
|
|
|
:param pair: Pair to fetch trade data for
|
|
|
|
:param since: Since as integer timestamp in milliseconds
|
|
|
|
:param until: Until as integer timestamp in milliseconds
|
2019-10-19 08:05:30 +00:00
|
|
|
returns tuple: (pair, trades-list)
|
2019-08-14 18:30:29 +00:00
|
|
|
"""
|
|
|
|
|
2020-03-31 18:20:10 +00:00
|
|
|
trades: List[List] = []
|
|
|
|
# DEFAULT_TRADES_COLUMNS: 0 -> timestamp
|
|
|
|
# DEFAULT_TRADES_COLUMNS: 1 -> id
|
2019-08-29 10:56:10 +00:00
|
|
|
while True:
|
|
|
|
t = await self._async_fetch_trades(pair, since=since)
|
2021-03-21 11:44:32 +00:00
|
|
|
if t:
|
2021-01-07 19:03:34 +00:00
|
|
|
since = t[-1][0]
|
2019-08-29 10:56:10 +00:00
|
|
|
trades.extend(t)
|
|
|
|
# Reached the end of the defined-download period
|
2020-03-31 18:20:10 +00:00
|
|
|
if until and t[-1][0] > until:
|
2019-08-29 11:01:44 +00:00
|
|
|
logger.debug(
|
2020-03-31 18:20:10 +00:00
|
|
|
f"Stopping because until was reached. {t[-1][0]} > {until}")
|
2019-08-14 18:30:29 +00:00
|
|
|
break
|
2019-08-29 10:56:10 +00:00
|
|
|
else:
|
|
|
|
break
|
|
|
|
|
|
|
|
return (pair, trades)
|
|
|
|
|
|
|
|
async def _async_get_trade_history(self, pair: str,
|
|
|
|
since: Optional[int] = None,
|
|
|
|
until: Optional[int] = None,
|
2020-03-31 18:20:10 +00:00
|
|
|
from_id: Optional[str] = None) -> Tuple[str, List[List]]:
|
2019-08-29 10:56:10 +00:00
|
|
|
"""
|
|
|
|
Async wrapper handling downloading trades using either time or id based methods.
|
|
|
|
"""
|
2019-08-14 18:30:29 +00:00
|
|
|
|
2020-05-31 10:05:58 +00:00
|
|
|
logger.debug(f"_async_get_trade_history(), pair: {pair}, "
|
|
|
|
f"since: {since}, until: {until}, from_id: {from_id}")
|
|
|
|
|
2020-06-02 15:43:37 +00:00
|
|
|
if until is None:
|
2020-06-02 18:09:23 +00:00
|
|
|
until = ccxt.Exchange.milliseconds()
|
|
|
|
logger.debug(f"Exchange milliseconds: {until}")
|
2020-05-31 10:05:58 +00:00
|
|
|
|
2019-09-28 08:52:53 +00:00
|
|
|
if self._trades_pagination == 'time':
|
|
|
|
return await self._async_get_trade_history_time(
|
2020-05-31 10:05:58 +00:00
|
|
|
pair=pair, since=since, until=until)
|
2019-09-28 08:52:53 +00:00
|
|
|
elif self._trades_pagination == 'id':
|
|
|
|
return await self._async_get_trade_history_id(
|
2020-05-31 10:05:58 +00:00
|
|
|
pair=pair, since=since, until=until, from_id=from_id
|
2019-09-28 08:52:53 +00:00
|
|
|
)
|
|
|
|
else:
|
|
|
|
raise OperationalException(f"Exchange {self.name} does use neither time, "
|
|
|
|
f"nor id based pagination")
|
2019-08-14 18:30:29 +00:00
|
|
|
|
|
|
|
def get_historic_trades(self, pair: str,
|
|
|
|
since: Optional[int] = None,
|
2019-08-16 08:34:52 +00:00
|
|
|
until: Optional[int] = None,
|
2019-08-29 10:56:10 +00:00
|
|
|
from_id: Optional[str] = None) -> Tuple[str, List]:
|
2019-08-14 18:30:29 +00:00
|
|
|
"""
|
2020-03-08 10:35:31 +00:00
|
|
|
Get trade history data using asyncio.
|
|
|
|
Handles all async work and returns the list of candles.
|
2021-02-14 09:29:45 +00:00
|
|
|
Async over one pair, assuming we get `self.ohlcv_candle_limit()` candles per call.
|
2019-08-14 18:30:29 +00:00
|
|
|
:param pair: Pair to download
|
2019-08-16 08:51:04 +00:00
|
|
|
:param since: Timestamp in milliseconds to get history from
|
|
|
|
:param until: Timestamp in milliseconds. Defaults to current timestamp if not defined.
|
2019-08-16 08:34:52 +00:00
|
|
|
:param from_id: Download data starting with ID (if id is known)
|
2020-03-08 10:35:31 +00:00
|
|
|
:returns List of trade data
|
2019-08-14 18:30:29 +00:00
|
|
|
"""
|
2019-10-19 08:05:30 +00:00
|
|
|
if not self.exchange_has("fetchTrades"):
|
2021-08-16 12:16:24 +00:00
|
|
|
raise OperationalException("This exchange does not support downloading Trades.")
|
2019-08-29 10:56:10 +00:00
|
|
|
|
|
|
|
return asyncio.get_event_loop().run_until_complete(
|
|
|
|
self._async_get_trade_history(pair=pair, since=since,
|
|
|
|
until=until, from_id=from_id))
|
2019-08-14 18:30:29 +00:00
|
|
|
|
2021-09-06 08:24:15 +00:00
|
|
|
# https://www.binance.com/en/support/faq/360033525031
|
2021-09-08 19:43:28 +00:00
|
|
|
def fetch_funding_rate(self, pair):
|
2021-09-02 02:34:01 +00:00
|
|
|
if not self.exchange_has("fetchFundingHistory"):
|
|
|
|
raise OperationalException(
|
|
|
|
f"fetch_funding_history() has not been implemented on ccxt.{self.name}")
|
|
|
|
|
2021-09-06 08:24:15 +00:00
|
|
|
return self._api.fetch_funding_rates()
|
2021-08-25 18:59:25 +00:00
|
|
|
|
2021-09-08 19:46:52 +00:00
|
|
|
@retrier
|
|
|
|
def get_funding_fees(self, pair: str, since: Union[datetime, int]) -> float:
|
|
|
|
"""
|
|
|
|
Returns the sum of all funding fees that were exchanged for a pair within a timeframe
|
|
|
|
:param pair: (e.g. ADA/USDT)
|
|
|
|
:param since: The earliest time of consideration for calculating funding fees,
|
|
|
|
in unix time or as a datetime
|
|
|
|
"""
|
|
|
|
|
|
|
|
if not self.exchange_has("fetchFundingHistory"):
|
|
|
|
raise OperationalException(
|
|
|
|
f"fetch_funding_history() has not been implemented on ccxt.{self.name}")
|
|
|
|
|
|
|
|
if type(since) is datetime:
|
|
|
|
since = int(since.strftime('%s'))
|
|
|
|
|
|
|
|
try:
|
|
|
|
funding_history = self._api.fetch_funding_history(
|
|
|
|
pair=pair,
|
|
|
|
since=since
|
|
|
|
)
|
|
|
|
return sum(fee['amount'] for fee in funding_history)
|
|
|
|
except ccxt.DDoSProtection as e:
|
|
|
|
raise DDosProtection(e) from e
|
|
|
|
except (ccxt.NetworkError, ccxt.ExchangeError) as e:
|
|
|
|
raise TemporaryError(
|
|
|
|
f'Could not get funding fees due to {e.__class__.__name__}. Message: {e}') from e
|
|
|
|
except ccxt.BaseError as e:
|
|
|
|
raise OperationalException(e) from e
|
|
|
|
|
2019-04-09 09:27:35 +00:00
|
|
|
|
2020-02-02 04:00:40 +00:00
|
|
|
def is_exchange_known_ccxt(exchange_name: str, ccxt_module: CcxtModuleType = None) -> bool:
    """
    Check whether ccxt knows an exchange by this name.
    :param exchange_name: Name of the exchange to look up
    :param ccxt_module: ccxt module to query (defaults to the sync ccxt module)
    :return: True if the exchange is listed by ccxt
    """
    known_exchanges = ccxt_exchanges(ccxt_module)
    return exchange_name in known_exchanges
|
2019-06-11 10:18:35 +00:00
|
|
|
|
|
|
|
|
2019-08-22 17:01:41 +00:00
|
|
|
def is_exchange_officially_supported(exchange_name: str) -> bool:
    """
    Check whether an exchange is in the officially supported set.
    :param exchange_name: Exchange name (lowercase)
    :return: True for officially supported exchanges
    """
    return exchange_name in {'bittrex', 'binance', 'kraken'}
|
2019-04-09 09:27:35 +00:00
|
|
|
|
|
|
|
|
2020-02-02 04:00:40 +00:00
|
|
|
def ccxt_exchanges(ccxt_module: CcxtModuleType = None) -> List[str]:
    """
    Return the list of all exchanges known to ccxt
    """
    module = ccxt if ccxt_module is None else ccxt_module
    return module.exchanges
|
2019-04-09 09:27:35 +00:00
|
|
|
|
|
|
|
|
2020-02-02 04:00:40 +00:00
|
|
|
def available_exchanges(ccxt_module: CcxtModuleType = None) -> List[str]:
    """
    Return exchanges available to the bot, i.e. non-bad exchanges in the ccxt list
    """
    return [exchange for exchange in ccxt_exchanges(ccxt_module)
            if validate_exchange(exchange)[0]]
|
2019-09-30 21:33:33 +00:00
|
|
|
|
|
|
|
|
2021-04-06 05:47:44 +00:00
|
|
|
def validate_exchange(exchange: str) -> Tuple[bool, str]:
    """
    Check whether an exchange provides the capabilities freqtrade needs.
    :param exchange: Exchange name (case-insensitive)
    :return: Tuple of (valid, reason). `reason` may be non-empty even for a valid
        exchange (e.g. listing missing optional capabilities).
    """
    api_obj = getattr(ccxt, exchange.lower())()
    if not api_obj or not api_obj.has:
        return False, ''
    # Required capabilities - without any of these the bot cannot operate.
    required_missing = [k for k in EXCHANGE_HAS_REQUIRED if api_obj.has.get(k) is not True]
    if required_missing:
        return False, f"missing: {', '.join(required_missing)}"

    # Optional capabilities - their absence only degrades functionality.
    optional_missing = [k for k in EXCHANGE_HAS_OPTIONAL if not api_obj.has.get(k)]

    if exchange.lower() in BAD_EXCHANGES:
        # Known-problematic exchange - report the stored reason.
        return False, BAD_EXCHANGES.get(exchange.lower(), '')
    if optional_missing:
        return True, f"missing opt: {', '.join(optional_missing)}"

    return True, ''
|
|
|
|
|
|
|
|
|
|
|
|
def validate_exchanges(all_exchanges: bool) -> List[Tuple[str, bool, str]]:
    """
    Validate a set of exchanges.
    :param all_exchanges: validate every ccxt exchange if True,
        otherwise only exchanges available to the bot.
    :return: List of tuples with exchangename, valid, reason.
    """
    names = ccxt_exchanges() if all_exchanges else available_exchanges()
    return [(name, *validate_exchange(name)) for name in names]
|
|
|
|
|
|
|
|
|
2019-11-02 19:25:18 +00:00
|
|
|
def timeframe_to_seconds(timeframe: str) -> int:
    """
    Translates the timeframe interval value written in the human readable
    form ('1m', '5m', '1h', '1d', '1w', etc.) to the number
    of seconds for one timeframe interval.
    """
    seconds: int = ccxt.Exchange.parse_timeframe(timeframe)
    return seconds
|
2019-04-09 09:27:35 +00:00
|
|
|
|
|
|
|
|
2019-11-02 19:25:18 +00:00
|
|
|
def timeframe_to_minutes(timeframe: str) -> int:
    """
    Same as timeframe_to_seconds, but returns minutes.
    """
    seconds = ccxt.Exchange.parse_timeframe(timeframe)
    return seconds // 60
|
2019-04-09 09:27:35 +00:00
|
|
|
|
|
|
|
|
2019-11-02 19:25:18 +00:00
|
|
|
def timeframe_to_msecs(timeframe: str) -> int:
    """
    Same as timeframe_to_seconds, but returns milliseconds.
    """
    seconds = ccxt.Exchange.parse_timeframe(timeframe)
    return seconds * 1000
|
2019-08-12 14:07:19 +00:00
|
|
|
|
|
|
|
|
2019-08-12 14:17:06 +00:00
|
|
|
def timeframe_to_prev_date(timeframe: str, date: Optional[datetime] = None) -> datetime:
    """
    Use Timeframe and determine last possible candle.
    :param timeframe: timeframe in string format (e.g. "5m")
    :param date: date to use. Defaults to utcnow()
    :returns: date of previous candle (with utc timezone)
    """
    if not date:
        date = datetime.now(timezone.utc)

    # Round the millisecond timestamp down (ROUND_DOWN) to the candle boundary,
    # then convert back to seconds for fromtimestamp.
    new_timestamp = ccxt.Exchange.round_timeframe(timeframe, date.timestamp() * 1000,
                                                  ROUND_DOWN) // 1000
    return datetime.fromtimestamp(new_timestamp, tz=timezone.utc)
|
|
|
|
|
|
|
|
|
2019-08-12 14:17:06 +00:00
|
|
|
def timeframe_to_next_date(timeframe: str, date: Optional[datetime] = None) -> datetime:
    """
    Use Timeframe and determine next candle.
    :param timeframe: timeframe in string format (e.g. "5m")
    :param date: date to use. Defaults to utcnow()
    :returns: date of next candle (with utc timezone)
    """
    if not date:
        date = datetime.now(timezone.utc)
    # Round the millisecond timestamp up (ROUND_UP) to the next candle boundary,
    # then convert back to seconds for fromtimestamp.
    new_timestamp = ccxt.Exchange.round_timeframe(timeframe, date.timestamp() * 1000,
                                                  ROUND_UP) // 1000
    return datetime.fromtimestamp(new_timestamp, tz=timezone.utc)
|
2019-10-14 10:32:39 +00:00
|
|
|
|
|
|
|
|
2020-02-02 04:00:40 +00:00
|
|
|
def market_is_active(market: Dict) -> bool:
    """
    Return True if the market is active.
    """
    # "It's active, if the active flag isn't explicitly set to false. If it's missing or
    # true then it's true. If it's undefined, then it's most likely true, but not 100% )"
    # See https://github.com/ccxt/ccxt/issues/4874,
    # https://github.com/ccxt/ccxt/issues/4075#issuecomment-434760520
    active_flag = market.get('active', True)
    return active_flag is not False
|