# pragma pylint: disable=W0603
"""
Cryptocurrency Exchanges support
"""
import asyncio
import http
import inspect
import logging
from copy import deepcopy
from datetime import datetime, timedelta, timezone
from math import ceil
from typing import Any, Dict, List, Literal, Optional, Tuple, Union

import arrow
import ccxt
import ccxt.async_support as ccxt_async
from cachetools import TTLCache
from ccxt.base.decimal_to_precision import (ROUND_DOWN, ROUND_UP, TICK_SIZE, TRUNCATE,
                                            decimal_to_precision)
from pandas import DataFrame

from freqtrade.constants import (DEFAULT_AMOUNT_RESERVE_PERCENT, NON_OPEN_EXCHANGE_STATES,
                                 ListPairsWithTimeframes, PairWithTimeframe)
from freqtrade.data.converter import ohlcv_to_dataframe, trades_dict_to_list
from freqtrade.enums import CandleType, MarginMode, TradingMode
from freqtrade.exceptions import (DDosProtection, ExchangeError, InsufficientFundsError,
                                  InvalidOrderException, OperationalException, PricingError,
                                  RetryableOrderError, TemporaryError)
from freqtrade.exchange.common import (API_FETCH_ORDER_RETRY_COUNT, BAD_EXCHANGES,
                                       EXCHANGE_HAS_OPTIONAL, EXCHANGE_HAS_REQUIRED,
                                       remove_credentials, retrier, retrier_async)
from freqtrade.misc import chunks, deep_merge_dicts, safe_value_fallback2
from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist


CcxtModuleType = Any


logger = logging.getLogger(__name__)


# Workaround for adding samesite support to pre 3.8 python
# Only applies to python3.7, and only on certain exchanges (kraken)
# Replicates the fix from starlette (which is actually causing this problem)
http.cookies.Morsel._reserved["samesite"] = "SameSite"  # type: ignore


class Exchange:

    _config: Dict = {}

    # Parameters to add directly to buy/sell calls (like agreeing to trading agreement)
    _params: Dict = {}

    # Additional headers - added to the ccxt object
    _headers: Dict = {}

    # Dict to specify which options each exchange implements
    # This defines defaults, which can be selectively overridden by subclasses using _ft_has
    # or by specifying them in the configuration.
    _ft_has_default: Dict = {
        "stoploss_on_exchange": False,
        "order_time_in_force": ["gtc"],
        "time_in_force_parameter": "timeInForce",
        "ohlcv_params": {},
        "ohlcv_candle_limit": 500,
        "ohlcv_partial_candle": True,
        # Check https://github.com/ccxt/ccxt/issues/10767 for removal of ohlcv_volume_currency
        "ohlcv_volume_currency": "base",  # "base" or "quote"
        "trades_pagination": "time",  # Possible are "time" or "id"
        "trades_pagination_arg": "since",
        "l2_limit_range": None,
        "l2_limit_range_required": True,  # Allow Empty L2 limit (kucoin)
        "mark_ohlcv_price": "mark",
        "mark_ohlcv_timeframe": "8h",
        "ccxt_futures_name": "swap",
        "can_fetch_multiple_tiers": True,
    }
    _ft_has: Dict = {}

    _supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
        # TradingMode.SPOT always supported and not required in this list
    ]
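
    # Illustrative sketch only (not part of this module): subclasses selectively
    # override entries from _ft_has_default by defining their own _ft_has dict, e.g.
    #
    #     class MyExchange(Exchange):
    #         _ft_has: Dict = {
    #             "stoploss_on_exchange": True,
    #             "ohlcv_candle_limit": 1000,
    #         }
    #
    # The two dicts are deep-merged in __init__, so anything not overridden keeps its
    # default value. "MyExchange" and the values shown above are hypothetical examples.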

    def __init__(self, config: Dict[str, Any], validate: bool = True) -> None:
        """
        Initializes this module with the given config,
        and performs basic validation that the specified exchange and pairs are valid.
        :return: None
        """
        self._api: ccxt.Exchange = None
        self._api_async: ccxt_async.Exchange = None
        self._markets: Dict = {}
        self._leverage_tiers: Dict[str, List[Dict]] = {}
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        self._config.update(config)

        # Holds last candle refreshed time of each pair
        self._pairs_last_refresh_time: Dict[PairWithTimeframe, int] = {}
        # Timestamp of last markets refresh
        self._last_markets_refresh: int = 0

        # Cache for 10 minutes ...
        self._fetch_tickers_cache: TTLCache = TTLCache(maxsize=1, ttl=60 * 10)
        # Cache values for 1800 seconds to avoid frequent polling of the exchange for prices
        # Caching only applies to RPC methods, so prices for open trades are still
        # refreshed once every iteration.
        self._sell_rate_cache: TTLCache = TTLCache(maxsize=100, ttl=1800)
        self._buy_rate_cache: TTLCache = TTLCache(maxsize=100, ttl=1800)

        # Holds candles
        self._klines: Dict[PairWithTimeframe, DataFrame] = {}

        # Holds all open sell orders for dry_run
        self._dry_run_open_orders: Dict[str, Any] = {}
        remove_credentials(config)

        if config['dry_run']:
            logger.info('Instance is running with dry_run enabled')
        logger.info(f"Using CCXT {ccxt.__version__}")
        exchange_config = config['exchange']
        self.log_responses = exchange_config.get('log_responses', False)

        # Deep merge ft_has with default ft_has options
        self._ft_has = deep_merge_dicts(self._ft_has, deepcopy(self._ft_has_default))
        if exchange_config.get('_ft_has_params'):
            self._ft_has = deep_merge_dicts(exchange_config.get('_ft_has_params'),
                                            self._ft_has)
            logger.info("Overriding exchange._ft_has with config params, result: %s", self._ft_has)

        # Assign this directly for easy access
        self._ohlcv_partial_candle = self._ft_has['ohlcv_partial_candle']

        self._trades_pagination = self._ft_has['trades_pagination']
        self._trades_pagination_arg = self._ft_has['trades_pagination_arg']
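
        # Illustrative sketch only: because of the deep-merge above, a user config can
        # override individual _ft_has entries via the exchange section, e.g.
        #
        #     "exchange": {
        #         "name": "binance",
        #         "_ft_has_params": {"ohlcv_candle_limit": 200}
        #     }
        #
        # The values shown are hypothetical; only keys present in the override replace
        # the merged defaults.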

        # Leverage properties
        self.trading_mode: TradingMode = config.get('trading_mode', TradingMode.SPOT)
        self.margin_mode: Optional[MarginMode] = (
            MarginMode(config.get('margin_mode'))
            if config.get('margin_mode')
            else None
        )
        self.liquidation_buffer = config.get('liquidation_buffer', 0.05)

        # Initialize ccxt objects
        ccxt_config = self._ccxt_config
        ccxt_config = deep_merge_dicts(exchange_config.get('ccxt_config', {}), ccxt_config)
        ccxt_config = deep_merge_dicts(exchange_config.get('ccxt_sync_config', {}), ccxt_config)

        self._api = self._init_ccxt(exchange_config, ccxt_kwargs=ccxt_config)

        ccxt_async_config = self._ccxt_config
        ccxt_async_config = deep_merge_dicts(exchange_config.get('ccxt_config', {}),
                                             ccxt_async_config)
        ccxt_async_config = deep_merge_dicts(exchange_config.get('ccxt_async_config', {}),
                                             ccxt_async_config)
        self._api_async = self._init_ccxt(
            exchange_config, ccxt_async, ccxt_kwargs=ccxt_async_config)

        logger.info('Using Exchange "%s"', self.name)

        if validate:
            # Check if timeframe is available
            self.validate_timeframes(config.get('timeframe'))

            # Initial markets load
            self._load_markets()

            # Check if all pairs are available
            self.validate_stakecurrency(config['stake_currency'])
            if not exchange_config.get('skip_pair_validation'):
                self.validate_pairs(config['exchange']['pair_whitelist'])
            self.validate_ordertypes(config.get('order_types', {}))
            self.validate_order_time_in_force(config.get('order_time_in_force', {}))
            self.required_candle_call_count = self.validate_required_startup_candles(
                config.get('startup_candle_count', 0), config.get('timeframe', ''))
            self.validate_trading_mode_and_margin_mode(self.trading_mode, self.margin_mode)

        # Converts the interval provided in minutes in config to seconds
        self.markets_refresh_interval: int = exchange_config.get(
            "markets_refresh_interval", 60) * 60

        if self.trading_mode != TradingMode.SPOT:
            self.fill_leverage_tiers()
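
    # Usage sketch (illustrative, not executed here): constructing an Exchange needs at
    # least the config keys read in __init__. A minimal dry-run config could look like
    #
    #     config = {
    #         'dry_run': True,
    #         'stake_currency': 'USDT',
    #         'timeframe': '5m',
    #         'exchange': {'name': 'binance', 'key': '', 'secret': '',
    #                      'pair_whitelist': ['BTC/USDT']},
    #     }
    #     exchange = Exchange(config, validate=False)
    #
    # The exact set of required keys is an assumption derived from the attribute accesses
    # above; validate=False skips the exchange round-trips performed during validation.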

    def __del__(self):
        """
        Destructor - clean up async stuff
        """
        self.close()

    def close(self):
        logger.debug("Exchange object destroyed, closing async loop")
        if (self._api_async and inspect.iscoroutinefunction(self._api_async.close)
                and self._api_async.session):
            logger.info("Closing async ccxt session.")
            self.loop.run_until_complete(self._api_async.close())

    def _init_ccxt(self, exchange_config: Dict[str, Any], ccxt_module: CcxtModuleType = ccxt,
                   ccxt_kwargs: Dict = {}) -> ccxt.Exchange:
        """
        Initialize ccxt with given config and return valid
        ccxt instance.
        """
        # Find matching class for the given exchange name
        name = exchange_config['name']

        if not is_exchange_known_ccxt(name, ccxt_module):
            raise OperationalException(f'Exchange {name} is not supported by ccxt')

        ex_config = {
            'apiKey': exchange_config.get('key'),
            'secret': exchange_config.get('secret'),
            'password': exchange_config.get('password'),
            'uid': exchange_config.get('uid', ''),
        }
        if ccxt_kwargs:
            logger.info('Applying additional ccxt config: %s', ccxt_kwargs)
        if self._headers:
            # Inject static headers after the above output to not confuse users.
            ccxt_kwargs = deep_merge_dicts({'headers': self._headers}, ccxt_kwargs)
        if ccxt_kwargs:
            ex_config.update(ccxt_kwargs)
        try:
            api = getattr(ccxt_module, name.lower())(ex_config)
        except (KeyError, AttributeError) as e:
            raise OperationalException(f'Exchange {name} is not supported') from e
        except ccxt.BaseError as e:
            raise OperationalException(f"Initialization of ccxt failed. Reason: {e}") from e

        self.set_sandbox(api, exchange_config, name)

        return api
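
    # Illustrative sketch only: ccxt_kwargs is merged into the ccxt constructor config,
    # so a user-supplied "ccxt_config" such as
    #
    #     {"enableRateLimit": True, "options": {"adjustForTimeDifference": True}}
    #
    # would be applied on top of the apiKey/secret block above. The option names are
    # examples of common ccxt settings, not values required by this module.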

    @property
    def _ccxt_config(self) -> Dict:
        # Parameters to add directly to ccxt sync/async initialization.
        if self.trading_mode == TradingMode.MARGIN:
            return {
                "options": {
                    "defaultType": "margin"
                }
            }
        elif self.trading_mode == TradingMode.FUTURES:
            return {
                "options": {
                    "defaultType": self._ft_has["ccxt_futures_name"]
                }
            }
        else:
            return {}

    @property
    def name(self) -> str:
        """exchange Name (from ccxt)"""
        return self._api.name

    @property
    def id(self) -> str:
        """exchange ccxt id"""
        return self._api.id

    @property
    def timeframes(self) -> List[str]:
        return list((self._api.timeframes or {}).keys())

    @property
    def markets(self) -> Dict:
        """exchange ccxt markets"""
        if not self._markets:
            logger.info("Markets were not loaded. Loading them now..")
            self._load_markets()
        return self._markets

    @property
    def precisionMode(self) -> str:
        """exchange ccxt precisionMode"""
        return self._api.precisionMode

    def _log_exchange_response(self, endpoint, response) -> None:
        """ Log exchange responses """
        if self.log_responses:
            logger.info(f"API {endpoint}: {response}")

    def ohlcv_candle_limit(self, timeframe: str) -> int:
        """
        Exchange ohlcv candle limit
        Uses ohlcv_candle_limit_per_timeframe if the exchange has different limits
        per timeframe (e.g. bittrex), otherwise falls back to ohlcv_candle_limit
        :param timeframe: Timeframe to check
        :return: Candle limit as integer
        """
        return int(self._ft_has.get('ohlcv_candle_limit_per_timeframe', {}).get(
            timeframe, self._ft_has.get('ohlcv_candle_limit')))
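
    # Usage sketch (illustrative, hypothetical values): with the defaults above every
    # timeframe resolves to 500 candles; an exchange subclass that defined e.g.
    # _ft_has = {'ohlcv_candle_limit_per_timeframe': {'1m': 1000}} would return 1000
    # for '1m' and still 500 for every other timeframe.
    #
    #     exchange.ohlcv_candle_limit('5m')   # -> 500 (default)
    #     exchange.ohlcv_candle_limit('1m')   # -> 1000 with the override above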

    def get_markets(self, base_currencies: List[str] = None, quote_currencies: List[str] = None,
                    spot_only: bool = False, margin_only: bool = False, futures_only: bool = False,
                    tradable_only: bool = True,
                    active_only: bool = False) -> Dict[str, Any]:
        """
        Return exchange ccxt markets, filtered out by base currency and quote currency
        if this was requested in parameters.

        TODO: consider moving it to the Dataprovider
        """
        markets = self.markets
        if not markets:
            raise OperationalException("Markets were not loaded.")

        if base_currencies:
            markets = {k: v for k, v in markets.items() if v['base'] in base_currencies}
        if quote_currencies:
            markets = {k: v for k, v in markets.items() if v['quote'] in quote_currencies}
        if tradable_only:
            markets = {k: v for k, v in markets.items() if self.market_is_tradable(v)}
        if spot_only:
            markets = {k: v for k, v in markets.items() if self.market_is_spot(v)}
        if margin_only:
            markets = {k: v for k, v in markets.items() if self.market_is_margin(v)}
        if futures_only:
            markets = {k: v for k, v in markets.items() if self.market_is_future(v)}
        if active_only:
            markets = {k: v for k, v in markets.items() if market_is_active(v)}
        return markets
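
    # Usage sketch (illustrative): the keyword filters can be combined, e.g.
    #
    #     usdt_spot = exchange.get_markets(quote_currencies=['USDT'],
    #                                      spot_only=True, active_only=True)
    #
    # returns only active USDT-quoted spot markets that are tradable in the configured
    # trading mode. 'USDT' is an example value, not a requirement of this method.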

    def get_quote_currencies(self) -> List[str]:
        """
        Return a list of supported quote currencies
        """
        markets = self.markets
        return sorted(set([x['quote'] for _, x in markets.items()]))

    def get_pair_quote_currency(self, pair: str) -> str:
        """
        Return a pair's quote currency
        """
        return self.markets.get(pair, {}).get('quote', '')

    def get_pair_base_currency(self, pair: str) -> str:
        """
        Return a pair's base currency
        """
        return self.markets.get(pair, {}).get('base', '')

    def market_is_future(self, market: Dict[str, Any]) -> bool:
        return (
            market.get(self._ft_has["ccxt_futures_name"], False) is True and
            market.get('linear', False) is True
        )

    def market_is_spot(self, market: Dict[str, Any]) -> bool:
        return market.get('spot', False) is True

    def market_is_margin(self, market: Dict[str, Any]) -> bool:
        return market.get('margin', False) is True

    def market_is_tradable(self, market: Dict[str, Any]) -> bool:
        """
        Check if the market symbol is tradable by Freqtrade.
        Ensures that the configured trading mode aligns with the market type (spot, margin, futures).
        """
        return (
            market.get('quote', None) is not None
            and market.get('base', None) is not None
            and (self.trading_mode == TradingMode.SPOT and self.market_is_spot(market))
            or (self.trading_mode == TradingMode.MARGIN and self.market_is_margin(market))
            or (self.trading_mode == TradingMode.FUTURES and self.market_is_future(market))
        )

    def klines(self, pair_interval: PairWithTimeframe, copy: bool = True) -> DataFrame:
        if pair_interval in self._klines:
            return self._klines[pair_interval].copy() if copy else self._klines[pair_interval]
        else:
            return DataFrame()

    def _get_contract_size(self, pair: str) -> float:
        if self.trading_mode == TradingMode.FUTURES:
            market = self.markets[pair]
            contract_size: float = 1.0
            if market['contractSize'] is not None:
                # ccxt has contractSize in markets as string
                contract_size = float(market['contractSize'])
            return contract_size
        else:
            return 1

    def _trades_contracts_to_amount(self, trades: List) -> List:
        if len(trades) > 0 and 'symbol' in trades[0]:
            contract_size = self._get_contract_size(trades[0]['symbol'])
            if contract_size != 1:
                for trade in trades:
                    trade['amount'] = trade['amount'] * contract_size
        return trades

    def _order_contracts_to_amount(self, order: Dict) -> Dict:
        if 'symbol' in order:
            contract_size = self._get_contract_size(order['symbol'])
            if contract_size != 1:
                for prop in ['amount', 'cost', 'filled', 'remaining']:
                    if prop in order and order[prop] is not None:
                        order[prop] = order[prop] * contract_size
        return order

    def _amount_to_contracts(self, pair: str, amount: float):

        contract_size = self._get_contract_size(pair)
        if contract_size and contract_size != 1:
            return amount / contract_size
        else:
            return amount

    def _contracts_to_amount(self, pair: str, num_contracts: float):

        contract_size = self._get_contract_size(pair)
        if contract_size and contract_size != 1:
            return num_contracts * contract_size
        else:
            return num_contracts
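
    # Worked example (hypothetical numbers): for a futures market with
    # contractSize == 0.001, an amount of 5.0 coins corresponds to
    # _amount_to_contracts(pair, 5.0) == 5000 contracts, and
    # _contracts_to_amount(pair, 5000) == 5.0 converts back. On spot markets
    # (or when contractSize is 1) both helpers return their input unchanged.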

    def set_sandbox(self, api: ccxt.Exchange, exchange_config: dict, name: str) -> None:
        if exchange_config.get('sandbox'):
            if api.urls.get('test'):
                api.urls['api'] = api.urls['test']
                logger.info("Enabled Sandbox API on %s", name)
            else:
                logger.warning(
                    f"No Sandbox URL in CCXT for {name}, exiting. Please check your config.json")
                raise OperationalException(f'Exchange {name} does not provide a sandbox api')

    def _load_async_markets(self, reload: bool = False) -> None:
        try:
            if self._api_async:
                self.loop.run_until_complete(
                    self._api_async.load_markets(reload=reload))

        except (asyncio.TimeoutError, ccxt.BaseError) as e:
            logger.warning('Could not load async markets. Reason: %s', e)
            return

    def _load_markets(self) -> None:
        """ Initialize markets both sync and async """
        try:
            self._markets = self._api.load_markets()
            self._load_async_markets()
            self._last_markets_refresh = arrow.utcnow().int_timestamp
        except ccxt.BaseError:
            logger.exception('Unable to initialize markets.')

    def reload_markets(self) -> None:
        """Reload markets both sync and async if refresh interval has passed """
        # Check whether markets have to be reloaded
        if (self._last_markets_refresh > 0) and (
                self._last_markets_refresh + self.markets_refresh_interval
                > arrow.utcnow().int_timestamp):
            return None
        logger.debug("Performing scheduled market reload..")
        try:
            self._markets = self._api.load_markets(reload=True)
            # Also reload async markets to avoid issues with newly listed pairs
            self._load_async_markets(reload=True)
            self._last_markets_refresh = arrow.utcnow().int_timestamp
            self.fill_leverage_tiers()
        except ccxt.BaseError:
            logger.exception("Could not reload markets.")

    def validate_stakecurrency(self, stake_currency: str) -> None:
        """
        Checks stake-currency against available currencies on the exchange.
        Only runs on startup. If markets have not been loaded, there's been a problem with
        the connection to the exchange.
        :param stake_currency: Stake-currency to validate
        :raise: OperationalException if stake-currency is not available.
        """
        if not self._markets:
            raise OperationalException(
                'Could not load markets, therefore cannot start. '
                'Please investigate the above error for more details.'
            )
        quote_currencies = self.get_quote_currencies()
        if stake_currency not in quote_currencies:
            raise OperationalException(
                f"{stake_currency} is not available as stake on {self.name}. "
                f"Available currencies are: {', '.join(quote_currencies)}")

    def validate_pairs(self, pairs: List[str]) -> None:
        """
        Checks if all given pairs are tradable on the current exchange.
        :param pairs: list of pairs
        :raise: OperationalException if one pair is not available
        :return: None
        """

        if not self.markets:
            logger.warning('Unable to validate pairs (assuming they are correct).')
            return
        extended_pairs = expand_pairlist(pairs, list(self.markets), keep_invalid=True)
        invalid_pairs = []
        for pair in extended_pairs:
            # Note: ccxt has BaseCurrency/QuoteCurrency format for pairs
            if self.markets and pair not in self.markets:
                raise OperationalException(
                    f'Pair {pair} is not available on {self.name} {self.trading_mode.value}. '
                    f'Please remove {pair} from your whitelist.')

            # From ccxt Documentation:
            # markets.info: An associative array of non-common market properties,
            # including fees, rates, limits and other general market information.
            # The internal info array is different for each particular market,
            # its contents depend on the exchange.
            # It can also be a string or similar ... so we need to verify that first.
            elif (isinstance(self.markets[pair].get('info', None), dict)
                  and self.markets[pair].get('info', {}).get('prohibitedIn', False)):
                # Warn users about restricted pairs in whitelist.
                # We cannot determine reliably if Users are affected.
                logger.warning(f"Pair {pair} is restricted for some users on this exchange. "
                               f"Please check if you are impacted by this restriction "
                               f"on the exchange and eventually remove {pair} from your whitelist.")
            if (self._config['stake_currency'] and
                    self.get_pair_quote_currency(pair) != self._config['stake_currency']):
                invalid_pairs.append(pair)
        if invalid_pairs:
            raise OperationalException(
                f"Stake-currency '{self._config['stake_currency']}' not compatible with "
                f"pair-whitelist. Please remove the following pairs: {invalid_pairs}")

    def get_valid_pair_combination(self, curr_1: str, curr_2: str) -> str:
        """
        Get valid pair combination of curr_1 and curr_2 by trying both combinations.
        """
        for pair in [f"{curr_1}/{curr_2}", f"{curr_2}/{curr_1}"]:
            if pair in self.markets and self.markets[pair].get('active'):
                return pair
        raise ExchangeError(f"Could not combine {curr_1} and {curr_2} to get a valid pair.")

    def validate_timeframes(self, timeframe: Optional[str]) -> None:
        """
        Check if timeframe from config is a supported timeframe on the exchange
        """
        if not hasattr(self._api, "timeframes") or self._api.timeframes is None:
            # If timeframes attribute is missing (or is None), the exchange probably
            # has no fetchOHLCV method.
            # Therefore we also show that.
            raise OperationalException(
                f"The ccxt library does not provide the list of timeframes "
                f"for the exchange \"{self.name}\" and this exchange "
                f"is therefore not supported. ccxt fetchOHLCV: {self.exchange_has('fetchOHLCV')}")

        if timeframe and (timeframe not in self.timeframes):
            raise OperationalException(
                f"Invalid timeframe '{timeframe}'. This exchange supports: {self.timeframes}")

        if timeframe and timeframe_to_minutes(timeframe) < 1:
            raise OperationalException("Timeframes < 1m are currently not supported by Freqtrade.")

    def validate_ordertypes(self, order_types: Dict) -> None:
        """
        Checks if order-types configured in strategy/config are supported
        """
        if any(v == 'market' for k, v in order_types.items()):
            if not self.exchange_has('createMarketOrder'):
                raise OperationalException(
                    f'Exchange {self.name} does not support market orders.')

        if (order_types.get("stoploss_on_exchange")
                and not self._ft_has.get("stoploss_on_exchange", False)):
            raise OperationalException(
                f'On exchange stoploss is not supported for {self.name}.'
            )

    def validate_order_time_in_force(self, order_time_in_force: Dict) -> None:
        """
        Checks if order time in force configured in strategy/config are supported
        """
        if any(v not in self._ft_has["order_time_in_force"]
               for k, v in order_time_in_force.items()):
            raise OperationalException(
                f'Time in force policies are not supported for {self.name} yet.')

    def validate_required_startup_candles(self, startup_candles: int, timeframe: str) -> int:
        """
        Checks if required startup_candles is more than ohlcv_candle_limit().
        Requires a grace-period of 5 candles - so a startup-period up to 494 is allowed by default.
        """
        candle_limit = self.ohlcv_candle_limit(timeframe)
        # Require one more candle - to account for the still open candle.
        candle_count = startup_candles + 1
        # Allow 5 calls to the exchange per pair
        required_candle_call_count = int(
            (candle_count / candle_limit) + (0 if candle_count % candle_limit == 0 else 1))

        if required_candle_call_count > 5:
            # Only allow 5 calls per pair to somewhat limit the impact
            raise OperationalException(
                f"This strategy requires {startup_candles} candles to start, which is more than 5x "
                f"the amount of candles {self.name} provides for {timeframe}.")

        if required_candle_call_count > 1:
            logger.warning(f"Using {required_candle_call_count} calls to get OHLCV. "
                           f"This can result in slower operations for the bot. Please check "
                           f"if you really need {startup_candles} candles for your strategy")
        return required_candle_call_count
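
    # Worked example (hypothetical numbers): with the default candle limit of 500,
    # startup_candles=600 gives candle_count=601 and
    # required_candle_call_count = int(601 / 500) + 1 = 2, so two OHLCV calls per pair
    # are needed and a warning is logged. A startup period above roughly 5 * 500 candles
    # would raise OperationalException instead.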

    def validate_trading_mode_and_margin_mode(
        self,
        trading_mode: TradingMode,
        margin_mode: Optional[MarginMode]  # Only None when trading_mode = TradingMode.SPOT
    ):
        """
        Checks if freqtrade can perform trades using the configured
        trading mode (Margin, Futures) and MarginMode (Cross, Isolated)
        Throws OperationalException:
            If the trading_mode/margin_mode type are not supported by freqtrade on this exchange
        """
        if trading_mode != TradingMode.SPOT and (
            (trading_mode, margin_mode) not in self._supported_trading_mode_margin_pairs
        ):
            mm_value = margin_mode and margin_mode.value
            raise OperationalException(
                f"Freqtrade does not support {mm_value} {trading_mode.value} on {self.name}"
            )

    def exchange_has(self, endpoint: str) -> bool:
        """
        Checks if exchange implements a specific API endpoint.
        Wrapper around ccxt 'has' attribute
        :param endpoint: Name of endpoint (e.g. 'fetchOHLCV', 'fetchTickers')
        :return: bool
        """
        return endpoint in self._api.has and self._api.has[endpoint]

    def amount_to_precision(self, pair: str, amount: float) -> float:
        """
        Returns the amount to buy or sell to a precision the Exchange accepts
        Re-implementation of ccxt internal methods - ensuring we can test the result is correct
        based on our definitions.
        """
        if self.markets[pair]['precision']['amount']:
            amount = float(decimal_to_precision(amount, rounding_mode=TRUNCATE,
                                                precision=self.markets[pair]['precision']['amount'],
                                                counting_mode=self.precisionMode,
                                                ))

        return amount

    def price_to_precision(self, pair: str, price: float) -> float:
        """
        Returns the price rounded up to the precision the Exchange accepts.
        Partial Re-implementation of ccxt internal method decimal_to_precision(),
        which does not support rounding up
        TODO: If ccxt supports ROUND_UP for decimal_to_precision(), we could remove this and
        align with amount_to_precision().
        Rounds up
        """
        if self.markets[pair]['precision']['price']:
            # price = float(decimal_to_precision(price, rounding_mode=ROUND,
            #                                    precision=self.markets[pair]['precision']['price'],
            #                                    counting_mode=self.precisionMode,
            #                                    ))
            if self.precisionMode == TICK_SIZE:
                precision = self.markets[pair]['precision']['price']
                missing = price % precision
                if missing != 0:
                    price = round(price - missing + precision, 10)
            else:
                symbol_prec = self.markets[pair]['precision']['price']
                big_price = price * pow(10, symbol_prec)
                price = ceil(big_price) / pow(10, symbol_prec)
        return price
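
    # Worked example (hypothetical numbers): with TICK_SIZE precision of 0.05, a price
    # of 1.27 is rounded *up* to 1.30 (missing = 1.27 % 0.05 = 0.02, so price becomes
    # 1.27 - 0.02 + 0.05). With digit-based precision of 3, 0.123456 becomes
    # ceil(123.456) / 1000 = 0.124.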

    def price_get_one_pip(self, pair: str, price: float) -> float:
        """
        Gets the "1 pip" value for this pair.
        Used in PriceFilter to calculate the 1pip movements.
        """
        precision = self.markets[pair]['precision']['price']
        if self.precisionMode == TICK_SIZE:
            return precision
        else:
            return 1 / pow(10, precision)

    def get_min_pair_stake_amount(
        self,
        pair: str,
        price: float,
        stoploss: float,
        leverage: Optional[float] = 1.0
    ) -> Optional[float]:
        return self._get_stake_amount_limit(pair, price, stoploss, 'min', leverage)

    def get_max_pair_stake_amount(
        self,
        pair: str,
        price: float,
        leverage: float = 1.0
    ) -> float:
        max_stake_amount = self._get_stake_amount_limit(pair, price, 0.0, 'max')
        if max_stake_amount is None:
            # * Should never be executed
            raise OperationalException(f'{self.name}.get_max_pair_stake_amount should '
                                       'never set max_stake_amount to None')
        return max_stake_amount / leverage

    def _get_stake_amount_limit(
        self,
        pair: str,
        price: float,
        stoploss: float,
        limit: Literal['min', 'max'],
        leverage: Optional[float] = 1.0
    ) -> Optional[float]:

        isMin = limit == 'min'

        try:
            market = self.markets[pair]
        except KeyError:
            raise ValueError(f"Can't get market information for symbol {pair}")

        stake_limits = []
        limits = market['limits']
        if (limits['cost'][limit] is not None):
            stake_limits.append(
                self._contracts_to_amount(
                    pair,
                    limits['cost'][limit]
                )
            )

        if (limits['amount'][limit] is not None):
            stake_limits.append(
                self._contracts_to_amount(
                    pair,
                    limits['amount'][limit] * price
                )
            )

        if not stake_limits:
            return None if isMin else float('inf')

        # reserve some percent defined in config (5% default) + stoploss
        amount_reserve_percent = 1.0 + self._config.get('amount_reserve_percent',
                                                        DEFAULT_AMOUNT_RESERVE_PERCENT)
        amount_reserve_percent = (
            amount_reserve_percent / (1 - abs(stoploss)) if abs(stoploss) != 1 else 1.5
        )
        # it should not be more than 50%
        amount_reserve_percent = max(min(amount_reserve_percent, 1.5), 1)

        # The value returned should satisfy both limits: for amount (base currency) and
        # for cost (quote, stake currency), so max() is used here.
        # See also #2575 at github.
        return self._get_stake_amount_considering_leverage(
            max(stake_limits) * amount_reserve_percent,
            leverage or 1.0
        ) if isMin else min(stake_limits)
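
    # Worked example (hypothetical numbers): for a pair with price 2.0 and
    # limits = {'cost': {'min': 10}, 'amount': {'min': 4}}, the candidates are
    # 10 (cost) and 4 * 2.0 = 8 (amount), so max() picks 10. With the default 5% reserve
    # and a 10% stoploss the minimum stake works out to roughly
    # 10 * 1.05 / (1 - 0.10) ≈ 11.67 stake currency, before the leverage division below.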

    def _get_stake_amount_considering_leverage(self, stake_amount: float, leverage: float):
        """
        Takes the minimum stake amount for a pair with no leverage and returns the minimum
        stake amount when leverage is considered
        :param stake_amount: The stake amount for a pair before leverage is considered
        :param leverage: The amount of leverage being used on the current trade
        """
        return stake_amount / leverage

    # Dry-run methods

    def create_dry_run_order(self, pair: str, ordertype: str, side: str, amount: float,
                             rate: float, leverage: float, params: Dict = {}) -> Dict[str, Any]:
        order_id = f'dry_run_{side}_{datetime.now().timestamp()}'
        _amount = self.amount_to_precision(pair, amount)
        dry_order: Dict[str, Any] = {
            'id': order_id,
            'symbol': pair,
            'price': rate,
            'average': rate,
            'amount': _amount,
            'cost': _amount * rate,
            'type': ordertype,
            'side': side,
            'filled': 0,
            'remaining': _amount,
            'datetime': arrow.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
            'timestamp': arrow.utcnow().int_timestamp * 1000,
            'status': "closed" if ordertype == "market" else "open",
            'fee': None,
            'info': {},
            'leverage': leverage
        }
        if dry_order["type"] in ["stop_loss_limit", "stop-loss-limit"]:
            dry_order["info"] = {"stopPrice": dry_order["price"]}

        if dry_order["type"] == "market":
            # Update market order pricing
            average = self.get_dry_market_fill_price(pair, side, amount, rate)
            dry_order.update({
                'average': average,
                'filled': _amount,
                'cost': (dry_order['amount'] * average) / leverage
            })
            dry_order = self.add_dry_order_fee(pair, dry_order)

        dry_order = self.check_dry_limit_order_filled(dry_order)

        self._dry_run_open_orders[dry_order["id"]] = dry_order
        # Copy order and close it - so the returned order is open unless it's a market order
        return dry_order

    def add_dry_order_fee(self, pair: str, dry_order: Dict[str, Any]) -> Dict[str, Any]:
        dry_order.update({
            'fee': {
                'currency': self.get_pair_quote_currency(pair),
                'cost': dry_order['cost'] * self.get_fee(pair),
                'rate': self.get_fee(pair)
            }
        })
        return dry_order

    def get_dry_market_fill_price(self, pair: str, side: str, amount: float, rate: float) -> float:
        """
        Get the market order fill price based on orderbook interpolation
        """
        if self.exchange_has('fetchL2OrderBook'):
            ob = self.fetch_l2_order_book(pair, 20)
            ob_type = 'asks' if side == 'buy' else 'bids'
            slippage = 0.05
            max_slippage_val = rate * ((1 + slippage) if side == 'buy' else (1 - slippage))

            remaining_amount = amount
            filled_amount = 0.0
            book_entry_price = 0.0
            for book_entry in ob[ob_type]:
                book_entry_price = book_entry[0]
                book_entry_coin_volume = book_entry[1]
                if remaining_amount > 0:
                    if remaining_amount < book_entry_coin_volume:
                        # Orderbook at this slot bigger than remaining amount
                        filled_amount += remaining_amount * book_entry_price
                        break
                    else:
                        filled_amount += book_entry_coin_volume * book_entry_price
                        remaining_amount -= book_entry_coin_volume
                else:
                    break
            else:
                # If remaining_amount wasn't consumed completely (break was not called)
                filled_amount += remaining_amount * book_entry_price
            forecast_avg_filled_price = max(filled_amount, 0) / amount
            # Limit max. slippage to specified value
            if side == 'buy':
                forecast_avg_filled_price = min(forecast_avg_filled_price, max_slippage_val)
            else:
                forecast_avg_filled_price = max(forecast_avg_filled_price, max_slippage_val)

            return self.price_to_precision(pair, forecast_avg_filled_price)

        return rate
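
    # Worked example (hypothetical orderbook): buying 2.0 coins against asks of
    # [[100, 1.5], [101, 5.0]] fills 1.5 @ 100 and 0.5 @ 101, so the forecast average is
    # (150 + 50.5) / 2.0 = 100.25 - capped at rate * 1.05 by the slippage limit above
    # before being rounded to the exchange's price precision.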

    def _is_dry_limit_order_filled(self, pair: str, side: str, limit: float) -> bool:
        if not self.exchange_has('fetchL2OrderBook'):
            return True
        ob = self.fetch_l2_order_book(pair, 1)
        try:
            if side == 'buy':
                price = ob['asks'][0][0]
                logger.debug(f"{pair} checking dry buy-order: price={price}, limit={limit}")
                if limit >= price:
                    return True
            else:
                price = ob['bids'][0][0]
                logger.debug(f"{pair} checking dry sell-order: price={price}, limit={limit}")
                if limit <= price:
                    return True
        except IndexError:
            # Ignore empty orderbooks when filling - can be filled with the next iteration.
            pass
        return False

    def check_dry_limit_order_filled(self, order: Dict[str, Any]) -> Dict[str, Any]:
        """
        Check dry-run limit order fill and update fee (if it filled).
        """
        if order['status'] != "closed" and order['type'] in ["limit"]:
            pair = order['symbol']
            if self._is_dry_limit_order_filled(pair, order['side'], order['price']):
                order.update({
                    'status': 'closed',
                    'filled': order['amount'],
                    'remaining': 0,
                })
                self.add_dry_order_fee(pair, order)

        return order

    def fetch_dry_run_order(self, order_id) -> Dict[str, Any]:
        """
        Return dry-run order
        Only call if running in dry-run mode.
        """
        try:
            order = self._dry_run_open_orders[order_id]
            order = self.check_dry_limit_order_filled(order)
            return order
        except KeyError as e:
            # Gracefully handle errors with dry-run orders.
            raise InvalidOrderException(
                f'Tried to get an invalid dry-run-order (id: {order_id}). Message: {e}') from e

    # Order handling

    def _lev_prep(
        self,
        pair: str,
        leverage: float,
        side: str  # buy or sell
    ):
        if self.trading_mode != TradingMode.SPOT:
            self.set_margin_mode(pair, self.margin_mode)
            self._set_leverage(leverage, pair)

    def _get_params(
        self,
        ordertype: str,
        leverage: float,
        reduceOnly: bool,
        time_in_force: str = 'gtc',
    ) -> Dict:
        params = self._params.copy()
        if time_in_force != 'gtc' and ordertype != 'market':
            param = self._ft_has.get('time_in_force_parameter', '')
            params.update({param: time_in_force})
        if reduceOnly:
            params.update({'reduceOnly': True})
        return params

    def create_order(
        self,
        *,
        pair: str,
        ordertype: str,
        side: str,
        amount: float,
        rate: float,
        leverage: float,
        reduceOnly: bool = False,
        time_in_force: str = 'gtc',
    ) -> Dict:
        if self._config['dry_run']:
            dry_order = self.create_dry_run_order(pair, ordertype, side, amount, rate, leverage)
            return dry_order

        params = self._get_params(ordertype, leverage, reduceOnly, time_in_force)

        try:
            # Set the precision for amount and price(rate) as accepted by the exchange
            amount = self.amount_to_precision(pair, self._amount_to_contracts(pair, amount))
            needs_price = (ordertype != 'market'
                           or self._api.options.get("createMarketBuyOrderRequiresPrice", False))
            rate_for_order = self.price_to_precision(pair, rate) if needs_price else None

            if not reduceOnly:
                self._lev_prep(pair, leverage, side)

            order = self._api.create_order(
                pair,
                ordertype,
                side,
                amount,
                rate_for_order,
                params,
            )
            self._log_exchange_response('create_order', order)
            order = self._order_contracts_to_amount(order)
            return order

        except ccxt.InsufficientFunds as e:
            raise InsufficientFundsError(
                f'Insufficient funds to create {ordertype} {side} order on market {pair}. '
                f'Tried to {side} amount {amount} at rate {rate}. '
                f'Message: {e}') from e
        except ccxt.InvalidOrder as e:
            raise ExchangeError(
                f'Could not create {ordertype} {side} order on market {pair}. '
                f'Tried to {side} amount {amount} at rate {rate}. '
                f'Message: {e}') from e
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
            raise TemporaryError(
                f'Could not place {side} order due to {e.__class__.__name__}. Message: {e}') from e
        except ccxt.BaseError as e:
            raise OperationalException(e) from e
|
2017-10-06 10:22:04 +00:00
|
|
|
|
2021-07-26 06:01:57 +00:00
|
|
|
    def stoploss_adjust(self, stop_loss: float, order: Dict, side: str) -> bool:
        """
        Verify stop_loss against stoploss-order value (limit or price)
        Returns True if adjustment is necessary.
        """
        raise OperationalException(f"stoploss is not implemented for {self.name}.")

    def stoploss(self, pair: str, amount: float, stop_price: float,
                 order_types: Dict, side: str, leverage: float) -> Dict:
        """
        creates a stoploss order.
        The precise ordertype is determined by the order_types dict or exchange default.
        Since ccxt does not unify stoploss-limit orders yet, this needs to be implemented in each
        exchange's subclass.
        The exception below should never raise, since we disallow
        starting the bot in validate_ordertypes()
        Note: Changes to this interface need to be applied to all sub-classes too.
        """

        raise OperationalException(f"stoploss is not implemented for {self.name}.")

    @retrier(retries=API_FETCH_ORDER_RETRY_COUNT)
    def fetch_order(self, order_id: str, pair: str) -> Dict:
        if self._config['dry_run']:
            return self.fetch_dry_run_order(order_id)
        try:
            order = self._api.fetch_order(order_id, pair)
            self._log_exchange_response('fetch_order', order)
            order = self._order_contracts_to_amount(order)
            return order
        except ccxt.OrderNotFound as e:
            raise RetryableOrderError(
                f'Order not found (pair: {pair} id: {order_id}). Message: {e}') from e
        except ccxt.InvalidOrder as e:
            raise InvalidOrderException(
                f'Tried to get an invalid order (pair: {pair} id: {order_id}). Message: {e}') from e
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
            raise TemporaryError(
                f'Could not get order due to {e.__class__.__name__}. Message: {e}') from e
        except ccxt.BaseError as e:
            raise OperationalException(e) from e

    # Assign method to fetch_stoploss_order to allow easy overriding in other classes
    fetch_stoploss_order = fetch_order

    def fetch_order_or_stoploss_order(self, order_id: str, pair: str,
                                      stoploss_order: bool = False) -> Dict:
        """
        Simple wrapper calling either fetch_order or fetch_stoploss_order depending on
        the stoploss_order parameter
        :param order_id: OrderId to fetch order
        :param pair: Pair corresponding to order_id
        :param stoploss_order: If true, uses fetch_stoploss_order, otherwise fetch_order.
        """
        if stoploss_order:
            return self.fetch_stoploss_order(order_id, pair)
        return self.fetch_order(order_id, pair)

    def check_order_canceled_empty(self, order: Dict) -> bool:
        """
        Verify if an order has been cancelled without being partially filled
        :param order: Order dict as returned from fetch_order()
        :return: True if order has been cancelled without being filled, False otherwise.
        """
        return (order.get('status') in NON_OPEN_EXCHANGE_STATES
                and order.get('filled') == 0.0)

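    # Illustrative sketch (not part of the original module): given ccxt-style order dicts,
    # check_order_canceled_empty() is only True for cancelled-and-unfilled orders, e.g.
    #
    #   check_order_canceled_empty({'status': 'canceled', 'filled': 0.0})   # -> True
    #   check_order_canceled_empty({'status': 'canceled', 'filled': 0.01})  # -> False
    #   check_order_canceled_empty({'status': 'open', 'filled': 0.0})       # -> False
    #
    # assuming 'canceled' is included in NON_OPEN_EXCHANGE_STATES while 'open' is not.
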
    @retrier
    def cancel_order(self, order_id: str, pair: str) -> Dict:
        if self._config['dry_run']:
            try:
                order = self.fetch_dry_run_order(order_id)

                order.update({'status': 'canceled', 'filled': 0.0, 'remaining': order['amount']})
                return order
            except InvalidOrderException:
                return {}

        try:
            order = self._api.cancel_order(order_id, pair)
            self._log_exchange_response('cancel_order', order)
            order = self._order_contracts_to_amount(order)
            return order
        except ccxt.InvalidOrder as e:
            raise InvalidOrderException(
                f'Could not cancel order. Message: {e}') from e
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
            raise TemporaryError(
                f'Could not cancel order due to {e.__class__.__name__}. Message: {e}') from e
        except ccxt.BaseError as e:
            raise OperationalException(e) from e

    # Assign method to cancel_stoploss_order to allow easy overriding in other classes
    cancel_stoploss_order = cancel_order

    def is_cancel_order_result_suitable(self, corder) -> bool:
        if not isinstance(corder, dict):
            return False

        required = ('fee', 'status', 'amount')
        return all(k in corder for k in required)

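    # Illustrative sketch (not part of the original module): cancel results are only treated
    # as usable when they carry the keys needed downstream, e.g.
    #
    #   is_cancel_order_result_suitable({'fee': {}, 'status': 'canceled', 'amount': 1.0})  # True
    #   is_cancel_order_result_suitable({'status': 'canceled'})                            # False
    #   is_cancel_order_result_suitable(None)                                              # False
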
    def cancel_order_with_result(self, order_id: str, pair: str, amount: float) -> Dict:
        """
        Cancel order returning a result.
        Creates a fake result if cancel order returns a non-usable result
        and fetch_order does not work (certain exchanges don't return cancelled orders)
        :param order_id: Orderid to cancel
        :param pair: Pair corresponding to order_id
        :param amount: Amount to use for fake response
        :return: Result from either cancel_order if usable, or fetch_order
        """
        try:
            corder = self.cancel_order(order_id, pair)
            if self.is_cancel_order_result_suitable(corder):
                return corder
        except InvalidOrderException:
            logger.warning(f"Could not cancel order {order_id} for {pair}.")
        try:
            order = self.fetch_order(order_id, pair)
        except InvalidOrderException:
            logger.warning(f"Could not fetch cancelled order {order_id}.")
            order = {'fee': {}, 'status': 'canceled', 'amount': amount, 'info': {}}

        return order

    def cancel_stoploss_order_with_result(self, order_id: str, pair: str, amount: float) -> Dict:
        """
        Cancel stoploss order returning a result.
        Creates a fake result if cancel order returns a non-usable result
        and fetch_order does not work (certain exchanges don't return cancelled orders)
        :param order_id: stoploss-order-id to cancel
        :param pair: Pair corresponding to order_id
        :param amount: Amount to use for fake response
        :return: Result from either cancel_order if usable, or fetch_order
        """
        corder = self.cancel_stoploss_order(order_id, pair)
        if self.is_cancel_order_result_suitable(corder):
            return corder
        try:
            order = self.fetch_stoploss_order(order_id, pair)
        except InvalidOrderException:
            logger.warning(f"Could not fetch cancelled stoploss order {order_id}.")
            order = {'fee': {}, 'status': 'canceled', 'amount': amount, 'info': {}}

        return order

    @retrier
    def get_balances(self) -> dict:

        try:
            balances = self._api.fetch_balance()
            # Remove additional info from ccxt results
            balances.pop("info", None)
            balances.pop("free", None)
            balances.pop("total", None)
            balances.pop("used", None)

            if self.trading_mode == TradingMode.FUTURES:

                open_orders_response: List[dict] = self._api.fetch_open_orders()
                open_orders: dict = {}
                for order in open_orders_response:
                    symbol: str = order['symbol']
                    open_orders[symbol] = order

                positions: List[dict] = self._api.fetch_positions()
                for position in positions:
                    symbol = position['symbol']
                    market: dict = self.markets[symbol]
                    size: float = self._contracts_to_amount(symbol, position['contracts'])
                    side: str = position['side']
                    if size > 0:

                        if symbol in open_orders:
                            order = open_orders[symbol]
                            order_amount: float = order['remaining']
                            order_side: str = order['side']
                            if order_side == 'buy' or order_side == 'long':
                                order_amount = 0
                        else:
                            order_amount = 0

                        if side == 'long' or side == 'buy':
                            currency = market['base']
                            free = size - order_amount

                            balances[currency] = {
                                'free': free,
                                'used': order_amount,
                                'total': size,
                            }
                            # The aggregate 'free' / 'used' / 'total' dicts were popped above,
                            # so re-create them as needed to avoid a KeyError.
                            balances.setdefault('free', {})[currency] = free
                            balances.setdefault('used', {})[currency] = order_amount
                            balances.setdefault('total', {})[currency] = size

            return balances
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
            raise TemporaryError(
                f'Could not get balance due to {e.__class__.__name__}. Message: {e}') from e
        except ccxt.BaseError as e:
            raise OperationalException(e) from e

    @retrier
    def get_tickers(self, symbols: List[str] = None, cached: bool = False) -> Dict:
        """
        :param cached: Allow cached result
        :return: fetch_tickers result
        """
        if cached:
            tickers = self._fetch_tickers_cache.get('fetch_tickers')
            if tickers:
                return tickers
        try:
            tickers = self._api.fetch_tickers(symbols)
            self._fetch_tickers_cache['fetch_tickers'] = tickers
            return tickers
        except ccxt.NotSupported as e:
            raise OperationalException(
                f'Exchange {self._api.name} does not support fetching tickers in batch. '
                f'Message: {e}') from e
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
            raise TemporaryError(
                f'Could not load tickers due to {e.__class__.__name__}. Message: {e}') from e
        except ccxt.BaseError as e:
            raise OperationalException(e) from e

    # Pricing info

    @retrier
    def fetch_ticker(self, pair: str) -> dict:
        try:
            if (pair not in self.markets or
                    self.markets[pair].get('active', False) is False):
                raise ExchangeError(f"Pair {pair} not available")
            data = self._api.fetch_ticker(pair)
            return data
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
            raise TemporaryError(
                f'Could not load ticker due to {e.__class__.__name__}. Message: {e}') from e
        except ccxt.BaseError as e:
            raise OperationalException(e) from e

    @staticmethod
    def get_next_limit_in_list(limit: int, limit_range: Optional[List[int]],
                               range_required: bool = True):
        """
        Get next greater value in the list.
        Used by fetch_l2_order_book if the api only supports a limited range
        """
        if not limit_range:
            return limit

        result = min([x for x in limit_range if limit <= x] + [max(limit_range)])
        if not range_required and limit > result:
            # Range is not required - we can use None as parameter.
            return None
        return result

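    # Illustrative sketch (not part of the original module): with a hypothetical
    # limit_range of [25, 100, 500, 1000]:
    #
    #   get_next_limit_in_list(50, [25, 100, 500, 1000])                          # -> 100
    #   get_next_limit_in_list(2000, [25, 100, 500, 1000])                        # -> 1000
    #   get_next_limit_in_list(2000, [25, 100, 500, 1000], range_required=False)  # -> None
    #   get_next_limit_in_list(50, None)                                          # -> 50
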
    @retrier
    def fetch_l2_order_book(self, pair: str, limit: int = 100) -> dict:
        """
        Get L2 order book from exchange.
        Can be limited to a certain amount (if supported).
        Returns a dict in the format
        {'asks': [price, volume], 'bids': [price, volume]}
        """
        limit1 = self.get_next_limit_in_list(limit, self._ft_has['l2_limit_range'],
                                             self._ft_has['l2_limit_range_required'])
        try:
            return self._api.fetch_l2_order_book(pair, limit1)
        except ccxt.NotSupported as e:
            raise OperationalException(
                f'Exchange {self._api.name} does not support fetching order book. '
                f'Message: {e}') from e
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
            raise TemporaryError(
                f'Could not get order book due to {e.__class__.__name__}. Message: {e}') from e
        except ccxt.BaseError as e:
            raise OperationalException(e) from e

    def get_rate(self, pair: str, refresh: bool, side: str) -> float:
        """
        Calculates bid/ask target
        bid rate - between current ask price and last price
        ask rate - either using ticker bid or first bid based on orderbook
        or remain static in any other case since it's not updating.
        :param pair: Pair to get rate for
        :param refresh: allow cached data
        :param side: "buy" or "sell"
        :return: float: Price
        :raises PricingError if orderbook price could not be determined.
        """
        cache_rate: TTLCache = self._buy_rate_cache if side == "buy" else self._sell_rate_cache
        [strat_name, name] = ['bid_strategy', 'Buy'] if side == "buy" else ['ask_strategy', 'Sell']

        if not refresh:
            rate = cache_rate.get(pair)
            # Check if cache has been invalidated
            if rate:
                logger.debug(f"Using cached {side} rate for {pair}.")
                return rate

        conf_strategy = self._config.get(strat_name, {})

        if conf_strategy.get('use_order_book', False) and ('use_order_book' in conf_strategy):

            order_book_top = conf_strategy.get('order_book_top', 1)
            order_book = self.fetch_l2_order_book(pair, order_book_top)
            logger.debug('order_book %s', order_book)
            # top 1 = index 0
            try:
                rate = order_book[f"{conf_strategy['price_side']}s"][order_book_top - 1][0]
            except (IndexError, KeyError) as e:
                logger.warning(
                    f"{name} Price at location {order_book_top} from orderbook could not be "
                    f"determined. Orderbook: {order_book}"
                )
                raise PricingError from e
            price_side = conf_strategy['price_side'].capitalize()
            logger.debug(f"{name} price from orderbook {price_side} "
                         f"side - top {order_book_top} order book {side} rate {rate:.8f}")
        else:
            logger.debug(f"Using Last {conf_strategy['price_side'].capitalize()} / Last Price")
            ticker = self.fetch_ticker(pair)
            ticker_rate = ticker[conf_strategy['price_side']]
            if ticker['last'] and ticker_rate:
                if side == 'buy' and ticker_rate > ticker['last']:
                    balance = conf_strategy.get('ask_last_balance', 0.0)
                    ticker_rate = ticker_rate + balance * (ticker['last'] - ticker_rate)
                elif side == 'sell' and ticker_rate < ticker['last']:
                    balance = conf_strategy.get('bid_last_balance', 0.0)
                    ticker_rate = ticker_rate - balance * (ticker_rate - ticker['last'])
            rate = ticker_rate

        if rate is None:
            raise PricingError(f"{name}-Rate for {pair} was empty.")
        cache_rate[pair] = rate

        return rate

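    # Illustrative sketch (not part of the original module): with a hypothetical
    # bid_strategy of {'price_side': 'ask', 'use_order_book': False, 'ask_last_balance': 0.5}
    # and a ticker of {'ask': 101, 'last': 100, ...}, the buy rate is interpolated as
    #
    #   rate = ask + ask_last_balance * (last - ask) = 101 + 0.5 * (100 - 101) = 100.5
    #
    # i.e. ask_last_balance=0.0 uses the ask price and ask_last_balance=1.0 uses the last price.
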
    # Fee handling

    @retrier
    def get_trades_for_order(self, order_id: str, pair: str, since: datetime,
                             params: Optional[Dict] = None) -> List:
        """
        Fetch trades using the "fetch_my_trades" endpoint and filter them by order-id.
        The "since" argument passed in comes from the database and is in UTC,
        as a timezone-naive datetime object.
        From the python documentation:
            > Naive datetime instances are assumed to represent local time
        Therefore, calling "since.timestamp()" directly would apply a local-timezone to UTC
        conversion. For UTC+ timezones this merely widens the window (the result would contain
        trades from a few extra hours instead of just the last 5 seconds), but for UTC- timezones
        it fails, since we would then be asking for trades with a "since" argument in the future.
        To avoid this, "since" is explicitly tagged as UTC below before converting.

        :param order_id: Order-id as given when creating the order
        :param pair: Pair the order is for
        :param since: datetime object of the order creation time. Assumes object is in UTC.
        """
        if self._config['dry_run']:
            return []
        if not self.exchange_has('fetchMyTrades'):
            return []
        try:
            # Allow 5s offset to catch slight time offsets (discovered in #1185)
            # since needs to be int in milliseconds
            _params = params if params else {}
            my_trades = self._api.fetch_my_trades(
                pair, int((since.replace(tzinfo=timezone.utc).timestamp() - 5) * 1000),
                params=_params)
            matched_trades = [trade for trade in my_trades if trade['order'] == order_id]

            self._log_exchange_response('get_trades_for_order', matched_trades)

            matched_trades = self._trades_contracts_to_amount(matched_trades)

            return matched_trades
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
            raise TemporaryError(
                f'Could not get trades due to {e.__class__.__name__}. Message: {e}') from e
        except ccxt.BaseError as e:
            raise OperationalException(e) from e

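    # Illustrative sketch (not part of the original module): the "since" passed to
    # fetch_my_trades is the order creation time minus the 5 second safety margin,
    # converted to milliseconds, e.g.
    #
    #   since = datetime(2022, 1, 1, 12, 0, 0)  # naive, interpreted as UTC
    #   int((since.replace(tzinfo=timezone.utc).timestamp() - 5) * 1000)
    #   # -> 1641038395000  (2022-01-01 11:59:55 UTC in milliseconds)
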
    def get_order_id_conditional(self, order: Dict[str, Any]) -> str:
        return order['id']

    @retrier
    def get_fee(self, symbol: str, type: str = '', side: str = '', amount: float = 1,
                price: float = 1, taker_or_maker: str = 'maker') -> float:
        try:
            if self._config['dry_run'] and self._config.get('fee', None) is not None:
                return self._config['fee']
            # validate that markets are loaded before trying to get fee
            if self._api.markets is None or len(self._api.markets) == 0:
                self._api.load_markets()

            return self._api.calculate_fee(symbol=symbol, type=type, side=side, amount=amount,
                                           price=price, takerOrMaker=taker_or_maker)['rate']
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
            raise TemporaryError(
                f'Could not get fee info due to {e.__class__.__name__}. Message: {e}') from e
        except ccxt.BaseError as e:
            raise OperationalException(e) from e

    @staticmethod
    def order_has_fee(order: Dict) -> bool:
        """
        Verifies if the passed in order dict has the needed keys to extract fees,
        and that these keys (currency, cost) are not empty.
        :param order: Order or trade (one trade) dict
        :return: True if the fee substructure contains currency and cost, false otherwise
        """
        if not isinstance(order, dict):
            return False
        return ('fee' in order and order['fee'] is not None
                and (order['fee'].keys() >= {'currency', 'cost'})
                and order['fee']['currency'] is not None
                and order['fee']['cost'] is not None
                )

    def calculate_fee_rate(self, order: Dict) -> Optional[float]:
        """
        Calculate fee rate if it's not given by the exchange.
        :param order: Order or trade (one trade) dict
        """
        if order['fee'].get('rate') is not None:
            return order['fee'].get('rate')
        fee_curr = order['fee']['currency']
        # Calculate fee based on order details
        if fee_curr in self.get_pair_base_currency(order['symbol']):
            # Base currency - divide by amount
            return round(
                order['fee']['cost'] / safe_value_fallback2(order, order, 'filled', 'amount'), 8)
        elif fee_curr in self.get_pair_quote_currency(order['symbol']):
            # Quote currency - divide by cost
            return round(order['fee']['cost'] / order['cost'], 8) if order['cost'] else None
        else:
            # If Fee currency is a different currency
            if not order['cost']:
                # If cost is None or 0.0 -> falsy, return None
                return None
            try:
                comb = self.get_valid_pair_combination(fee_curr, self._config['stake_currency'])
                tick = self.fetch_ticker(comb)

                fee_to_quote_rate = safe_value_fallback2(tick, tick, 'last', 'ask')
            except ExchangeError:
                fee_to_quote_rate = self._config['exchange'].get('unknown_fee_rate', None)
                if not fee_to_quote_rate:
                    return None
            return round((order['fee']['cost'] * fee_to_quote_rate) / order['cost'], 8)

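    # Illustrative sketch (not part of the original module): for a hypothetical trade on
    # 'ETH/USDT' with cost=200 USDT and a fee of {'currency': 'USDT', 'cost': 0.2}, the fee
    # currency matches the quote currency, so
    #
    #   fee_rate = fee_cost / order_cost = 0.2 / 200 = 0.001  (0.1%)
    #
    # If the fee were paid in the base currency instead, the fee cost would be divided by the
    # filled amount rather than by the order cost.
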
    def extract_cost_curr_rate(self, order: Dict) -> Tuple[float, str, Optional[float]]:
        """
        Extract tuple of cost, currency, rate.
        Requires order_has_fee to run first!
        :param order: Order or trade (one trade) dict
        :return: Tuple with cost, currency, rate of the given fee dict
        """
        return (order['fee']['cost'],
                order['fee']['currency'],
                self.calculate_fee_rate(order))

    # Historic data

    def get_historic_ohlcv(self, pair: str, timeframe: str,
                           since_ms: int, candle_type: CandleType,
                           is_new_pair: bool = False) -> List:
        """
        Get candle history using asyncio and returns the list of candles.
        Handles all async work for this.
        Async over one pair, assuming we get `self.ohlcv_candle_limit()` candles per call.
        :param pair: Pair to download
        :param timeframe: Timeframe to get data for
        :param since_ms: Timestamp in milliseconds to get history from
        :param candle_type: '', mark, index, premiumIndex, or funding_rate
        :return: List with candle (OHLCV) data
        """
        pair, _, _, data = self.loop.run_until_complete(
            self._async_get_historic_ohlcv(pair=pair, timeframe=timeframe,
                                           since_ms=since_ms, is_new_pair=is_new_pair,
                                           candle_type=candle_type))
        logger.info(f"Downloaded data for {pair} with length {len(data)}.")
        return data

    def get_historic_ohlcv_as_df(self, pair: str, timeframe: str,
                                 since_ms: int, candle_type: CandleType) -> DataFrame:
        """
        Minimal wrapper around get_historic_ohlcv - converting the result into a dataframe
        :param pair: Pair to download
        :param timeframe: Timeframe to get data for
        :param since_ms: Timestamp in milliseconds to get history from
        :param candle_type: Any of the enum CandleType (must match trading mode!)
        :return: OHLCV DataFrame
        """
        ticks = self.get_historic_ohlcv(pair, timeframe, since_ms=since_ms,
                                        candle_type=candle_type)
        return ohlcv_to_dataframe(ticks, timeframe, pair=pair, fill_missing=True,
                                  drop_incomplete=self._ohlcv_partial_candle)

    async def _async_get_historic_ohlcv(self, pair: str, timeframe: str,
                                        since_ms: int, candle_type: CandleType,
                                        is_new_pair: bool = False, raise_: bool = False,
                                        ) -> Tuple[str, str, str, List]:
        """
        Download historic ohlcv
        :param is_new_pair: used by binance subclass to allow "fast" new pair downloading
        :param candle_type: Any of the enum CandleType (must match trading mode!)
        """

        one_call = timeframe_to_msecs(timeframe) * self.ohlcv_candle_limit(timeframe)
        logger.debug(
            "one_call: %s msecs (%s)",
            one_call,
            arrow.utcnow().shift(seconds=one_call // 1000).humanize(only_distance=True)
        )
        input_coroutines = [self._async_get_candle_history(
            pair, timeframe, candle_type, since) for since in
            range(since_ms, arrow.utcnow().int_timestamp * 1000, one_call)]

        data: List = []
        # Chunk requests into batches of 100 to avoid overwhelming ccxt Throttling
        for input_coro in chunks(input_coroutines, 100):

            results = await asyncio.gather(*input_coro, return_exceptions=True)
            for res in results:
                if isinstance(res, Exception):
                    logger.warning(f"Async code raised an exception: {repr(res)}")
                    if raise_:
                        raise
                    continue
                else:
                    # Deconstruct tuple if it's not an exception
                    p, _, c, new_data = res
                    if p == pair and c == candle_type:
                        data.extend(new_data)
        # Sort data again after extending the result - above calls return in "async order"
        data = sorted(data, key=lambda x: x[0])
        return pair, timeframe, candle_type, data

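    # Illustrative sketch (not part of the original module): assuming a 5m timeframe and a
    # candle limit of 500 per request (the limit varies per exchange), each coroutine covers
    #
    #   one_call = 5 * 60 * 1000 * 500 = 150_000_000 ms  (~41.7 hours)
    #
    # so downloading 30 days of history would be split into ceil(720 / 41.7) = 18 calls,
    # gathered in batches of 100.
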
    def refresh_latest_ohlcv(self, pair_list: ListPairsWithTimeframes, *,
                             since_ms: Optional[int] = None, cache: bool = True,
                             drop_incomplete: bool = None
                             ) -> Dict[PairWithTimeframe, DataFrame]:
        """
        Refresh in-memory OHLCV asynchronously and set `_klines` with the result
        Loops asynchronously over pair_list and downloads all pairs async (semi-parallel).
        Only used in the dataprovider.refresh() method.
        :param pair_list: List of 3 element tuples containing (pair, timeframe, candle_type)
        :param since_ms: time since when to download, in milliseconds
        :param cache: Assign result to _klines. Useful for one-off downloads like for pairlists
        :param drop_incomplete: Control candle dropping.
            Specifying None defaults to _ohlcv_partial_candle
        :return: Dict of {(pair, timeframe, candle_type): DataFrame}
        """
        logger.debug("Refreshing candle (OHLCV) data for %d pairs", len(pair_list))
        drop_incomplete = self._ohlcv_partial_candle if drop_incomplete is None else drop_incomplete
        input_coroutines = []
        cached_pairs = []
        # Gather coroutines to run
        for pair, timeframe, candle_type in set(pair_list):
            if ((pair, timeframe, candle_type) not in self._klines or not cache
                    or self._now_is_time_to_refresh(pair, timeframe, candle_type)):
                if not since_ms and self.required_candle_call_count > 1:
                    # Multiple calls for one pair - to get more history
                    one_call = timeframe_to_msecs(timeframe) * self.ohlcv_candle_limit(timeframe)
                    move_to = one_call * self.required_candle_call_count
                    now = timeframe_to_next_date(timeframe)
                    since_ms = int((now - timedelta(seconds=move_to // 1000)).timestamp() * 1000)

                if since_ms:
                    input_coroutines.append(self._async_get_historic_ohlcv(
                        pair, timeframe, since_ms=since_ms, raise_=True, candle_type=candle_type))
                else:
                    # One call ... "regular" refresh
                    input_coroutines.append(self._async_get_candle_history(
                        pair, timeframe, since_ms=since_ms, candle_type=candle_type))
            else:
                logger.debug(
                    f"Using cached candle (OHLCV) data for {pair}, {timeframe}, {candle_type} ..."
                )
                cached_pairs.append((pair, timeframe, candle_type))

        results_df = {}
        # Chunk requests into batches of 100 to avoid overwhelming ccxt Throttling
        for input_coro in chunks(input_coroutines, 100):
            async def gather_stuff():
                return await asyncio.gather(*input_coro, return_exceptions=True)

            results = self.loop.run_until_complete(gather_stuff())

            for res in results:
                if isinstance(res, Exception):
                    logger.warning(f"Async code raised an exception: {repr(res)}")
                    continue
                # Deconstruct tuple (has 4 elements)
                pair, timeframe, c_type, ticks = res
                # keeping last candle time as last refreshed time of the pair
                if ticks:
                    self._pairs_last_refresh_time[(pair, timeframe, c_type)] = ticks[-1][0] // 1000
                # keeping parsed dataframe in cache
                ohlcv_df = ohlcv_to_dataframe(
                    ticks, timeframe, pair=pair, fill_missing=True,
                    drop_incomplete=drop_incomplete)
                results_df[(pair, timeframe, c_type)] = ohlcv_df
                if cache:
                    self._klines[(pair, timeframe, c_type)] = ohlcv_df
        # Return cached klines
        for pair, timeframe, c_type in cached_pairs:
            results_df[(pair, timeframe, c_type)] = self.klines(
                (pair, timeframe, c_type),
                copy=False
            )

        return results_df

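    # Illustrative sketch (not part of the original module): a hypothetical refresh of two
    # spot pairs on the 5m timeframe would look roughly like
    #
    #   data = exchange.refresh_latest_ohlcv(
    #       [('ETH/BTC', '5m', CandleType.SPOT), ('XRP/BTC', '5m', CandleType.SPOT)])
    #   df = data[('ETH/BTC', '5m', CandleType.SPOT)]
    #
    # where each value is an OHLCV DataFrame and the same keys are cached in self._klines.
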
    def _now_is_time_to_refresh(self, pair: str, timeframe: str, candle_type: CandleType) -> bool:
        # Timeframe in seconds
        interval_in_sec = timeframe_to_seconds(timeframe)

        return not (
            (self._pairs_last_refresh_time.get(
                (pair, timeframe, candle_type),
                0
            ) + interval_in_sec) >= arrow.utcnow().int_timestamp
        )

    @retrier_async
    async def _async_get_candle_history(
        self,
        pair: str,
        timeframe: str,
        candle_type: CandleType,
        since_ms: Optional[int] = None,
    ) -> Tuple[str, str, str, List]:
        """
        Asynchronously get candle history data using fetch_ohlcv
        :param candle_type: '', mark, index, premiumIndex, or funding_rate
        returns tuple: (pair, timeframe, candle_type, ohlcv_list)
        """
        try:
            # Fetch OHLCV asynchronously
            s = '(' + arrow.get(since_ms // 1000).isoformat() + ') ' if since_ms is not None else ''
            logger.debug(
                "Fetching pair %s, interval %s, since %s %s...",
                pair, timeframe, since_ms, s
            )
            params = deepcopy(self._ft_has.get('ohlcv_params', {}))
            if candle_type != CandleType.SPOT:
                params.update({'price': candle_type})
            if candle_type != CandleType.FUNDING_RATE:
                data = await self._api_async.fetch_ohlcv(
                    pair, timeframe=timeframe, since=since_ms,
                    limit=self.ohlcv_candle_limit(timeframe), params=params)
            else:
                # Funding rate
                data = await self._api_async.fetch_funding_rate_history(
                    pair, since=since_ms,
                    limit=self.ohlcv_candle_limit(timeframe))
                # Convert funding rate to candle pattern
                data = [[x['timestamp'], x['fundingRate'], 0, 0, 0, 0] for x in data]
            # Some exchanges sort OHLCV in ASC order and others in DESC.
            # Ex: Bittrex returns the list of OHLCV in ASC order (oldest first, newest last)
            # while GDAX returns the list of OHLCV in DESC order (newest first, oldest last)
            # Only sort if necessary to save computing time
            try:
                if data and data[0][0] > data[-1][0]:
                    data = sorted(data, key=lambda x: x[0])
            except IndexError:
                logger.exception("Error loading %s. Result was %s.", pair, data)
                return pair, timeframe, candle_type, []
            logger.debug("Done fetching pair %s, interval %s ...", pair, timeframe)
            return pair, timeframe, candle_type, data

        except ccxt.NotSupported as e:
            raise OperationalException(
                f'Exchange {self._api.name} does not support fetching historical '
                f'candle (OHLCV) data. Message: {e}') from e
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
            raise TemporaryError(f'Could not fetch historical candle (OHLCV) data '
                                 f'for pair {pair} due to {e.__class__.__name__}. '
                                 f'Message: {e}') from e
        except ccxt.BaseError as e:
            raise OperationalException(f'Could not fetch historical candle (OHLCV) data '
                                       f'for pair {pair}. Message: {e}') from e

    # Fetch historic trades

    @retrier_async
    async def _async_fetch_trades(self, pair: str,
                                  since: Optional[int] = None,
                                  params: Optional[dict] = None) -> List[List]:
        """
        Asynchronously gets trade history using fetch_trades.
        Handles exchange errors, does one call to the exchange.
        :param pair: Pair to fetch trade data for
        :param since: Since as integer timestamp in milliseconds
        returns: List of trades converted to list format (see trades_dict_to_list)
        """
        try:
            # fetch trades asynchronously
            if params:
                logger.debug("Fetching trades for pair %s, params: %s ", pair, params)
                trades = await self._api_async.fetch_trades(pair, params=params, limit=1000)
            else:
                logger.debug(
                    "Fetching trades for pair %s, since %s %s...",
                    pair, since,
                    '(' + arrow.get(since // 1000).isoformat() + ') ' if since is not None else ''
                )
                trades = await self._api_async.fetch_trades(pair, since=since, limit=1000)
            trades = self._trades_contracts_to_amount(trades)
            return trades_dict_to_list(trades)
        except ccxt.NotSupported as e:
            raise OperationalException(
                f'Exchange {self._api.name} does not support fetching historical trade data. '
                f'Message: {e}') from e
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
            raise TemporaryError(f'Could not load trade history due to {e.__class__.__name__}. '
                                 f'Message: {e}') from e
        except ccxt.BaseError as e:
            raise OperationalException(f'Could not fetch trade data. Msg: {e}') from e

    async def _async_get_trade_history_id(self, pair: str,
                                          until: int,
                                          since: Optional[int] = None,
                                          from_id: Optional[str] = None) -> Tuple[str, List[List]]:
        """
        Asynchronously gets trade history using fetch_trades
        use this when exchange uses id-based iteration (check `self._trades_pagination`)
        :param pair: Pair to fetch trade data for
        :param since: Since as integer timestamp in milliseconds
        :param until: Until as integer timestamp in milliseconds
        :param from_id: Download data starting with ID (if id is known). Ignores "since" if set.
        returns tuple: (pair, trades-list)
        """

        trades: List[List] = []

        if not from_id:
            # Fetch first elements using time-based method to get an ID to paginate on
            # Depending on the Exchange, this can introduce a drift at the start of the interval
            # of up to an hour.
            # e.g. Binance returns the "last 1000" candles within a 1h time interval
            # - so we will miss the first trades.
            t = await self._async_fetch_trades(pair, since=since)
            # DEFAULT_TRADES_COLUMNS: 0 -> timestamp
            # DEFAULT_TRADES_COLUMNS: 1 -> id
            from_id = t[-1][1]
            trades.extend(t[:-1])
        while True:
            t = await self._async_fetch_trades(pair,
                                               params={self._trades_pagination_arg: from_id})
            if t:
                # Skip last id since it's the key for the next call
                trades.extend(t[:-1])
                if from_id == t[-1][1] or t[-1][0] > until:
                    logger.debug(f"Stopping because from_id did not change. "
                                 f"Reached {t[-1][0]} > {until}")
                    # Reached the end of the defined-download period - add last trade as well.
                    trades.extend(t[-1:])
                    break

                from_id = t[-1][1]
            else:
                break

        return (pair, trades)

    async def _async_get_trade_history_time(self, pair: str, until: int,
                                            since: Optional[int] = None) -> Tuple[str, List[List]]:
        """
        Asynchronously gets trade history using fetch_trades,
        when the exchange uses time-based iteration (check `self._trades_pagination`)
        :param pair: Pair to fetch trade data for
        :param since: Since as integer timestamp in milliseconds
        :param until: Until as integer timestamp in milliseconds
        returns tuple: (pair, trades-list)
        """

        trades: List[List] = []
        # DEFAULT_TRADES_COLUMNS: 0 -> timestamp
        # DEFAULT_TRADES_COLUMNS: 1 -> id
        while True:
            t = await self._async_fetch_trades(pair, since=since)
            if t:
                since = t[-1][0]
                trades.extend(t)
                # Reached the end of the defined-download period
                if until and t[-1][0] > until:
                    logger.debug(
                        f"Stopping because until was reached. {t[-1][0]} > {until}")
                    break
            else:
                break

        return (pair, trades)

    async def _async_get_trade_history(self, pair: str,
                                       since: Optional[int] = None,
                                       until: Optional[int] = None,
                                       from_id: Optional[str] = None) -> Tuple[str, List[List]]:
        """
        Async wrapper handling downloading trades using either time or id based methods.
        """

        logger.debug(f"_async_get_trade_history(), pair: {pair}, "
                     f"since: {since}, until: {until}, from_id: {from_id}")

        if until is None:
            until = ccxt.Exchange.milliseconds()
            logger.debug(f"Exchange milliseconds: {until}")

        if self._trades_pagination == 'time':
            return await self._async_get_trade_history_time(
                pair=pair, since=since, until=until)
        elif self._trades_pagination == 'id':
            return await self._async_get_trade_history_id(
                pair=pair, since=since, until=until, from_id=from_id
            )
        else:
            raise OperationalException(f"Exchange {self.name} uses neither time, "
                                       f"nor id based pagination.")

    def get_historic_trades(self, pair: str,
                            since: Optional[int] = None,
                            until: Optional[int] = None,
                            from_id: Optional[str] = None) -> Tuple[str, List]:
        """
        Get trade history data using asyncio.
        Handles all async work and returns the list of trades.
        Async over one pair.
        :param pair: Pair to download
        :param since: Timestamp in milliseconds to get history from
        :param until: Timestamp in milliseconds. Defaults to current timestamp if not defined.
        :param from_id: Download data starting with ID (if id is known)
        :returns List of trade data
        """
        if not self.exchange_has("fetchTrades"):
            raise OperationalException("This exchange does not support downloading Trades.")

        return self.loop.run_until_complete(
            self._async_get_trade_history(pair=pair, since=since,
                                          until=until, from_id=from_id))

    @retrier
    def _get_funding_fees_from_exchange(self, pair: str, since: Union[datetime, int]) -> float:
        """
        Returns the sum of all funding fees that were exchanged for a pair within a timeframe
        Dry-run handling happens as part of _calculate_funding_fees.
        :param pair: (e.g. ADA/USDT)
        :param since: The earliest time of consideration for calculating funding fees,
            in unix time or as a datetime
        """
        if not self.exchange_has("fetchFundingHistory"):
            raise OperationalException(
                f"fetch_funding_history() is not available using {self.name}"
            )

        if type(since) is datetime:
            since = int(since.timestamp()) * 1000  # * 1000 for ms

        try:
            funding_history = self._api.fetch_funding_history(
                symbol=pair,
                since=since
            )
            return sum(fee['amount'] for fee in funding_history)
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
            raise TemporaryError(
                f'Could not get funding fees due to {e.__class__.__name__}. Message: {e}') from e
        except ccxt.BaseError as e:
            raise OperationalException(e) from e

    @retrier
    def load_leverage_tiers(self) -> Dict[str, List[Dict]]:
        if self.trading_mode == TradingMode.FUTURES and self.exchange_has('fetchLeverageTiers'):
            try:
                return self._api.fetch_leverage_tiers()
            except ccxt.DDoSProtection as e:
                raise DDosProtection(e) from e
            except (ccxt.NetworkError, ccxt.ExchangeError) as e:
                raise TemporaryError(
                    f'Could not load leverage tiers due to {e.__class__.__name__}. '
                    f'Message: {e}'
                ) from e
            except ccxt.BaseError as e:
                raise OperationalException(e) from e
        else:
            return {}

    def fill_leverage_tiers(self) -> None:
        """
        Assigns property _leverage_tiers to a dictionary of information about the leverage
        allowed on each pair
        """
        leverage_tiers = self.load_leverage_tiers()
        for pair, tiers in leverage_tiers.items():
            pair_tiers = []
            for tier in tiers:
                pair_tiers.append(self.parse_leverage_tier(tier))
            self._leverage_tiers[pair] = pair_tiers

    def parse_leverage_tier(self, tier) -> Dict:
        info = tier.get('info', {})
        return {
            'min': tier['notionalFloor'],
            'max': tier['notionalCap'],
            'mmr': tier['maintenanceMarginRate'],
            'lev': tier['maxLeverage'],
            'maintAmt': float(info['cum']) if 'cum' in info else None,
        }

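    # Illustrative sketch (not part of the original module): a leverage tier shaped like
    #
    #   {'notionalFloor': 0, 'notionalCap': 50000, 'maintenanceMarginRate': 0.01,
    #    'maxLeverage': 50, 'info': {'cum': '0.0'}}
    #
    # (the exact raw payload depends on the exchange) would be parsed by
    # parse_leverage_tier() into
    #
    #   {'min': 0, 'max': 50000, 'mmr': 0.01, 'lev': 50, 'maintAmt': 0.0}
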
    def get_max_leverage(self, pair: str, stake_amount: Optional[float]) -> float:
        """
        Returns the maximum leverage that a pair can be traded at
        :param pair: The base/quote currency pair being traded
        :param stake_amount: The total value of the trader's margin in quote currency
        """

        if self.trading_mode == TradingMode.SPOT:
            return 1.0

        if self.trading_mode == TradingMode.FUTURES:

            # Checks and edge cases
            if stake_amount is None:
                raise OperationalException(
                    f'{self.name}.get_max_leverage requires argument stake_amount'
                )

            if pair not in self._leverage_tiers:
                # Maybe raise exception because it can't be traded on futures?
                return 1.0

            pair_tiers = self._leverage_tiers[pair]

            if stake_amount == 0:
                return self._leverage_tiers[pair][0]['lev']  # Max lev for lowest amount

            for tier_index in range(len(pair_tiers)):

                tier = pair_tiers[tier_index]
                lev = tier['lev']

                if tier_index < len(pair_tiers) - 1:
                    next_tier = pair_tiers[tier_index + 1]
                    next_floor = next_tier['min'] / next_tier['lev']
                    if next_floor > stake_amount:  # Next tier min too high for stake amount
                        return min((tier['max'] / stake_amount), lev)
                    #
                    # With the two leverage tiers below,
                    # - a stake amount of 150 would mean a max leverage of (10000 / 150) = 66.66
                    # - stakes below 133.33 = max_lev of 75
                    # - stakes between 133.33-200 = max_lev of 10000/stake = 50.01-74.99
                    # - stakes from 200 to 1000 = max_lev of 50
                    #
                    # {
                    #     "min": 0,      # stake = 0.0
                    #     "max": 10000,  # max_stake@75 = 10000/75 = 133.33333333333334
                    #     "lev": 75,
                    # },
                    # {
                    #     "min": 10000,  # stake = 200.0
                    #     "max": 50000,  # max_stake@50 = 50000/50 = 1000.0
                    #     "lev": 50,
                    # }
                    #

                else:  # if on the last tier
                    if stake_amount > tier['max']:  # If stake is > than max tradeable amount
                        raise InvalidOrderException(f'Amount {stake_amount} too high for {pair}')
                    else:
                        return tier['lev']

            raise OperationalException(
                'Looped through all tiers without finding a max leverage. Should never be reached'
            )

        elif self.trading_mode == TradingMode.MARGIN:  # Search markets.limits for max lev
            market = self.markets[pair]
            if market['limits']['leverage']['max'] is not None:
                return market['limits']['leverage']['max']
            else:
                return 1.0  # Default if max leverage cannot be found
        else:
            return 1.0

    @retrier
    def _set_leverage(
        self,
        leverage: float,
        pair: Optional[str] = None,
        trading_mode: Optional[TradingMode] = None
    ):
        """
        Sets the leverage before making a trade, so that the same leverage
        is not applied to every trade.
        """
        if self._config['dry_run'] or not self.exchange_has("setLeverage"):
            # Some exchanges only support one margin_mode type
            return

        try:
            self._api.set_leverage(symbol=pair, leverage=leverage)
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
            raise TemporaryError(
                f'Could not set leverage due to {e.__class__.__name__}. Message: {e}') from e
        except ccxt.BaseError as e:
            raise OperationalException(e) from e

    def funding_fee_cutoff(self, open_date: datetime):
        """
        :param open_date: The open date for a trade
        :return: True if the open date is past the funding-fee cutoff for its hour,
            in which case the trade is treated as starting at the next full hour.
        """
        return open_date.minute > 0 or open_date.second > 0

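    # Illustrative sketch (not part of the original module): trades opened exactly on the hour
    # are not shifted, anything later within the hour is, e.g.
    #
    #   funding_fee_cutoff(datetime(2022, 1, 1, 8, 0, 0))  # -> False (counts from 08:00)
    #   funding_fee_cutoff(datetime(2022, 1, 1, 8, 0, 1))  # -> True  (shifted to 09:00 by the
    #                                                      #          caller below)
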
    @retrier
    def set_margin_mode(self, pair: str, margin_mode: MarginMode, params: dict = {}):
        """
        Sets the margin mode on the exchange to cross or isolated for a specific pair
        :param pair: base/quote currency pair (e.g. "ADA/USDT")
        """
        if self._config['dry_run'] or not self.exchange_has("setMarginMode"):
            # Some exchanges only support one margin_mode type
            return

        try:
            self._api.set_margin_mode(margin_mode.value, pair, params)
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
            raise TemporaryError(
                f'Could not set margin mode due to {e.__class__.__name__}. Message: {e}') from e
        except ccxt.BaseError as e:
            raise OperationalException(e) from e

    def _fetch_and_calculate_funding_fees(
        self,
        pair: str,
        amount: float,
        is_short: bool,
        open_date: datetime,
        close_date: Optional[datetime] = None
    ) -> float:
        """
        Fetches and calculates the sum of all funding fees that occurred for a pair
        during a futures trade.
        Only used during dry-run or if the exchange does not provide a funding_rates endpoint.
        :param pair: The quote/base pair of the trade
        :param amount: The quantity of the trade
        :param is_short: trade direction
        :param open_date: The date and time that the trade started
        :param close_date: The date and time that the trade ended
        """

        if self.funding_fee_cutoff(open_date):
            open_date += timedelta(hours=1)
        timeframe = self._ft_has['mark_ohlcv_timeframe']
        timeframe_ff = self._ft_has.get('funding_fee_timeframe',
                                        self._ft_has['mark_ohlcv_timeframe'])
        open_date = timeframe_to_prev_date(timeframe, open_date)

        if not close_date:
            close_date = datetime.now(timezone.utc)
        open_timestamp = int(open_date.timestamp()) * 1000
        # close_timestamp = int(close_date.timestamp()) * 1000

        mark_comb: PairWithTimeframe = (
            pair, timeframe, CandleType.from_string(self._ft_has["mark_ohlcv_price"]))

        funding_comb: PairWithTimeframe = (pair, timeframe_ff, CandleType.FUNDING_RATE)
        candle_histories = self.refresh_latest_ohlcv(
            [mark_comb, funding_comb],
            since_ms=open_timestamp,
            cache=False,
            drop_incomplete=False,
        )
        funding_rates = candle_histories[funding_comb]
        mark_rates = candle_histories[mark_comb]
        funding_mark_rates = self.combine_funding_and_mark(
            funding_rates=funding_rates, mark_rates=mark_rates)

        return self.calculate_funding_fees(
            funding_mark_rates,
            amount=amount,
            is_short=is_short,
            open_date=open_date,
            close_date=close_date
        )

    @staticmethod
    def combine_funding_and_mark(funding_rates: DataFrame, mark_rates: DataFrame) -> DataFrame:
        """
        Combine funding-rates and mark-rates dataframes
        :param funding_rates: Dataframe containing Funding rates (Type FUNDING_RATE)
        :param mark_rates: Dataframe containing Mark rates (Type mark_ohlcv_price)
        """

        return funding_rates.merge(mark_rates, on='date', how="inner", suffixes=["_fund", "_mark"])

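    # Illustrative sketch of combine_funding_and_mark (hypothetical values): merging on 'date'
    # keeps one row per candle present in both frames, with overlapping OHLCV columns suffixed:
    #   date                  open_fund   open_mark  ...
    #   2022-01-01 00:00:00   0.0001      40000.0    ...
    #   2022-01-01 08:00:00  -0.0002      41000.0    ...
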
    def calculate_funding_fees(
        self,
        df: DataFrame,
        amount: float,
        is_short: bool,
        open_date: datetime,
        close_date: Optional[datetime] = None,
        time_in_ratio: Optional[float] = None
    ) -> float:
        """
        Calculates the sum of all funding fees that occurred for a pair during a futures trade
        :param df: Dataframe containing combined funding and mark rates
                   as `open_fund` and `open_mark`.
        :param amount: The quantity of the trade
        :param is_short: trade direction
        :param open_date: The date and time that the trade started
        :param close_date: The date and time that the trade ended
        :param time_in_ratio: Not used by most exchange classes
        """
        fees: float = 0

        if not df.empty:
            df = df[(df['date'] >= open_date) & (df['date'] <= close_date)]
            fees = sum(df['open_fund'] * df['open_mark'] * amount)

        # Negate fees for longs as funding_fees expects it this way based on live endpoints.
        return fees if is_short else -fees

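    # Worked example for calculate_funding_fees (hypothetical values): with amount=2 and two
    # in-range candles, fees = 0.0001*40000*2 + (-0.0002)*41000*2 = 8.0 - 16.4 = -8.4.
    # A short position returns -8.4; a long returns +8.4 (sign flipped to match the
    # convention of the live funding-fee endpoints).
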
    def get_funding_fees(
            self, pair: str, amount: float, is_short: bool, open_date: datetime) -> float:
        """
        Fetch funding fees, either from the exchange (live) or calculate them
        based on funding rate/mark price history
        :param pair: The quote/base pair of the trade
        :param is_short: trade direction
        :param amount: Trade amount
        :param open_date: Open date of the trade
        """
        if self.trading_mode == TradingMode.FUTURES:
            if self._config['dry_run']:
                funding_fees = self._fetch_and_calculate_funding_fees(
                    pair, amount, is_short, open_date)
            else:
                funding_fees = self._get_funding_fees_from_exchange(pair, open_date)
            return funding_fees
        else:
            return 0.0

    @retrier
    def get_liquidation_price(
        self,
        pair: str,
        # Dry-run
        open_rate: float,   # Entry price of position
        is_short: bool,
        position: float,  # Absolute value of position size
        wallet_balance: float,  # Or margin balance
        mm_ex_1: float = 0.0,  # (Binance) Cross only
        upnl_ex_1: float = 0.0,  # (Binance) Cross only
    ) -> Optional[float]:
        """
        Returns the liquidation price of a position, either fetched from the exchange (live)
        or estimated via dry_run_liquidation_price (dry-run, or exchanges without a
        fetchPositions endpoint), adjusted by the configured liquidation_buffer.
        :param pair: base/quote currency pair (e.g. "ADA/USDT")
        """
        if self.trading_mode == TradingMode.SPOT:
            return None
        elif (self.margin_mode is None):
            raise OperationalException(
                f'{self.name}.margin_mode must be set for liquidation_price')
        elif (self.trading_mode != TradingMode.FUTURES
                and self.margin_mode != MarginMode.ISOLATED):
            raise OperationalException(
                f"{self.name} does not support {self.margin_mode.value} {self.trading_mode.value}")

        if self._config['dry_run'] or not self.exchange_has("fetchPositions"):

            isolated_liq = self.dry_run_liquidation_price(
                pair=pair,
                open_rate=open_rate,
                is_short=is_short,
                position=position,
                wallet_balance=wallet_balance,
                mm_ex_1=mm_ex_1,
                upnl_ex_1=upnl_ex_1
            )
        else:
            try:
                positions = self._api.fetch_positions([pair])
                if len(positions) > 0:
                    pos = positions[0]
                    isolated_liq = pos['liquidationPrice']
                else:
                    return None
            except ccxt.DDoSProtection as e:
                raise DDosProtection(e) from e
            except (ccxt.NetworkError, ccxt.ExchangeError) as e:
                raise TemporaryError(
                    f'Could not fetch liquidation price due to '
                    f'{e.__class__.__name__}. Message: {e}') from e
            except ccxt.BaseError as e:
                raise OperationalException(e) from e

        if isolated_liq:
            buffer_amount = abs(open_rate - isolated_liq) * self.liquidation_buffer
            isolated_liq = (
                isolated_liq - buffer_amount
                if is_short else
                isolated_liq + buffer_amount
            )
            return isolated_liq
        else:
            return None

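    # Illustrative note on get_liquidation_price (hypothetical values): with liquidation_buffer
    # set to 0.05, a long entered at 40000 with an estimated liquidation of 32000 is reported as
    # 32000 + 0.05 * (40000 - 32000) = 32400, i.e. the buffer moves the reported level towards
    # the entry price, presumably so the bot can react before the exchange itself liquidates.
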
    def dry_run_liquidation_price(
        self,
        pair: str,
        open_rate: float,   # Entry price of position
        is_short: bool,
        position: float,  # Absolute value of position size
        wallet_balance: float,  # Or margin balance
        mm_ex_1: float = 0.0,  # (Binance) Cross only
        upnl_ex_1: float = 0.0,  # (Binance) Cross only
    ) -> Optional[float]:
        """
        PERPETUAL:
         gateio: https://www.gate.io/help/futures/perpetual/22160/calculation-of-liquidation-price
         okex: https://www.okex.com/support/hc/en-us/articles/
        360053909592-VI-Introduction-to-the-isolated-mode-of-Single-Multi-currency-Portfolio-margin

        :param pair: Pair to calculate the liquidation price for
        :param open_rate: Entry price of position
        :param is_short: True if the trade is a short, False otherwise
        :param position: Absolute value of position size incl. leverage (in base currency)
        :param wallet_balance: Amount of margin_mode in the wallet being used to trade
            Cross-Margin Mode: crossWalletBalance
            Isolated-Margin Mode: isolatedWalletBalance
        (trading mode and margin mode are taken from self.trading_mode / self.margin_mode:
         SPOT, MARGIN, FUTURES and ISOLATED or CROSS respectively)

        # * Not required by Gateio or OKX
        :param mm_ex_1: (Binance) Cross only
        :param upnl_ex_1: (Binance) Cross only
        """

        market = self.markets[pair]
        taker_fee_rate = market['taker']
        mm_ratio, _ = self.get_maintenance_ratio_and_amt(pair, position)

        if self.trading_mode == TradingMode.FUTURES and self.margin_mode == MarginMode.ISOLATED:

            if market['inverse']:
                raise OperationalException(
                    "Freqtrade does not yet support inverse contracts")

            value = wallet_balance / position

            mm_ratio_taker = (mm_ratio + taker_fee_rate)
            if is_short:
                return (open_rate + value) / (1 + mm_ratio_taker)
            else:
                return (open_rate - value) / (1 - mm_ratio_taker)
        else:
            raise OperationalException(
                "Freqtrade only supports isolated futures for leverage trading")

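    # Worked example for dry_run_liquidation_price (hypothetical, isolated linear contract):
    #   open_rate=40000, position=0.5, wallet_balance=4000, mm_ratio=0.01, taker_fee_rate=0.0005
    #   value = 4000 / 0.5 = 8000
    #   long:  (40000 - 8000) / (1 - 0.0105) ≈ 32339.6
    #   short: (40000 + 8000) / (1 + 0.0105) ≈ 47501.2
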
    def get_maintenance_ratio_and_amt(
        self,
        pair: str,
        nominal_value: float = 0.0,
    ) -> Tuple[float, Optional[float]]:
        """
        :param pair: Market symbol
        :param nominal_value: The total trade amount in quote currency including leverage
        :return: (maintenance margin ratio, maintenance amount)
                 The maintenance amount is only used on Binance
        """

        if self.exchange_has('fetchLeverageTiers'):

            if pair not in self._leverage_tiers:
                raise InvalidOrderException(
                    f"Maintenance margin rate for {pair} is unavailable for {self.name}"
                )

            pair_tiers = self._leverage_tiers[pair]

            for tier in reversed(pair_tiers):
                if nominal_value >= tier['min']:
                    return (tier['mmr'], tier['maintAmt'])

            # The lowest notional_floor for any pair in fetch_leverage_tiers is always 0 because it
            # describes the min amt for a tier, and the lowest tier will always go down to 0
            raise OperationalException("nominal value cannot be lower than 0")
        else:
            raise OperationalException(f"Cannot get maintenance ratio using {self.name}")


def is_exchange_known_ccxt(exchange_name: str, ccxt_module: CcxtModuleType = None) -> bool:
    return exchange_name in ccxt_exchanges(ccxt_module)


def is_exchange_officially_supported(exchange_name: str) -> bool:
    return exchange_name in ['bittrex', 'binance', 'kraken', 'ftx', 'gateio', 'okx']


def ccxt_exchanges(ccxt_module: CcxtModuleType = None) -> List[str]:
    """
    Return the list of all exchanges known to ccxt
    """
    return ccxt_module.exchanges if ccxt_module is not None else ccxt.exchanges


def available_exchanges(ccxt_module: CcxtModuleType = None) -> List[str]:
    """
    Return exchanges available to the bot, i.e. non-bad exchanges in the ccxt list
    """
    exchanges = ccxt_exchanges(ccxt_module)
    return [x for x in exchanges if validate_exchange(x)[0]]


def validate_exchange(exchange: str) -> Tuple[bool, str]:
    """
    Check if an exchange is usable: required ccxt features are present and it is not blacklisted.
    :return: Tuple of (valid, reason) - reason lists missing features or the blacklist reason.
    """
    ex_mod = getattr(ccxt, exchange.lower())()
    if not ex_mod or not ex_mod.has:
        return False, ''
    missing = [k for k in EXCHANGE_HAS_REQUIRED if ex_mod.has.get(k) is not True]
    if missing:
        return False, f"missing: {', '.join(missing)}"

    missing_opt = [k for k in EXCHANGE_HAS_OPTIONAL if not ex_mod.has.get(k)]

    if exchange.lower() in BAD_EXCHANGES:
        return False, BAD_EXCHANGES.get(exchange.lower(), '')
    if missing_opt:
        return True, f"missing opt: {', '.join(missing_opt)}"

    return True, ''


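# Illustrative usage of validate_exchange (results depend on the installed ccxt version):
#   validate_exchange('binance')       # e.g. (True, '') or (True, 'missing opt: ...')
#   validate_exchange('someexchange')  # e.g. (False, 'missing: ...') if required features are absent
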
def validate_exchanges(all_exchanges: bool) -> List[Tuple[str, bool, str]]:
    """
    :return: List of tuples with exchangename, valid, reason.
    """
    exchanges = ccxt_exchanges() if all_exchanges else available_exchanges()
    exchanges_valid = [
        (e, *validate_exchange(e)) for e in exchanges
    ]
    return exchanges_valid


def timeframe_to_seconds(timeframe: str) -> int:
    """
    Translates the timeframe interval value written in the human readable
    form ('1m', '5m', '1h', '1d', '1w', etc.) to the number
    of seconds for one timeframe interval.
    """
    return ccxt.Exchange.parse_timeframe(timeframe)


def timeframe_to_minutes(timeframe: str) -> int:
    """
    Same as timeframe_to_seconds, but returns minutes.
    """
    return ccxt.Exchange.parse_timeframe(timeframe) // 60


def timeframe_to_msecs(timeframe: str) -> int:
    """
    Same as timeframe_to_seconds, but returns milliseconds.
    """
    return ccxt.Exchange.parse_timeframe(timeframe) * 1000


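# Illustrative conversions: timeframe_to_seconds('5m') == 300, timeframe_to_minutes('1h') == 60,
# timeframe_to_msecs('1d') == 86_400_000.
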
def timeframe_to_prev_date(timeframe: str, date: Optional[datetime] = None) -> datetime:
    """
    Use Timeframe and determine last possible candle.
    :param timeframe: timeframe in string format (e.g. "5m")
    :param date: date to use. Defaults to utcnow()
    :returns: date of previous candle (with utc timezone)
    """
    if not date:
        date = datetime.now(timezone.utc)

    new_timestamp = ccxt.Exchange.round_timeframe(timeframe, date.timestamp() * 1000,
                                                  ROUND_DOWN) // 1000
    return datetime.fromtimestamp(new_timestamp, tz=timezone.utc)


def timeframe_to_next_date(timeframe: str, date: Optional[datetime] = None) -> datetime:
    """
    Use Timeframe and determine next candle.
    :param timeframe: timeframe in string format (e.g. "5m")
    :param date: date to use. Defaults to utcnow()
    :returns: date of next candle (with utc timezone)
    """
    if not date:
        date = datetime.now(timezone.utc)
    new_timestamp = ccxt.Exchange.round_timeframe(timeframe, date.timestamp() * 1000,
                                                  ROUND_UP) // 1000
    return datetime.fromtimestamp(new_timestamp, tz=timezone.utc)


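# Illustrative rounding (UTC): for a date of 2022-01-01 00:03:00,
#   timeframe_to_prev_date('5m', date) -> 2022-01-01 00:00:00
#   timeframe_to_next_date('5m', date) -> 2022-01-01 00:05:00
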
def market_is_active(market: Dict) -> bool:
    """
    Return True if the market is active.
    """
    # "It's active, if the active flag isn't explicitly set to false. If it's missing or
    # true then it's true. If it's undefined, then it's most likely true, but not 100% )"
    # See https://github.com/ccxt/ccxt/issues/4874,
    # https://github.com/ccxt/ccxt/issues/4075#issuecomment-434760520
    return market.get('active', True) is not False