Update exchange.py
Automatically buy BNB for fees when in dry-run mode; also fixed a variable misnomer at line 745.
parent c12f2378db
commit 953ed5df3b
@@ -110,7 +110,11 @@ class Exchange:
             logger.info('Instance is running with dry_run enabled')
         logger.info(f"Using CCXT {ccxt.__version__}")
         exchange_config = config['exchange']
-        self.log_responses = exchange_config.get('log_responses', False)
+        self._name = exchange_config['name']
+        self._BNB_min = exchange_config['reduced_fees_strategy']['BNB_minimum']
+        self._BNB_max = exchange_config['reduced_fees_strategy']['BNB_maximum']
+        self._avoid_duplicate_order = False
+        self._log_responses = exchange_config.get('log_responses', False)

         # Deep merge ft_has with default ft_has options
         self._ft_has = deep_merge_dicts(self._ft_has, deepcopy(self._ft_has_default))
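Note: the two `reduced_fees_strategy` keys read above are not part of the stock freqtrade exchange configuration, so the exchange section of the config is presumably extended along these lines. Only the key names come from the hunk above; the values and the surrounding layout are illustrative assumptions.

```python
# Hypothetical excerpt of config['exchange'] as the new __init__ expects it.
# Key names are taken from the diff; the numeric values are made-up examples.
exchange_config = {
    "name": "binance",
    "log_responses": False,
    "reduced_fees_strategy": {
        "BNB_minimum": 0.1,  # free BNB balance below which a top-up is triggered
        "BNB_maximum": 0.5,  # free BNB balance the top-up order aims to restore
    },
}
```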
@@ -181,7 +185,6 @@ class Exchange:
         """
         # Find matching class for the given exchange name
         name = exchange_config['name']
-
         if not is_exchange_known_ccxt(name, ccxt_module):
             raise OperationalException(f'Exchange {name} is not supported by ccxt')

@@ -239,7 +242,7 @@ class Exchange:

     def _log_exchange_response(self, endpoint, response) -> None:
         """ Log exchange responses """
-        if self.log_responses:
+        if self._log_responses:
             logger.info(f"API {endpoint}: {response}")

     def ohlcv_candle_limit(self, timeframe: str) -> int:
@@ -258,7 +261,6 @@ class Exchange:
         """
         Return exchange ccxt markets, filtered out by base currency and quote currency
         if this was requested in parameters.
-
         TODO: consider moving it to the Dataprovider
         """
         markets = self.markets
@ -685,20 +687,16 @@ class Exchange:
|
|||||||
if not self.exchange_has('fetchL2OrderBook'):
|
if not self.exchange_has('fetchL2OrderBook'):
|
||||||
return True
|
return True
|
||||||
ob = self.fetch_l2_order_book(pair, 1)
|
ob = self.fetch_l2_order_book(pair, 1)
|
||||||
try:
|
if side == 'buy':
|
||||||
if side == 'buy':
|
price = ob['asks'][0][0]
|
||||||
price = ob['asks'][0][0]
|
logger.debug(f"{pair} checking dry buy-order: price={price}, limit={limit}")
|
||||||
logger.debug(f"{pair} checking dry buy-order: price={price}, limit={limit}")
|
if limit >= price:
|
||||||
if limit >= price:
|
return True
|
||||||
return True
|
else:
|
||||||
else:
|
price = ob['bids'][0][0]
|
||||||
price = ob['bids'][0][0]
|
logger.debug(f"{pair} checking dry sell-order: price={price}, limit={limit}")
|
||||||
logger.debug(f"{pair} checking dry sell-order: price={price}, limit={limit}")
|
if limit <= price:
|
||||||
if limit <= price:
|
return True
|
||||||
return True
|
|
||||||
except IndexError:
|
|
||||||
# Ignore empty orderbooks when filling - can be filled with the next iteration.
|
|
||||||
pass
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def check_dry_limit_order_filled(self, order: Dict[str, Any]) -> Dict[str, Any]:
|
def check_dry_limit_order_filled(self, order: Dict[str, Any]) -> Dict[str, Any]:
|
||||||
@@ -737,6 +735,15 @@ class Exchange:
                      rate: float, time_in_force: str = 'gtc') -> Dict:

         if self._config['dry_run']:
+            balances = self._api.fetch_balance(self)
+            if (balances["free"] > self._BNB_min and self._name == "binance" and
+                    self._avoid_duplicate_order is False):
+                print("in line 741")
+                self.create_order(pair, ordertype, side, (self._BNB_max - balances["free"]), rate)
+                self._avoid_duplicate_order = True
+            else:
+                print("in line 744")
+                self._avoid_duplicate_order = False
             dry_order = self.create_dry_run_order(pair, ordertype, side, amount, rate)
             return dry_order

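For context on the hunk above: ccxt's `fetch_balance()` takes no positional argument and returns per-currency dictionaries (for example `balance['free']['BNB']`) rather than a single number. A minimal standalone sketch of the intended top-up idea, written directly against ccxt with placeholder thresholds and a placeholder BNB/USDT funding pair, could look like the following. This is an illustration only, not the committed code, and a real run needs API credentials.

```python
# Illustrative sketch of the dry-run BNB fee top-up idea -- not the committed code.
# BNB_MIN, BNB_MAX and the BNB/USDT funding pair are placeholder assumptions.
import ccxt

BNB_MIN = 0.1  # hypothetical lower bound for the free BNB balance
BNB_MAX = 0.5  # hypothetical target balance after topping up

exchange = ccxt.binance()                   # needs API credentials for a real run
balance = exchange.fetch_balance()          # {'free': {'BNB': ...}, 'used': {...}, ...}
free_bnb = balance['free'].get('BNB', 0.0)

if free_bnb < BNB_MIN:
    # Buy enough BNB to reach the target so trading fees can be paid in BNB.
    exchange.create_order('BNB/USDT', 'market', 'buy', BNB_MAX - free_bnb)
```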
@@ -744,7 +751,6 @@ class Exchange:
         if time_in_force != 'gtc' and ordertype != 'market':
             param = self._ft_has.get('time_in_force_parameter', '')
             params.update({param: time_in_force})
-
         try:
             # Set the precision for amount and price(rate) as accepted by the exchange
             amount = self.amount_to_precision(pair, amount)
@@ -1103,7 +1109,6 @@ class Exchange:
         This works for timezones UTC+ since then the result will contain trades from a few hours
         instead of from the last 5 seconds, however fails for UTC- timezones,
         since we're then asking for trades with a "since" argument in the future.
-
         :param order_id order_id: Order-id as given when creating the order
         :param pair: Pair the order is for
         :param since: datetime object of the order creation time. Assumes object is in UTC.
@@ -1194,11 +1199,9 @@ class Exchange:
                 tick = self.fetch_ticker(comb)

                 fee_to_quote_rate = safe_value_fallback2(tick, tick, 'last', 'ask')
+                return round((order['fee']['cost'] * fee_to_quote_rate) / order['cost'], 8)
             except ExchangeError:
-                fee_to_quote_rate = self._config['exchange'].get('unknown_fee_rate', None)
-                if not fee_to_quote_rate:
-                    return None
-            return round((order['fee']['cost'] * fee_to_quote_rate) / order['cost'], 8)
+                return None

     def extract_cost_curr_rate(self, order: Dict) -> Tuple[float, str, Optional[float]]:
         """
@@ -1269,7 +1272,7 @@ class Exchange:
         results = await asyncio.gather(*input_coro, return_exceptions=True)
         for res in results:
             if isinstance(res, Exception):
-                logger.warning(f"Async code raised an exception: {repr(res)}")
+                logger.warning("Async code raised an exception: %s", res.__class__.__name__)
                 if raise_:
                     raise
                 continue
@@ -1300,7 +1303,7 @@ class Exchange:
         cached_pairs = []
         # Gather coroutines to run
         for pair, timeframe in set(pair_list):
-            if ((pair, timeframe) not in self._klines or not cache
+            if ((pair, timeframe) not in self._klines
                     or self._now_is_time_to_refresh(pair, timeframe)):
                 if not since_ms and self.required_candle_call_count > 1:
                     # Multiple calls for one pair - to get more history
@@ -1323,30 +1326,27 @@ class Exchange:
                 )
                 cached_pairs.append((pair, timeframe))

+        results = asyncio.get_event_loop().run_until_complete(
+            asyncio.gather(*input_coroutines, return_exceptions=True))
+
         results_df = {}
-        # Chunk requests into batches of 100 to avoid overwelming ccxt Throttling
-        for input_coro in chunks(input_coroutines, 100):
-            results = asyncio.get_event_loop().run_until_complete(
-                asyncio.gather(*input_coro, return_exceptions=True))
-
-            # handle caching
-            for res in results:
-                if isinstance(res, Exception):
-                    logger.warning(f"Async code raised an exception: {repr(res)}")
-                    continue
-                # Deconstruct tuple (has 3 elements)
-                pair, timeframe, ticks = res
-                # keeping last candle time as last refreshed time of the pair
-                if ticks:
-                    self._pairs_last_refresh_time[(pair, timeframe)] = ticks[-1][0] // 1000
-                # keeping parsed dataframe in cache
-                ohlcv_df = ohlcv_to_dataframe(
-                    ticks, timeframe, pair=pair, fill_missing=True,
-                    drop_incomplete=self._ohlcv_partial_candle)
-                results_df[(pair, timeframe)] = ohlcv_df
-                if cache:
-                    self._klines[(pair, timeframe)] = ohlcv_df
-
+        # handle caching
+        for res in results:
+            if isinstance(res, Exception):
+                logger.warning("Async code raised an exception: %s", res.__class__.__name__)
+                continue
+            # Deconstruct tuple (has 3 elements)
+            pair, timeframe, ticks = res
+            # keeping last candle time as last refreshed time of the pair
+            if ticks:
+                self._pairs_last_refresh_time[(pair, timeframe)] = ticks[-1][0] // 1000
+            # keeping parsed dataframe in cache
+            ohlcv_df = ohlcv_to_dataframe(
+                ticks, timeframe, pair=pair, fill_missing=True,
+                drop_incomplete=self._ohlcv_partial_candle)
+            results_df[(pair, timeframe)] = ohlcv_df
+            if cache:
+                self._klines[(pair, timeframe)] = ohlcv_df
         # Return cached klines
         for pair, timeframe in cached_pairs:
             results_df[(pair, timeframe)] = self.klines((pair, timeframe), copy=False)