merge develop into RL

robcaulk 2022-11-17 21:59:07 +01:00
commit 387c905a86
52 changed files with 332 additions and 874 deletions

.gitignore

@@ -109,7 +109,6 @@ target/
!*.gitkeep
!config_examples/config_binance.example.json
!config_examples/config_bittrex.example.json
-!config_examples/config_ftx.example.json
!config_examples/config_full.example.json
!config_examples/config_kraken.example.json
!config_examples/config_freqai.example.json

@@ -15,9 +15,9 @@ repos:
additional_dependencies:
- types-cachetools==5.2.1
- types-filelock==3.2.7
- - types-requests==2.28.11.2
+ - types-requests==2.28.11.4
- types-tabulate==0.9.0.0
- - types-python-dateutil==2.8.19.2
+ - types-python-dateutil==2.8.19.3
# stages: [push]
- repo: https://github.com/pycqa/isort

@@ -1,96 +0,0 @@
{
"max_open_trades": 3,
"stake_currency": "USD",
"stake_amount": 50,
"tradable_balance_ratio": 0.99,
"fiat_display_currency": "USD",
"timeframe": "5m",
"dry_run": true,
"cancel_open_orders_on_exit": false,
"unfilledtimeout": {
"entry": 10,
"exit": 10,
"exit_timeout_count": 0,
"unit": "minutes"
},
"entry_pricing": {
"price_side": "same",
"use_order_book": true,
"order_book_top": 1,
"price_last_balance": 0.0,
"check_depth_of_market": {
"enabled": false,
"bids_to_ask_delta": 1
}
},
"exit_pricing": {
"price_side": "same",
"use_order_book": true,
"order_book_top": 1
},
"exchange": {
"name": "ftx",
"key": "your_exchange_key",
"secret": "your_exchange_secret",
"ccxt_config": {},
"ccxt_async_config": {},
"pair_whitelist": [
"BTC/USD",
"ETH/USD",
"BNB/USD",
"USDT/USD",
"LTC/USD",
"SRM/USD",
"SXP/USD",
"XRP/USD",
"DOGE/USD",
"1INCH/USD",
"CHZ/USD",
"MATIC/USD",
"LINK/USD",
"OXY/USD",
"SUSHI/USD"
],
"pair_blacklist": [
"FTT/USD"
]
},
"pairlists": [
{"method": "StaticPairList"}
],
"edge": {
"enabled": false,
"process_throttle_secs": 3600,
"calculate_since_number_of_days": 7,
"allowed_risk": 0.01,
"stoploss_range_min": -0.01,
"stoploss_range_max": -0.1,
"stoploss_range_step": -0.01,
"minimum_winrate": 0.60,
"minimum_expectancy": 0.20,
"min_trade_number": 10,
"max_trade_duration_minute": 1440,
"remove_pumps": false
},
"telegram": {
"enabled": false,
"token": "your_telegram_token",
"chat_id": "your_telegram_chat_id"
},
"api_server": {
"enabled": false,
"listen_ip_address": "127.0.0.1",
"listen_port": 8080,
"verbosity": "error",
"jwt_secret_key": "somethingrandom",
"CORS_origins": [],
"username": "freqtrader",
"password": "SuperSecurePassword"
},
"bot_name": "freqtrade",
"initial_state": "running",
"force_entry_enable": false,
"internals": {
"process_throttle_secs": 5
}
}

@@ -204,6 +204,7 @@
"strategy_path": "user_data/strategies/",
"recursive_strategy_search": false,
"add_config_files": [],
+"reduce_df_footprint": false,
"dataformat_ohlcv": "json",
"dataformat_trades": "jsongz"
}

@@ -253,6 +253,7 @@ Mandatory parameters are marked as **Required**, which means that they are requi
| `add_config_files` | Additional config files. These files will be loaded and merged with the current config file. The files are resolved relative to the initial file.<br> *Defaults to `[]`*. <br> **Datatype:** List of strings
| `dataformat_ohlcv` | Data format to use to store historical candle (OHLCV) data. <br> *Defaults to `json`*. <br> **Datatype:** String
| `dataformat_trades` | Data format to use to store historical trades data. <br> *Defaults to `jsongz`*. <br> **Datatype:** String
+| `reduce_df_footprint` | Recast all numeric columns to float32/int32, with the objective of reducing ram/disk usage (and decreasing train/inference timing in FreqAI). (Currently only affects FreqAI use-cases) <br> **Datatype:** Boolean. <br> Default: `False`.
### Parameters in the strategy
@@ -552,7 +553,7 @@ The possible values are: `GTC` (default), `FOK` or `IOC`.
```
!!! Warning
-This is ongoing work. For now, it is supported only for binance, gate, ftx and kucoin.
+This is ongoing work. For now, it is supported only for binance, gate and kucoin.
Please don't change the default value unless you know what you are doing and have researched the impact of using different values for your particular exchange.
### What values can be used for fiat_display_currency?

@@ -173,30 +173,6 @@ res = [p for p, x in lm.items() if 'US' in x['info']['prohibitedIn']]
print(res)
```
## FTX
!!! Warning
Due to the current situation, we can no longer recommend FTX.
Please make sure to investigate the current situation before depositing any funds to FTX.
!!! Tip "Stoploss on Exchange"
FTX supports `stoploss_on_exchange` and can use both stop-loss-market and stop-loss-limit orders. It provides great advantages, so we recommend to benefit from it.
You can use either `"limit"` or `"market"` in the `order_types.stoploss` configuration setting to decide which type of stoploss shall be used.
### Using subaccounts
To use subaccounts with FTX, you need to edit the configuration and add the following:
``` json
"exchange": {
"ccxt_config": {
"headers": {
"FTX-SUBACCOUNT": "name"
}
},
}
```
## Kucoin
Kucoin requires a passphrase for each api key, you will therefore need to add this key into the configuration so your exchange section looks as follows:

@@ -62,3 +62,4 @@ Mandatory parameters are marked as **Required** and have to be set in one of the
| | **Extraneous parameters**
| `keras` | If the selected model makes use of Keras (typical for Tensorflow-based prediction models), this flag needs to be activated so that the model save/loading follows Keras standards. <br> **Datatype:** Boolean. <br> Default: `False`.
| `conv_width` | The width of a convolutional neural network input tensor. This replaces the need for shifting candles (`include_shifted_candles`) by feeding in historical data points as the second dimension of the tensor. Technically, this parameter can also be used for regressors, but it only adds computational overhead and does not change the model training/prediction. <br> **Datatype:** Integer. <br> Default: `2`.
+| `reduce_df_footprint` | Recast all numeric columns to float32/int32, with the objective of reducing ram/disk usage and decreasing train/inference timing. This parameter is set in the main level of the Freqtrade configuration file (not inside FreqAI). <br> **Datatype:** Boolean. <br> Default: `False`.

@@ -268,7 +268,7 @@ This option is disabled by default, and will only apply if set to > 0.
The `max_value` setting removes pairs where the minimum value change is above a specified value.
This is useful when an exchange has unbalanced limits. For example, if step-size = 1 (so you can only buy 1, or 2, or 3, but not 1.1 Coins) - and the price is pretty high (like 20\$) as the coin has risen sharply since the last limit adaption.
As a result of the above, you can only buy for 20\$, or 40\$ - but not for 25\$.
-On exchanges that deduct fees from the receiving currency (e.g. binance, FTX) - this can result in high value coins / amounts that are unsellable as the amount is slightly below the limit.
+On exchanges that deduct fees from the receiving currency (e.g. binance) - this can result in high value coins / amounts that are unsellable as the amount is slightly below the limit.
The `low_price_ratio` setting removes pairs where a raise of 1 price unit (pip) is above the `low_price_ratio` ratio.
This option is disabled by default, and will only apply if set to > 0.

@@ -1,6 +1,6 @@
markdown==3.3.7
mkdocs==1.4.2
-mkdocs-material==8.5.8
+mkdocs-material==8.5.10
mdx_truly_sane_lists==1.3
-pymdown-extensions==9.7
+pymdown-extensions==9.8
jinja2==3.1.2

@@ -24,7 +24,7 @@ These modes can be configured with these values:
```
!!! Note
-Stoploss on exchange is only supported for Binance (stop-loss-limit), Huobi (stop-limit), Kraken (stop-loss-market, stop-loss-limit), FTX (stop limit and stop-market) Gateio (stop-limit), and Kucoin (stop-limit and stop-market) as of now.
+Stoploss on exchange is only supported for Binance (stop-loss-limit), Huobi (stop-limit), Kraken (stop-loss-market, stop-loss-limit), Gateio (stop-limit), and Kucoin (stop-limit and stop-market) as of now.
<ins>Do not set too low/tight stoploss value if using stop loss on exchange!</ins>
If set to low/tight then you have greater risk of missing fill on the order and stoploss will not work.

@@ -723,7 +723,7 @@ if self.dp.runmode.value in ('live', 'dry_run'):
!!! Warning
Although the ticker data structure is a part of the ccxt Unified Interface, the values returned by this method can
-vary for different exchanges. For instance, many exchanges do not return `vwap` values, the FTX exchange
+vary for different exchanges. For instance, many exchanges do not return `vwap` values, some exchanges
does not always fills in the `last` field (so it can be None), etc. So you need to carefully verify the ticker
data returned from the exchange and add appropriate error handling / defaults.

@@ -263,7 +263,6 @@ equos True missing opt: fetchTicker, fetchTickers
eterbase True
fcoin True missing opt: fetchMyTrades, fetchTickers
fcoinjp True missing opt: fetchMyTrades, fetchTickers
-ftx True
gateio True
gemini True
gopax True
@@ -369,7 +368,6 @@ fcoin True missing opt: fetchMyTrades, fetchTickers
fcoinjp True missing opt: fetchMyTrades, fetchTickers
flowbtc False missing: fetchOrder, fetchOHLCV
foxbit False missing: fetchOrder, fetchOHLCV
-ftx True
gateio True
gemini True
gopax True

@@ -108,7 +108,6 @@ def ask_user_config() -> Dict[str, Any]:
"binance",
"binanceus",
"bittrex",
-"ftx",
"gateio",
"huobi",
"kraken",

@@ -3,11 +3,12 @@ This module contains the argument manager class
"""
import logging
import re
-from datetime import datetime
+from datetime import datetime, timezone
from typing import Optional
import arrow
+from freqtrade.constants import DATETIME_PRINT_FORMAT
from freqtrade.exceptions import OperationalException
@@ -29,6 +30,52 @@ class TimeRange:
self.startts: int = startts
self.stopts: int = stopts
@property
def startdt(self) -> Optional[datetime]:
if self.startts:
return datetime.fromtimestamp(self.startts, tz=timezone.utc)
return None
@property
def stopdt(self) -> Optional[datetime]:
if self.stopts:
return datetime.fromtimestamp(self.stopts, tz=timezone.utc)
return None
@property
def timerange_str(self) -> str:
"""
Returns a string representation of the timerange as used by parse_timerange.
Follows the format yyyymmdd-yyyymmdd - leaving out the parts that are not set.
"""
start = ''
stop = ''
if startdt := self.startdt:
start = startdt.strftime('%Y%m%d')
if stopdt := self.stopdt:
stop = stopdt.strftime('%Y%m%d')
return f"{start}-{stop}"
@property
def start_fmt(self) -> str:
"""
Returns a string representation of the start date
"""
val = 'unbounded'
if (startdt := self.startdt) is not None:
val = startdt.strftime(DATETIME_PRINT_FORMAT)
return val
@property
def stop_fmt(self) -> str:
"""
Returns a string representation of the stop date
"""
val = 'unbounded'
if (stopdt := self.stopdt) is not None:
val = stopdt.strftime(DATETIME_PRINT_FORMAT)
return val
def __eq__(self, other):
"""Override the default Equals behavior"""
return (self.starttype == other.starttype and self.stoptype == other.stoptype
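A minimal usage sketch of the new helper properties (assuming `TimeRange` can be constructed directly as `TimeRange('date', 'date', startts, stopts)`; the timestamps below are illustrative):

```python
from datetime import timezone

from freqtrade.configuration import TimeRange

# 2022-01-01 00:00 UTC .. 2022-02-01 00:00 UTC as epoch seconds
tr = TimeRange('date', 'date', 1640995200, 1643673600)

assert tr.startdt is not None and tr.startdt.tzinfo == timezone.utc  # timezone-aware datetimes
print(tr.timerange_str)                 # "20220101-20220201" - the format parse_timerange accepts
print(tr.start_fmt, "->", tr.stop_fmt)  # human-readable bounds, "unbounded" if a side is unset
```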

@@ -159,6 +159,7 @@ CONF_SCHEMA = {
'ignore_buying_expired_candle_after': {'type': 'number'},
'trading_mode': {'type': 'string', 'enum': TRADING_MODES},
'margin_mode': {'type': 'string', 'enum': MARGIN_MODES},
+'reduce_df_footprint': {'type': 'boolean', 'default': False},
'liquidation_buffer': {'type': 'number', 'minimum': 0.0, 'maximum': 0.99},
'backtest_breakdown': {
'type': 'array',

@@ -3,10 +3,10 @@ Functions to convert data from one format to another
"""
import itertools
import logging
-from datetime import datetime, timezone
from operator import itemgetter
from typing import Dict, List
+import numpy as np
import pandas as pd
from pandas import DataFrame, to_datetime
@@ -137,11 +137,9 @@ def trim_dataframe(df: DataFrame, timerange, df_date_col: str = 'date',
df = df.iloc[startup_candles:, :]
else:
if timerange.starttype == 'date':
-start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
-df = df.loc[df[df_date_col] >= start, :]
+df = df.loc[df[df_date_col] >= timerange.startdt, :]
if timerange.stoptype == 'date':
-stop = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
-df = df.loc[df[df_date_col] <= stop, :]
+df = df.loc[df[df_date_col] <= timerange.stopdt, :]
return df
@@ -313,3 +311,29 @@ def convert_ohlcv_format(
if erase and convert_from != convert_to:
logger.info(f"Deleting source data for {pair} / {timeframe}")
src.ohlcv_purge(pair=pair, timeframe=timeframe, candle_type=candle_type)
def reduce_dataframe_footprint(df: DataFrame) -> DataFrame:
"""
Ensure all values are float32 in the incoming dataframe.
:param df: Dataframe to be converted to float/int 32s
:return: Dataframe converted to float/int 32s
"""
logger.debug(f"Memory usage of dataframe is "
f"{df.memory_usage().sum() / 1024**2:.2f} MB")
df_dtypes = df.dtypes
for column, dtype in df_dtypes.items():
if column in ['open', 'high', 'low', 'close', 'volume']:
continue
if dtype == np.float64:
df_dtypes[column] = np.float32
elif dtype == np.int64:
df_dtypes[column] = np.int32
df = df.astype(df_dtypes)
logger.debug(f"Memory usage after optimization is: "
f"{df.memory_usage().sum() / 1024**2:.2f} MB")
return df
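A small usage sketch of the new converter helper (the dataframe below is illustrative; only the behaviour visible in the function above is assumed):

```python
import numpy as np
import pandas as pd

from freqtrade.data.converter import reduce_dataframe_footprint

# Illustrative dataframe: one OHLCV-style column plus two feature columns.
df = pd.DataFrame({
    'close': np.random.rand(100),                              # float64, skipped (OHLCV column)
    'rsi': np.random.rand(100),                                # float64 -> float32
    'signal': np.random.randint(0, 2, 100).astype(np.int64),   # int64 -> int32
})

slim = reduce_dataframe_footprint(df)
print(slim.dtypes)       # 'close' stays float64, 'rsi'/'signal' are downcast
print(df['rsi'].dtype)   # float64 - astype() returns a copy, the input dataframe is untouched
```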

@@ -1,6 +1,6 @@
import logging
import operator
-from datetime import datetime, timezone
+from datetime import datetime
from pathlib import Path
from typing import Dict, List, Optional, Tuple
@@ -160,9 +160,9 @@ def _load_cached_data_for_updating(
end = None
if timerange:
if timerange.starttype == 'date':
-start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
+start = timerange.startdt
if timerange.stoptype == 'date':
-end = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
+end = timerange.stopdt
# Intentionally don't pass timerange in - since we need to load the full dataset.
data = data_handler.ohlcv_load(pair, timeframe=timeframe,

@@ -366,13 +366,11 @@ class IDataHandler(ABC):
"""
if timerange.starttype == 'date':
-start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
-if pairdata.iloc[0]['date'] > start:
+if pairdata.iloc[0]['date'] > timerange.startdt:
logger.warning(f"{pair}, {candle_type}, {timeframe}, "
f"data starts at {pairdata.iloc[0]['date']:%Y-%m-%d %H:%M:%S}")
if timerange.stoptype == 'date':
-stop = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
-if pairdata.iloc[-1]['date'] < stop:
+if pairdata.iloc[-1]['date'] < timerange.stopdt:
logger.warning(f"{pair}, {candle_type}, {timeframe}, "
f"data ends at {pairdata.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}")

@@ -392,7 +392,7 @@ class Edge:
# Returning a list of pairs in order of "expectancy"
return final
-def _find_trades_for_stoploss_range(self, df, pair, stoploss_range):
+def _find_trades_for_stoploss_range(self, df, pair: str, stoploss_range) -> list:
buy_column = df['enter_long'].values
sell_column = df['exit_long'].values
date_column = df['date'].values
@@ -407,7 +407,7 @@
return result
def _detect_next_stop_or_sell_point(self, buy_column, sell_column, date_column,
-ohlc_columns, stoploss, pair):
+ohlc_columns, stoploss, pair: str):
"""
Iterate through ohlc_columns in order to find the next trade
Next trade opens from the first buy signal noticed to

@@ -18,7 +18,6 @@ from freqtrade.exchange.exchange_utils import (amount_to_contract_precision, amo
timeframe_to_next_date, timeframe_to_prev_date,
timeframe_to_seconds, validate_exchange,
validate_exchanges)
-from freqtrade.exchange.ftx import Ftx
from freqtrade.exchange.gateio import Gateio
from freqtrade.exchange.hitbtc import Hitbtc
from freqtrade.exchange.huobi import Huobi

@@ -52,7 +52,6 @@ MAP_EXCHANGE_CHILDCLASS = {
SUPPORTED_EXCHANGES = [
'binance',
'bittrex',
-'ftx',
'gateio',
'huobi',
'kraken',

@@ -1,178 +0,0 @@
""" FTX exchange subclass """
import logging
from typing import Any, Dict, List, Optional, Tuple
import ccxt
from freqtrade.constants import BuySell
from freqtrade.enums import MarginMode, TradingMode
from freqtrade.exceptions import (DDosProtection, InsufficientFundsError, InvalidOrderException,
OperationalException, TemporaryError)
from freqtrade.exchange import Exchange
from freqtrade.exchange.common import API_FETCH_ORDER_RETRY_COUNT, retrier
from freqtrade.misc import safe_value_fallback2
logger = logging.getLogger(__name__)
class Ftx(Exchange):
_ft_has: Dict = {
"order_time_in_force": ['GTC', 'IOC', 'PO'],
"stoploss_on_exchange": True,
"ohlcv_candle_limit": 1500,
"ohlcv_require_since": True,
"ohlcv_volume_currency": "quote",
"mark_ohlcv_price": "index",
"mark_ohlcv_timeframe": "1h",
}
_supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
# TradingMode.SPOT always supported and not required in this list
# (TradingMode.MARGIN, MarginMode.CROSS),
# (TradingMode.FUTURES, MarginMode.CROSS)
]
def stoploss_adjust(self, stop_loss: float, order: Dict, side: str) -> bool:
"""
Verify stop_loss against stoploss-order value (limit or price)
Returns True if adjustment is necessary.
"""
return order['type'] == 'stop' and (
side == "sell" and stop_loss > float(order['price']) or
side == "buy" and stop_loss < float(order['price'])
)
@retrier(retries=0)
def stoploss(self, pair: str, amount: float, stop_price: float,
order_types: Dict, side: BuySell, leverage: float) -> Dict:
"""
Creates a stoploss order.
depending on order_types.stoploss configuration, uses 'market' or limit order.
Limit orders are defined by having orderPrice set, otherwise a market order is used.
"""
limit_price_pct = order_types.get('stoploss_on_exchange_limit_ratio', 0.99)
if side == "sell":
limit_rate = stop_price * limit_price_pct
else:
limit_rate = stop_price * (2 - limit_price_pct)
ordertype = "stop"
stop_price = self.price_to_precision(pair, stop_price)
if self._config['dry_run']:
dry_order = self.create_dry_run_order(
pair, ordertype, side, amount, stop_price, leverage, stop_loss=True)
return dry_order
try:
params = self._params.copy()
if order_types.get('stoploss', 'market') == 'limit':
# set orderPrice to place limit order, otherwise it's a market order
params['orderPrice'] = limit_rate
if self.trading_mode == TradingMode.FUTURES:
params.update({'reduceOnly': True})
params['stopPrice'] = stop_price
amount = self.amount_to_precision(pair, amount)
self._lev_prep(pair, leverage, side)
order = self._api.create_order(symbol=pair, type=ordertype, side=side,
amount=amount, params=params)
self._log_exchange_response('create_stoploss_order', order)
logger.info('stoploss order added for %s. '
'stop price: %s.', pair, stop_price)
return order
except ccxt.InsufficientFunds as e:
raise InsufficientFundsError(
f'Insufficient funds to create {ordertype} {side} order on market {pair}. '
f'Tried to create stoploss with amount {amount} at stoploss {stop_price}. '
f'Message: {e}') from e
except ccxt.InvalidOrder as e:
raise InvalidOrderException(
f'Could not create {ordertype} {side} order on market {pair}. '
f'Tried to create stoploss with amount {amount} at stoploss {stop_price}. '
f'Message: {e}') from e
except ccxt.DDoSProtection as e:
raise DDosProtection(e) from e
except (ccxt.NetworkError, ccxt.ExchangeError) as e:
raise TemporaryError(
f'Could not place {side} order due to {e.__class__.__name__}. Message: {e}') from e
except ccxt.BaseError as e:
raise OperationalException(e) from e
@retrier(retries=API_FETCH_ORDER_RETRY_COUNT)
def fetch_stoploss_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
if self._config['dry_run']:
return self.fetch_dry_run_order(order_id)
try:
orders = self._api.fetch_orders(pair, None, params={'type': 'stop'})
order = [order for order in orders if order['id'] == order_id]
self._log_exchange_response('fetch_stoploss_order', order)
if len(order) == 1:
if order[0].get('status') == 'closed':
# Trigger order was triggered ...
real_order_id: Optional[str] = order[0].get('info', {}).get('orderId')
# OrderId may be None for stoploss-market orders
# So we need to get it through the endpoint
# /conditional_orders/{conditional_order_id}/triggers
if not real_order_id:
res = self._api.privateGetConditionalOrdersConditionalOrderIdTriggers(
params={'conditional_order_id': order_id})
self._log_exchange_response('fetch_stoploss_order2', res)
real_order_id = res['result'][0]['orderId'] if res.get(
'result', []) else None
if real_order_id:
order1 = self._api.fetch_order(real_order_id, pair)
self._log_exchange_response('fetch_stoploss_order1', order1)
# Fake type to stop - as this was really a stop order.
order1['id_stop'] = order1['id']
order1['id'] = order_id
order1['type'] = 'stop'
order1['status_stop'] = 'triggered'
return order1
return order[0]
else:
raise InvalidOrderException(f"Could not get stoploss order for id {order_id}")
except ccxt.InvalidOrder as e:
raise InvalidOrderException(
f'Tried to get an invalid order (id: {order_id}). Message: {e}') from e
except ccxt.DDoSProtection as e:
raise DDosProtection(e) from e
except (ccxt.NetworkError, ccxt.ExchangeError) as e:
raise TemporaryError(
f'Could not get order due to {e.__class__.__name__}. Message: {e}') from e
except ccxt.BaseError as e:
raise OperationalException(e) from e
@retrier
def cancel_stoploss_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
if self._config['dry_run']:
return {}
try:
order = self._api.cancel_order(order_id, pair, params={'type': 'stop'})
self._log_exchange_response('cancel_stoploss_order', order)
return order
except ccxt.InvalidOrder as e:
raise InvalidOrderException(
f'Could not cancel order. Message: {e}') from e
except ccxt.DDoSProtection as e:
raise DDosProtection(e) from e
except (ccxt.NetworkError, ccxt.ExchangeError) as e:
raise TemporaryError(
f'Could not cancel order due to {e.__class__.__name__}. Message: {e}') from e
except ccxt.BaseError as e:
raise OperationalException(e) from e
def get_order_id_conditional(self, order: Dict[str, Any]) -> str:
if order['type'] == 'stop':
return safe_value_fallback2(order, order, 'id_stop', 'id')
return order['id']

@@ -20,6 +20,7 @@ from sklearn.neighbors import NearestNeighbors
from freqtrade.configuration import TimeRange
from freqtrade.constants import Config
+from freqtrade.data.converter import reduce_dataframe_footprint
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import timeframe_to_seconds
from freqtrade.strategy.interface import IStrategy
@@ -436,9 +437,7 @@
timerange_train.stopts = timerange_train.startts + train_period_days
first = False
-start = datetime.fromtimestamp(timerange_train.startts, tz=timezone.utc)
-stop = datetime.fromtimestamp(timerange_train.stopts, tz=timezone.utc)
-tr_training_list.append(start.strftime("%Y%m%d") + "-" + stop.strftime("%Y%m%d"))
+tr_training_list.append(timerange_train.timerange_str)
tr_training_list_timerange.append(copy.deepcopy(timerange_train))
# associated backtest period
@@ -450,9 +449,7 @@
if timerange_backtest.stopts > config_timerange.stopts:
timerange_backtest.stopts = config_timerange.stopts
-start = datetime.fromtimestamp(timerange_backtest.startts, tz=timezone.utc)
-stop = datetime.fromtimestamp(timerange_backtest.stopts, tz=timezone.utc)
-tr_backtesting_list.append(start.strftime("%Y%m%d") + "-" + stop.strftime("%Y%m%d"))
+tr_backtesting_list.append(timerange_backtest.timerange_str)
tr_backtesting_list_timerange.append(copy.deepcopy(timerange_backtest))
# ensure we are predicting on exactly same amount of data as requested by user defined
@@ -494,11 +491,9 @@
it is sliced down to just the present training period.
"""
-start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
-stop = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
-df = df.loc[df["date"] >= start, :]
+df = df.loc[df["date"] >= timerange.startdt, :]
if not self.live:
-df = df.loc[df["date"] < stop, :]
+df = df.loc[df["date"] < timerange.stopdt, :]
return df
@@ -1061,9 +1056,7 @@
backtest_timerange.startts = (
backtest_timerange.startts - backtest_period_days * SECONDS_IN_DAY
)
-start = datetime.fromtimestamp(backtest_timerange.startts, tz=timezone.utc)
-stop = datetime.fromtimestamp(backtest_timerange.stopts, tz=timezone.utc)
-full_timerange = start.strftime("%Y%m%d") + "-" + stop.strftime("%Y%m%d")
+full_timerange = backtest_timerange.timerange_str
config_path = Path(self.config["config_files"][0])
if not self.full_path.is_dir():
@@ -1279,6 +1272,9 @@
dataframe = self.remove_special_chars_from_feature_names(dataframe)
+if self.config.get('reduce_df_footprint', False):
+dataframe = reduce_dataframe_footprint(dataframe)
return dataframe
def fit_labels(self) -> None:

@@ -14,7 +14,7 @@ from numpy.typing import NDArray
from pandas import DataFrame
from freqtrade.configuration import TimeRange
-from freqtrade.constants import DATETIME_PRINT_FORMAT, Config
+from freqtrade.constants import Config
from freqtrade.data.dataprovider import DataProvider
from freqtrade.enums import RunMode
from freqtrade.exceptions import OperationalException
@@ -803,14 +803,8 @@
:return: if the data exists or not
"""
if self.config.get("freqai_backtest_live_models", False) and len(dataframe_backtest) == 0:
-tr_backtest_startts_str = datetime.fromtimestamp(
-tr_backtest.startts,
-tz=timezone.utc).strftime(DATETIME_PRINT_FORMAT)
-tr_backtest_stopts_str = datetime.fromtimestamp(
-tr_backtest.stopts,
-tz=timezone.utc).strftime(DATETIME_PRINT_FORMAT)
-logger.info(f"No data found for pair {pair} from {tr_backtest_startts_str} "
-f" from {tr_backtest_startts_str} to {tr_backtest_stopts_str}. "
+logger.info(f"No data found for pair {pair} from "
+f"from { tr_backtest.start_fmt} to {tr_backtest.stop_fmt}. "
"Probably more than one training within the same candle period.")
return False
return True
@@ -825,18 +819,11 @@
:param pair: the current pair
:param total_trains: total trains (total number of slides for the sliding window)
"""
-tr_train_startts_str = datetime.fromtimestamp(
-tr_train.startts,
-tz=timezone.utc).strftime(DATETIME_PRINT_FORMAT)
-tr_train_stopts_str = datetime.fromtimestamp(
-tr_train.stopts,
-tz=timezone.utc).strftime(DATETIME_PRINT_FORMAT)
if not self.config.get("freqai_backtest_live_models", False):
logger.info(
f"Training {pair}, {self.pair_it}/{self.total_pairs} pairs"
-f" from {tr_train_startts_str} "
-f"to {tr_train_stopts_str}, {train_it}/{total_trains} "
+f" from {tr_train.start_fmt} "
+f"to {tr_train.stop_fmt}, {train_it}/{total_trains} "
"trains"
)
# Following methods which are overridden by user made prediction models.

@@ -230,7 +230,4 @@ def get_timerange_backtest_live_models(config: Config) -> str:
dk = FreqaiDataKitchen(config)
models_path = dk.get_full_models_path(config)
timerange, _ = dk.get_timerange_and_assets_end_dates_from_ready_models(models_path)
-start_date = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
-end_date = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
-tr = f"{start_date.strftime('%Y%m%d')}-{end_date.strftime('%Y%m%d')}"
-return tr
+return timerange.timerange_str

@@ -354,7 +354,7 @@ class FreqtradeBot(LoggingMixin):
if self.trading_mode == TradingMode.FUTURES:
self._schedule.run_pending()
-def update_closed_trades_without_assigned_fees(self):
+def update_closed_trades_without_assigned_fees(self) -> None:
"""
Update closed trades without close fees assigned.
Only acts when Orders are in the database, otherwise the last order-id is unknown.
@@ -379,7 +379,7 @@
stoploss_order=order.ft_order_side == 'stoploss',
send_msg=False)
-trades: List[Trade] = Trade.get_open_trades_without_assigned_fees()
+trades = Trade.get_open_trades_without_assigned_fees()
for trade in trades:
if trade.is_open and not trade.fee_updated(trade.entry_side):
order = trade.select_order(trade.entry_side, False)

@@ -35,9 +35,5 @@ def interest(
elif exchange_name == "kraken":
# Rounded based on https://kraken-fees-calculator.github.io/
return borrowed * rate * (one + FtPrecise(ceil(hours / four)))
-elif exchange_name == "ftx":
-# As Explained under #Interest rates section in
-# https://help.ftx.com/hc/en-us/articles/360053007671-Spot-Margin-Trading-Explainer
-return borrowed * rate * FtPrecise(ceil(hours)) / twenty_four
else:
raise OperationalException(f"Leverage not available on {exchange_name} with freqtrade")
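For orientation, the Kraken branch that remains works out like this (plain floats instead of `FtPrecise`; the amount and rate are hypothetical numbers, and the per-4-hour reading of `rate` is an assumption taken from the formula itself):

```python
from math import ceil

borrowed = 50.0   # hypothetical borrowed amount
rate = 0.0002     # hypothetical rate, applied once per 4-hour period
hours = 10.0      # time the position was open

# borrowed * rate * (1 + ceil(hours / 4))
interest = borrowed * rate * (1 + ceil(hours / 4))
print(interest)   # 50 * 0.0002 * (1 + 3) = 0.04
```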

@@ -166,7 +166,7 @@ class Backtesting:
PairLocks.use_db = True
Trade.use_db = True
-def init_backtest_detail(self):
+def init_backtest_detail(self) -> None:
# Load detail timeframe if specified
self.timeframe_detail = str(self.config.get('timeframe_detail', ''))
if self.timeframe_detail:
@@ -1286,8 +1286,7 @@
def _get_min_cached_backtest_date(self):
min_backtest_date = None
backtest_cache_age = self.config.get('backtest_cache', constants.BACKTEST_CACHE_DEFAULT)
-if self.timerange.stopts == 0 or datetime.fromtimestamp(
-self.timerange.stopts, tz=timezone.utc) > datetime.now(tz=timezone.utc):
+if self.timerange.stopts == 0 or self.timerange.stopdt > datetime.now(tz=timezone.utc):
logger.warning('Backtest result caching disabled due to use of open-ended timerange.')
elif backtest_cache_age == 'day':
min_backtest_date = datetime.now(tz=timezone.utc) - timedelta(days=1)

@@ -1,5 +1,5 @@
import logging
-from typing import List
+from typing import List, Optional
from sqlalchemy import inspect, select, text, tuple_, update
@@ -31,9 +31,9 @@ def get_backup_name(tabs: List[str], backup_prefix: str):
return table_back_name
-def get_last_sequence_ids(engine, trade_back_name, order_back_name):
+def get_last_sequence_ids(engine, trade_back_name: str, order_back_name: str):
-order_id: int = None
+order_id: Optional[int] = None
-trade_id: int = None
+trade_id: Optional[int] = None
if engine.name == 'postgresql':
with engine.begin() as connection:

@@ -90,6 +90,13 @@ class Order(_DECL_BASE):
def safe_filled(self) -> float:
return self.filled if self.filled is not None else self.amount or 0.0
+@property
+def safe_remaining(self) -> float:
+return (
+self.remaining if self.remaining is not None else
+self.amount - (self.filled or 0.0)
+)
@property
def safe_fee_base(self) -> float:
return self.ft_fee_base or 0.0
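The fallback behaviour of `safe_remaining` in isolation, using a stand-in class rather than the real SQLAlchemy model (numbers are illustrative). The RPC change later in this commit relies on it to format `open_order` even when the exchange did not report `remaining`:

```python
class FakeOrder:
    """Stand-in mimicking only the Order columns the property relies on."""

    def __init__(self, amount, filled=None, remaining=None):
        self.amount = amount
        self.filled = filled
        self.remaining = remaining

    @property
    def safe_remaining(self) -> float:
        # Same logic as the property added above.
        return (
            self.remaining if self.remaining is not None else
            self.amount - (self.filled or 0.0)
        )


print(FakeOrder(amount=1.0, filled=0.25, remaining=0.75).safe_remaining)  # 0.75 (exchange value)
print(FakeOrder(amount=1.0, filled=0.25).safe_remaining)                  # 0.75 (derived fallback)
print(FakeOrder(amount=1.0).safe_remaining)                               # 1.0  (nothing filled yet)
```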

@@ -84,11 +84,8 @@ async def _process_consumer_request(
# Limit the amount of candles per dataframe to 'limit' or 1500
limit = max(data.get('limit', 1500), 1500)
-# They requested the full historical analyzed dataframes
-analyzed_df = rpc._ws_request_analyzed_df(limit)
-# For every dataframe, send as a separate message
-for _, message in analyzed_df.items():
+# For every pair in the generator, send a separate message
+for message in rpc._ws_request_analyzed_df(limit):
response = WSAnalyzedDFMessage(data=message)
await channel_manager.send_direct(channel, response.dict(exclude_none=True))

@@ -2,7 +2,7 @@ import asyncio
import logging
from ipaddress import IPv4Address
from threading import Thread
-from typing import Any, Dict
+from typing import Any, Dict, Optional
import orjson
import uvicorn
@@ -51,9 +51,9 @@ class ApiServer(RPCHandler):
# Exchange - only available in webserver mode.
_exchange = None
# websocket message queue stuff
-_ws_channel_manager = None
+_ws_channel_manager: ChannelManager
_ws_thread = None
-_ws_loop = None
+_ws_loop: Optional[asyncio.AbstractEventLoop] = None
def __new__(cls, *args, **kwargs):
"""
@@ -71,7 +71,7 @@
return
self._standalone: bool = standalone
self._server = None
-self._ws_queue = None
+self._ws_queue: Optional[ThreadedQueue] = None
self._ws_background_task = None
ApiServer.__initialized = True
@@ -186,7 +186,7 @@
self._ws_background_task = asyncio.run_coroutine_threadsafe(
self._broadcast_queue_data(), loop=self._ws_loop)
-async def _broadcast_queue_data(self):
+async def _broadcast_queue_data(self) -> None:
# Instantiate the queue in this coroutine so it's attached to our loop
self._ws_queue = ThreadedQueue()
async_queue = self._ws_queue.async_q
@@ -210,7 +210,8 @@
finally:
# Disconnect channels and stop the loop on cancel
await self._ws_channel_manager.disconnect_all()
-self._ws_loop.stop()
+if self._ws_loop:
+self._ws_loop.stop()
# Avoid adding more items to the queue if they aren't
# going to get broadcasted.
self._ws_queue = None

@@ -5,7 +5,7 @@ import logging
from abc import abstractmethod
from datetime import date, datetime, timedelta, timezone
from math import isnan
-from typing import Any, Dict, List, Optional, Tuple, Union
+from typing import Any, Dict, Generator, List, Optional, Tuple, Union
import arrow
import psutil
@@ -218,9 +218,10 @@
stoploss_current_dist_pct=round(stoploss_current_dist_ratio * 100, 2),
stoploss_entry_dist=stoploss_entry_dist,
stoploss_entry_dist_ratio=round(stoploss_entry_dist_ratio, 8),
-open_order='({} {} rem={:.8f})'.format(
-order.order_type, order.side, order.remaining
-) if order else None,
+open_order=(
+f'({order.order_type} {order.side} rem={order.safe_remaining:.8f})' if
+order else None
+),
))
results.append(trade_dict)
return results
@@ -1063,23 +1064,20 @@
self,
pairlist: List[str],
limit: Optional[int]
-) -> Dict[str, Any]:
+) -> Generator[Dict[str, Any], None, None]:
""" Get the analysed dataframes of each pair in the pairlist """
timeframe = self._freqtrade.config['timeframe']
candle_type = self._freqtrade.config.get('candle_type_def', CandleType.SPOT)
-_data = {}
for pair in pairlist:
dataframe, last_analyzed = self.__rpc_analysed_dataframe_raw(pair, timeframe, limit)
-_data[pair] = {
+yield {
"key": (pair, timeframe, candle_type),
"df": dataframe,
"la": last_analyzed
}
-return _data
def _ws_request_analyzed_df(self, limit: Optional[int]):
""" Historical Analyzed Dataframes for WebSocket """
whitelist = self._freqtrade.active_pair_whitelist
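The dict-to-generator refactor in isolation (the function and pair names below are illustrative, not taken from the diff); the websocket handler earlier in this commit consumes the real generator with a plain `for message in rpc._ws_request_analyzed_df(limit):` loop:

```python
from typing import Any, Dict, Generator, List


# Before: build the full mapping up front, then iterate over .items().
def analysed_frames_dict(pairs: List[str]) -> Dict[str, Dict[str, Any]]:
    data = {}
    for pair in pairs:
        data[pair] = {"key": pair, "df": f"<dataframe for {pair}>"}
    return data


# After: yield one message per pair, so each dataframe can be serialized and
# sent as soon as it is ready instead of holding all of them in memory at once.
def analysed_frames_gen(pairs: List[str]) -> Generator[Dict[str, Any], None, None]:
    for pair in pairs:
        yield {"key": pair, "df": f"<dataframe for {pair}>"}


for message in analysed_frames_gen(["BTC/USDT", "ETH/USDT"]):
    print(message["key"])
```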

@@ -1062,7 +1062,7 @@ class Telegram(RPCHandler):
self._rpc._rpc_force_entry(pair, price, order_side=order_side)
except RPCException as e:
logger.exception("Forcebuy error!")
-self._send_msg(str(e))
+self._send_msg(str(e), ParseMode.HTML)
def _force_enter_inline(self, update: Update, _: CallbackContext) -> None:
if update.callback_query:

@@ -30,6 +30,8 @@ asyncio_mode = "auto"
[tool.mypy]
ignore_missing_imports = true
+namespace_packages = false
+implicit_optional = true
warn_unused_ignores = true
exclude = [
'^build_helpers\.py$'

@@ -9,23 +9,25 @@
coveralls==3.3.1
flake8==5.0.4
flake8-tidy-imports==4.8.0
-mypy==0.982
+mypy==0.990
pre-commit==2.20.0
pytest==7.2.0
-pytest-asyncio==0.20.1
+pytest-asyncio==0.20.2
pytest-cov==4.0.0
pytest-mock==3.10.0
pytest-random-order==1.0.4
isort==5.10.1
# For datetime mocking
time-machine==2.8.2
+# fastapi testing
+httpx==0.23.0
# Convert jupyter notebooks to markdown documents
-nbconvert==7.2.3
+nbconvert==7.2.4
# mypy types
types-cachetools==5.2.1
types-filelock==3.2.7
-types-requests==2.28.11.2
+types-requests==2.28.11.4
types-tabulate==0.9.0.0
-types-python-dateutil==2.8.19.2
+types-python-dateutil==2.8.19.3

@@ -8,4 +8,4 @@ joblib==1.2.0
catboost==1.1.1; platform_machine != 'aarch64'
lightgbm==3.3.3
xgboost==1.7.1
-tensorboard==2.10.1
+tensorboard==2.11.0

@@ -2,11 +2,12 @@ numpy==1.23.4
pandas==1.5.1
pandas-ta==0.3.14b
-ccxt==2.1.54
+ccxt==2.1.75
# Pin cryptography for now due to rust build errors with piwheels
-cryptography==38.0.1
+cryptography==38.0.1; platform_machine == 'armv7l'
+cryptography==38.0.3; platform_machine != 'armv7l'
aiohttp==3.8.3
-SQLAlchemy==1.4.43
+SQLAlchemy==1.4.44
python-telegram-bot==13.14
arrow==1.2.3
cachetools==4.2.2
@@ -35,12 +36,12 @@ orjson==3.8.1
sdnotify==0.3.2
# API Server
-fastapi==0.85.1
+fastapi==0.87.0
pydantic==1.10.2
uvicorn==0.19.0
pyjwt==2.6.0
aiofiles==22.1.0
-psutil==5.9.3
+psutil==5.9.4
# Support for colorized terminal output
colorama==0.4.6

@@ -30,7 +30,7 @@ def test_validate_is_int():
assert not validate_is_int('-ee')
-@pytest.mark.parametrize('exchange', ['bittrex', 'binance', 'kraken', 'ftx'])
+@pytest.mark.parametrize('exchange', ['bittrex', 'binance', 'kraken'])
def test_start_new_config(mocker, caplog, exchange):
wt_mock = mocker.patch.object(Path, "write_text", MagicMock())
mocker.patch.object(Path, "exists", MagicMock(return_value=True))

@@ -1748,28 +1748,7 @@ def limit_buy_order_canceled_empty(request):
# https://docs.pytest.org/en/latest/example/parametrize.html#apply-indirect-on-particular-arguments
exchange_name = request.param
-if exchange_name == 'ftx':
+if exchange_name == 'kraken':
return {
'info': {},
'id': '1234512345',
'clientOrderId': None,
'timestamp': arrow.utcnow().shift(minutes=-601).int_timestamp * 1000,
'datetime': arrow.utcnow().shift(minutes=-601).isoformat(),
'lastTradeTimestamp': None,
'symbol': 'LTC/USDT',
'type': 'limit',
'side': 'buy',
'price': 34.3225,
'amount': 0.55,
'cost': 0.0,
'average': None,
'filled': 0.0,
'remaining': 0.0,
'status': 'closed',
'fee': None,
'trades': None
}
-elif exchange_name == 'kraken':
return {
'info': {},
'id': 'AZNPFF-4AC4N-7MKTAT',

@@ -3,18 +3,19 @@ import logging
from pathlib import Path
from shutil import copyfile
+import numpy as np
import pytest
from freqtrade.configuration.timerange import TimeRange
from freqtrade.data.converter import (convert_ohlcv_format, convert_trades_format,
ohlcv_fill_up_missing_data, ohlcv_to_dataframe,
-trades_dict_to_list, trades_remove_duplicates,
-trades_to_ohlcv, trim_dataframe)
+reduce_dataframe_footprint, trades_dict_to_list,
+trades_remove_duplicates, trades_to_ohlcv, trim_dataframe)
from freqtrade.data.history import (get_timerange, load_data, load_pair_history,
validate_backtest_data)
from freqtrade.data.history.idatahandler import IDataHandler
from freqtrade.enums import CandleType
-from tests.conftest import log_has, log_has_re
+from tests.conftest import generate_test_data, log_has, log_has_re
from tests.data.test_history import _clean_test_file
@@ -344,3 +345,33 @@ def test_convert_ohlcv_format(default_conf, testdatadir, tmpdir, file_base, cand
assert file.exists()
for file in (files_new):
assert not file.exists()
def test_reduce_dataframe_footprint():
data = generate_test_data('15m', 40)
data['open_copy'] = data['open']
data['close_copy'] = data['close']
data['close_copy'] = data['close']
assert data['open'].dtype == np.float64
assert data['open_copy'].dtype == np.float64
assert data['close_copy'].dtype == np.float64
df2 = reduce_dataframe_footprint(data)
# Does not modify original dataframe
assert data['open'].dtype == np.float64
assert data['open_copy'].dtype == np.float64
assert data['close_copy'].dtype == np.float64
# skips ohlcv columns
assert df2['open'].dtype == np.float64
assert df2['high'].dtype == np.float64
assert df2['low'].dtype == np.float64
assert df2['close'].dtype == np.float64
assert df2['volume'].dtype == np.float64
# Changes dtype of returned dataframe
assert df2['open_copy'].dtype == np.float32
assert df2['close_copy'].dtype == np.float32

@@ -70,7 +70,7 @@ def test_datahandler_ohlcv_regex(filename, pair, timeframe, candletype):
('BTC_USDT_USDT', 'BTC/USDT:USDT'),  # Futures
('XRP_USDT_USDT', 'XRP/USDT:USDT'),  # futures
('BTC-PERP', 'BTC-PERP'),
-('BTC-PERP_USDT', 'BTC-PERP:USDT'),  # potential FTX case
+('BTC-PERP_USDT', 'BTC-PERP:USDT'),
('UNITTEST_USDT', 'UNITTEST/USDT'),
])
def test_rebuild_pair_from_filename(input, expected):

@@ -45,16 +45,6 @@ EXCHANGES = {
'leverage_tiers_public': False,
'leverage_in_spot_market': True,
},
# 'ftx': {
# 'pair': 'BTC/USD',
# 'stake_currency': 'USD',
# 'hasQuoteVolume': True,
# 'timeframe': '5m',
# 'futures_pair': 'BTC/USD:USD',
# 'futures': False,
# 'leverage_tiers_public': False, # TODO: Set to True once implemented on CCXT
# 'leverage_in_spot_market': True,
# },
'kucoin': {
'pair': 'XRP/USDT',
'stake_currency': 'USDT',

View File

@ -27,7 +27,7 @@ from tests.conftest import (generate_test_data_raw, get_mock_coro, get_patched_e
# Make sure to always keep one exchange here which is NOT subclassed!! # Make sure to always keep one exchange here which is NOT subclassed!!
EXCHANGES = ['bittrex', 'binance', 'kraken', 'ftx', 'gateio'] EXCHANGES = ['bittrex', 'binance', 'kraken', 'gateio']
get_entry_rate_data = [ get_entry_rate_data = [
('other', 20, 19, 10, 0.0, 20), # Full ask side ('other', 20, 19, 10, 0.0, 20), # Full ask side
@ -3162,19 +3162,16 @@ def test_cancel_stoploss_order(default_conf, mocker, exchange_name):
def test_cancel_stoploss_order_with_result(default_conf, mocker, exchange_name): def test_cancel_stoploss_order_with_result(default_conf, mocker, exchange_name):
default_conf['dry_run'] = False default_conf['dry_run'] = False
mocker.patch('freqtrade.exchange.Exchange.fetch_stoploss_order', return_value={'for': 123}) mocker.patch('freqtrade.exchange.Exchange.fetch_stoploss_order', return_value={'for': 123})
mocker.patch('freqtrade.exchange.Ftx.fetch_stoploss_order', return_value={'for': 123})
mocker.patch('freqtrade.exchange.Gateio.fetch_stoploss_order', return_value={'for': 123}) mocker.patch('freqtrade.exchange.Gateio.fetch_stoploss_order', return_value={'for': 123})
exchange = get_patched_exchange(mocker, default_conf, id=exchange_name) exchange = get_patched_exchange(mocker, default_conf, id=exchange_name)
res = {'fee': {}, 'status': 'canceled', 'amount': 1234} res = {'fee': {}, 'status': 'canceled', 'amount': 1234}
mocker.patch('freqtrade.exchange.Exchange.cancel_stoploss_order', return_value=res) mocker.patch('freqtrade.exchange.Exchange.cancel_stoploss_order', return_value=res)
mocker.patch('freqtrade.exchange.Ftx.cancel_stoploss_order', return_value=res)
mocker.patch('freqtrade.exchange.Gateio.cancel_stoploss_order', return_value=res) mocker.patch('freqtrade.exchange.Gateio.cancel_stoploss_order', return_value=res)
co = exchange.cancel_stoploss_order_with_result(order_id='_', pair='TKN/BTC', amount=555) co = exchange.cancel_stoploss_order_with_result(order_id='_', pair='TKN/BTC', amount=555)
assert co == res assert co == res
mocker.patch('freqtrade.exchange.Exchange.cancel_stoploss_order', return_value='canceled') mocker.patch('freqtrade.exchange.Exchange.cancel_stoploss_order', return_value='canceled')
mocker.patch('freqtrade.exchange.Ftx.cancel_stoploss_order', return_value='canceled')
mocker.patch('freqtrade.exchange.Gateio.cancel_stoploss_order', return_value='canceled') mocker.patch('freqtrade.exchange.Gateio.cancel_stoploss_order', return_value='canceled')
# Fall back to fetch_stoploss_order # Fall back to fetch_stoploss_order
co = exchange.cancel_stoploss_order_with_result(order_id='_', pair='TKN/BTC', amount=555) co = exchange.cancel_stoploss_order_with_result(order_id='_', pair='TKN/BTC', amount=555)
@ -3182,7 +3179,6 @@ def test_cancel_stoploss_order_with_result(default_conf, mocker, exchange_name):
exc = InvalidOrderException("") exc = InvalidOrderException("")
mocker.patch('freqtrade.exchange.Exchange.fetch_stoploss_order', side_effect=exc) mocker.patch('freqtrade.exchange.Exchange.fetch_stoploss_order', side_effect=exc)
mocker.patch('freqtrade.exchange.Ftx.fetch_stoploss_order', side_effect=exc)
mocker.patch('freqtrade.exchange.Gateio.fetch_stoploss_order', side_effect=exc) mocker.patch('freqtrade.exchange.Gateio.fetch_stoploss_order', side_effect=exc)
co = exchange.cancel_stoploss_order_with_result(order_id='_', pair='TKN/BTC', amount=555) co = exchange.cancel_stoploss_order_with_result(order_id='_', pair='TKN/BTC', amount=555)
assert co['amount'] == 555 assert co['amount'] == 555
@ -3191,7 +3187,6 @@ def test_cancel_stoploss_order_with_result(default_conf, mocker, exchange_name):
with pytest.raises(InvalidOrderException): with pytest.raises(InvalidOrderException):
exc = InvalidOrderException("Did not find order") exc = InvalidOrderException("Did not find order")
mocker.patch('freqtrade.exchange.Exchange.cancel_stoploss_order', side_effect=exc) mocker.patch('freqtrade.exchange.Exchange.cancel_stoploss_order', side_effect=exc)
mocker.patch('freqtrade.exchange.Ftx.cancel_stoploss_order', side_effect=exc)
mocker.patch('freqtrade.exchange.Gateio.cancel_stoploss_order', side_effect=exc) mocker.patch('freqtrade.exchange.Gateio.cancel_stoploss_order', side_effect=exc)
exchange = get_patched_exchange(mocker, default_conf, id=exchange_name) exchange = get_patched_exchange(mocker, default_conf, id=exchange_name)
exchange.cancel_stoploss_order_with_result(order_id='_', pair='TKN/BTC', amount=123) exchange.cancel_stoploss_order_with_result(order_id='_', pair='TKN/BTC', amount=123)
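The hunk above leans entirely on patching with return_value and side_effect to drive the fallback and exception paths. For readers less familiar with unittest.mock, a minimal, self-contained sketch of the difference — the Gateway class and test name are hypothetical, not freqtrade code:

from unittest.mock import MagicMock

import pytest


class Gateway:
    """Hypothetical stand-in for an exchange wrapper."""

    def fetch_order(self, order_id: str) -> dict:
        raise NotImplementedError


def test_mock_behaviour():
    gw = Gateway()
    # return_value: every call yields the same canned response.
    gw.fetch_order = MagicMock(return_value={'id': 'X', 'status': 'canceled'})
    assert gw.fetch_order('X')['status'] == 'canceled'
    # side_effect with an exception instance: the call raises instead of returning.
    gw.fetch_order = MagicMock(side_effect=ValueError("order not found"))
    with pytest.raises(ValueError):
        gw.fetch_order('X')
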
@ -3253,9 +3248,6 @@ def test_fetch_order(default_conf, mocker, exchange_name, caplog):
@pytest.mark.usefixtures("init_persistence") @pytest.mark.usefixtures("init_persistence")
@pytest.mark.parametrize("exchange_name", EXCHANGES) @pytest.mark.parametrize("exchange_name", EXCHANGES)
def test_fetch_stoploss_order(default_conf, mocker, exchange_name): def test_fetch_stoploss_order(default_conf, mocker, exchange_name):
# Don't test FTX here - that needs a separate test
if exchange_name == 'ftx':
return
default_conf['dry_run'] = True default_conf['dry_run'] = True
order = MagicMock() order = MagicMock()
order.myid = 123 order.myid = 123
@ -3699,16 +3691,6 @@ def test_date_minus_candles():
# no darkpools # no darkpools
("BTC/EUR.d", 'BTC', 'EUR', "kraken", True, False, False, 'spot', ("BTC/EUR.d", 'BTC', 'EUR', "kraken", True, False, False, 'spot',
{"darkpool": True}, False), {"darkpool": True}, False),
("BTC/USD", 'BTC', 'USD', "ftx", True, False, False, 'spot', {}, True),
("USD/BTC", 'USD', 'BTC', "ftx", True, False, False, 'spot', {}, True),
# Can only trade spot markets
("BTC/USD", 'BTC', 'USD', "ftx", False, False, True, 'spot', {}, False),
("BTC/USD", 'BTC', 'USD', "ftx", False, False, True, 'futures', {}, True),
# Can only trade spot markets
("BTC-PERP", 'BTC', 'USD', "ftx", False, False, True, 'spot', {}, False),
("BTC-PERP", 'BTC', 'USD', "ftx", False, False, True, 'margin', {}, False),
("BTC-PERP", 'BTC', 'USD', "ftx", False, False, True, 'futures', {}, True),
("BTC/USDT:USDT", 'BTC', 'USD', "okx", False, False, True, 'spot', {}, False), ("BTC/USDT:USDT", 'BTC', 'USD', "okx", False, False, True, 'spot', {}, False),
("BTC/USDT:USDT", 'BTC', 'USD', "okx", False, False, True, 'margin', {}, False), ("BTC/USDT:USDT", 'BTC', 'USD', "okx", False, False, True, 'margin', {}, False),
("BTC/USDT:USDT", 'BTC', 'USD', "okx", False, False, True, 'futures', {}, True), ("BTC/USDT:USDT", 'BTC', 'USD', "okx", False, False, True, 'futures', {}, True),
@ -3841,7 +3823,7 @@ def test_calculate_backoff(retrycount, max_retries, expected):
assert calculate_backoff(retrycount, max_retries) == expected assert calculate_backoff(retrycount, max_retries) == expected
@pytest.mark.parametrize("exchange_name", ['binance', 'ftx']) @pytest.mark.parametrize("exchange_name", ['binance'])
def test__get_funding_fees_from_exchange(default_conf, mocker, exchange_name): def test__get_funding_fees_from_exchange(default_conf, mocker, exchange_name):
api_mock = MagicMock() api_mock = MagicMock()
api_mock.fetch_funding_history = MagicMock(return_value=[ api_mock.fetch_funding_history = MagicMock(return_value=[
@ -3909,7 +3891,7 @@ def test__get_funding_fees_from_exchange(default_conf, mocker, exchange_name):
) )
@pytest.mark.parametrize('exchange', ['binance', 'kraken', 'ftx']) @pytest.mark.parametrize('exchange', ['binance', 'kraken'])
@pytest.mark.parametrize('stake_amount,leverage,min_stake_with_lev', [ @pytest.mark.parametrize('stake_amount,leverage,min_stake_with_lev', [
(9.0, 3.0, 3.0), (9.0, 3.0, 3.0),
(20.0, 5.0, 4.0), (20.0, 5.0, 4.0),
@ -3930,8 +3912,6 @@ def test_get_stake_amount_considering_leverage(
@pytest.mark.parametrize("exchange_name,trading_mode", [ @pytest.mark.parametrize("exchange_name,trading_mode", [
("binance", TradingMode.FUTURES), ("binance", TradingMode.FUTURES),
("ftx", TradingMode.MARGIN),
("ftx", TradingMode.FUTURES)
]) ])
def test__set_leverage(mocker, default_conf, exchange_name, trading_mode): def test__set_leverage(mocker, default_conf, exchange_name, trading_mode):
@ -3982,9 +3962,6 @@ def test_set_margin_mode(mocker, default_conf, margin_mode):
("kraken", TradingMode.SPOT, None, False), ("kraken", TradingMode.SPOT, None, False),
("kraken", TradingMode.MARGIN, MarginMode.ISOLATED, True), ("kraken", TradingMode.MARGIN, MarginMode.ISOLATED, True),
("kraken", TradingMode.FUTURES, MarginMode.ISOLATED, True), ("kraken", TradingMode.FUTURES, MarginMode.ISOLATED, True),
("ftx", TradingMode.SPOT, None, False),
("ftx", TradingMode.MARGIN, MarginMode.ISOLATED, True),
("ftx", TradingMode.FUTURES, MarginMode.ISOLATED, True),
("bittrex", TradingMode.SPOT, None, False), ("bittrex", TradingMode.SPOT, None, False),
("bittrex", TradingMode.MARGIN, MarginMode.CROSS, True), ("bittrex", TradingMode.MARGIN, MarginMode.CROSS, True),
("bittrex", TradingMode.MARGIN, MarginMode.ISOLATED, True), ("bittrex", TradingMode.MARGIN, MarginMode.ISOLATED, True),
@ -4005,8 +3982,6 @@ def test_set_margin_mode(mocker, default_conf, margin_mode):
("binance", TradingMode.FUTURES, MarginMode.CROSS, True), ("binance", TradingMode.FUTURES, MarginMode.CROSS, True),
("kraken", TradingMode.MARGIN, MarginMode.CROSS, True), ("kraken", TradingMode.MARGIN, MarginMode.CROSS, True),
("kraken", TradingMode.FUTURES, MarginMode.CROSS, True), ("kraken", TradingMode.FUTURES, MarginMode.CROSS, True),
("ftx", TradingMode.MARGIN, MarginMode.CROSS, True),
("ftx", TradingMode.FUTURES, MarginMode.CROSS, True),
("gateio", TradingMode.MARGIN, MarginMode.CROSS, True), ("gateio", TradingMode.MARGIN, MarginMode.CROSS, True),
("gateio", TradingMode.FUTURES, MarginMode.CROSS, True), ("gateio", TradingMode.FUTURES, MarginMode.CROSS, True),
@ -4015,8 +3990,6 @@ def test_set_margin_mode(mocker, default_conf, margin_mode):
# ("binance", TradingMode.FUTURES, MarginMode.CROSS, False), # ("binance", TradingMode.FUTURES, MarginMode.CROSS, False),
# ("kraken", TradingMode.MARGIN, MarginMode.CROSS, False), # ("kraken", TradingMode.MARGIN, MarginMode.CROSS, False),
# ("kraken", TradingMode.FUTURES, MarginMode.CROSS, False), # ("kraken", TradingMode.FUTURES, MarginMode.CROSS, False),
# ("ftx", TradingMode.MARGIN, MarginMode.CROSS, False),
# ("ftx", TradingMode.FUTURES, MarginMode.CROSS, False),
# ("gateio", TradingMode.MARGIN, MarginMode.CROSS, False), # ("gateio", TradingMode.MARGIN, MarginMode.CROSS, False),
# ("gateio", TradingMode.FUTURES, MarginMode.CROSS, False), # ("gateio", TradingMode.FUTURES, MarginMode.CROSS, False),
]) ])
@ -4046,7 +4019,6 @@ def test_validate_trading_mode_and_margin_mode(
("bibox", "futures", {"has": {"fetchCurrencies": False}, "options": {"defaultType": "swap"}}), ("bibox", "futures", {"has": {"fetchCurrencies": False}, "options": {"defaultType": "swap"}}),
("bybit", "spot", {"options": {"defaultType": "spot"}}), ("bybit", "spot", {"options": {"defaultType": "spot"}}),
("bybit", "futures", {"options": {"defaultType": "linear"}}), ("bybit", "futures", {"options": {"defaultType": "linear"}}),
("ftx", "futures", {"options": {"defaultType": "swap"}}),
("gateio", "futures", {"options": {"defaultType": "swap"}}), ("gateio", "futures", {"options": {"defaultType": "swap"}}),
("hitbtc", "futures", {"options": {"defaultType": "swap"}}), ("hitbtc", "futures", {"options": {"defaultType": "swap"}}),
("kraken", "futures", {"options": {"defaultType": "swap"}}), ("kraken", "futures", {"options": {"defaultType": "swap"}}),
@ -4223,11 +4195,6 @@ def test_combine_funding_and_mark(
# ('kraken', "2021-09-01 00:00:00", "2021-09-01 07:59:59", 30.0, -0.0012443999999999999), # ('kraken', "2021-09-01 00:00:00", "2021-09-01 07:59:59", 30.0, -0.0012443999999999999),
# ('kraken', "2021-09-01 00:00:00", "2021-09-01 12:00:00", 30.0, 0.0045759), # ('kraken', "2021-09-01 00:00:00", "2021-09-01 12:00:00", 30.0, 0.0045759),
# ('kraken', "2021-09-01 00:00:01", "2021-09-01 08:00:00", 30.0, -0.0008289), # ('kraken', "2021-09-01 00:00:01", "2021-09-01 08:00:00", 30.0, -0.0008289),
('ftx', 0, 2, "2021-09-01 00:10:00", "2021-09-01 00:30:00", 30.0, 0.0),
('ftx', 0, 9, "2021-09-01 00:00:00", "2021-09-01 08:00:00", 30.0, 0.0010008),
('ftx', 0, 13, "2021-09-01 00:00:00", "2021-09-01 12:00:00", 30.0, 0.0146691),
('ftx', 0, 9, "2021-09-01 00:00:00", "2021-09-01 08:00:00", 50.0, 0.001668),
('ftx', 1, 9, "2021-09-01 00:00:01", "2021-09-01 08:00:00", 30.0, 0.0019932),
('gateio', 0, 2, "2021-09-01 00:10:00", "2021-09-01 04:00:00", 30.0, 0.0), ('gateio', 0, 2, "2021-09-01 00:10:00", "2021-09-01 04:00:00", 30.0, 0.0),
('gateio', 0, 2, "2021-09-01 00:00:00", "2021-09-01 08:00:00", 30.0, -0.0009140999), ('gateio', 0, 2, "2021-09-01 00:00:00", "2021-09-01 08:00:00", 30.0, -0.0009140999),
('gateio', 0, 2, "2021-09-01 00:00:00", "2021-09-01 12:00:00", 30.0, -0.0009140999), ('gateio', 0, 2, "2021-09-01 00:00:00", "2021-09-01 12:00:00", 30.0, -0.0009140999),
@ -4289,7 +4256,6 @@ def test__fetch_and_calculate_funding_fees(
d2 = datetime.strptime(f"{d2} +0000", '%Y-%m-%d %H:%M:%S %z') d2 = datetime.strptime(f"{d2} +0000", '%Y-%m-%d %H:%M:%S %z')
funding_rate_history = { funding_rate_history = {
'binance': funding_rate_history_octohourly, 'binance': funding_rate_history_octohourly,
'ftx': funding_rate_history_hourly,
'gateio': funding_rate_history_octohourly, 'gateio': funding_rate_history_octohourly,
}[exchange][rate_start:rate_end] }[exchange][rate_start:rate_end]
api_mock = MagicMock() api_mock = MagicMock()
@ -5056,7 +5022,7 @@ def test_get_max_leverage_futures(default_conf, mocker, leverage_tiers):
exchange.get_max_leverage("BTC/USDT", 1000000000.01) exchange.get_max_leverage("BTC/USDT", 1000000000.01)
@pytest.mark.parametrize("exchange_name", ['bittrex', 'binance', 'kraken', 'ftx', 'gateio', 'okx']) @pytest.mark.parametrize("exchange_name", ['bittrex', 'binance', 'kraken', 'gateio', 'okx'])
def test__get_params(mocker, default_conf, exchange_name): def test__get_params(mocker, default_conf, exchange_name):
api_mock = MagicMock() api_mock = MagicMock()
mocker.patch('freqtrade.exchange.Exchange.exchange_has', return_value=True) mocker.patch('freqtrade.exchange.Exchange.exchange_has', return_value=True)

View File

@ -1,272 +0,0 @@
from random import randint
from unittest.mock import MagicMock
import ccxt
import pytest
from freqtrade.exceptions import DependencyException, InvalidOrderException
from freqtrade.exchange.common import API_FETCH_ORDER_RETRY_COUNT
from tests.conftest import get_patched_exchange
from .test_exchange import ccxt_exceptionhandlers
STOPLOSS_ORDERTYPE = 'stop'
@pytest.mark.parametrize('order_price,exchangelimitratio,side', [
(217.8, 1.05, "sell"),
(222.2, 0.95, "buy"),
])
def test_stoploss_order_ftx(default_conf, mocker, order_price, exchangelimitratio, side):
api_mock = MagicMock()
order_id = 'test_prod_buy_{}'.format(randint(0, 10 ** 6))
api_mock.create_order = MagicMock(return_value={
'id': order_id,
'info': {
'foo': 'bar'
}
})
default_conf['dry_run'] = False
mocker.patch('freqtrade.exchange.Exchange.amount_to_precision', lambda s, x, y: y)
mocker.patch('freqtrade.exchange.Exchange.price_to_precision', lambda s, x, y: y)
exchange = get_patched_exchange(mocker, default_conf, api_mock, 'ftx')
# stoploss_on_exchange_limit_ratio is irrelevant for ftx market orders
order = exchange.stoploss(
pair='ETH/BTC',
amount=1,
stop_price=190,
side=side,
order_types={'stoploss_on_exchange_limit_ratio': exchangelimitratio},
leverage=1.0
)
assert api_mock.create_order.call_args_list[0][1]['symbol'] == 'ETH/BTC'
assert api_mock.create_order.call_args_list[0][1]['type'] == STOPLOSS_ORDERTYPE
assert api_mock.create_order.call_args_list[0][1]['side'] == side
assert api_mock.create_order.call_args_list[0][1]['amount'] == 1
assert 'orderPrice' not in api_mock.create_order.call_args_list[0][1]['params']
assert 'stopPrice' in api_mock.create_order.call_args_list[0][1]['params']
assert api_mock.create_order.call_args_list[0][1]['params']['stopPrice'] == 190
assert api_mock.create_order.call_count == 1
api_mock.create_order.reset_mock()
order = exchange.stoploss(
pair='ETH/BTC',
amount=1,
stop_price=220,
order_types={},
side=side,
leverage=1.0
)
assert 'id' in order
assert 'info' in order
assert order['id'] == order_id
assert api_mock.create_order.call_args_list[0][1]['symbol'] == 'ETH/BTC'
assert api_mock.create_order.call_args_list[0][1]['type'] == STOPLOSS_ORDERTYPE
assert api_mock.create_order.call_args_list[0][1]['side'] == side
assert api_mock.create_order.call_args_list[0][1]['amount'] == 1
assert 'orderPrice' not in api_mock.create_order.call_args_list[0][1]['params']
assert api_mock.create_order.call_args_list[0][1]['params']['stopPrice'] == 220
api_mock.create_order.reset_mock()
order = exchange.stoploss(
pair='ETH/BTC',
amount=1,
stop_price=220,
order_types={'stoploss': 'limit'}, side=side,
leverage=1.0
)
assert 'id' in order
assert 'info' in order
assert order['id'] == order_id
assert api_mock.create_order.call_args_list[0][1]['symbol'] == 'ETH/BTC'
assert api_mock.create_order.call_args_list[0][1]['type'] == STOPLOSS_ORDERTYPE
assert api_mock.create_order.call_args_list[0][1]['side'] == side
assert api_mock.create_order.call_args_list[0][1]['amount'] == 1
assert 'orderPrice' in api_mock.create_order.call_args_list[0][1]['params']
assert api_mock.create_order.call_args_list[0][1]['params']['orderPrice'] == order_price
assert api_mock.create_order.call_args_list[0][1]['params']['stopPrice'] == 220
# test exception handling
with pytest.raises(DependencyException):
api_mock.create_order = MagicMock(side_effect=ccxt.InsufficientFunds("0 balance"))
exchange = get_patched_exchange(mocker, default_conf, api_mock, 'ftx')
exchange.stoploss(
pair='ETH/BTC',
amount=1,
stop_price=220,
order_types={},
side=side,
leverage=1.0
)
with pytest.raises(InvalidOrderException):
api_mock.create_order = MagicMock(
side_effect=ccxt.InvalidOrder("ftx Order would trigger immediately."))
exchange = get_patched_exchange(mocker, default_conf, api_mock, 'ftx')
exchange.stoploss(
pair='ETH/BTC',
amount=1,
stop_price=220,
order_types={},
side=side,
leverage=1.0
)
ccxt_exceptionhandlers(mocker, default_conf, api_mock, "ftx",
"stoploss", "create_order", retries=1,
pair='ETH/BTC', amount=1, stop_price=220, order_types={},
side=side, leverage=1.0)
@pytest.mark.parametrize('side', [("sell"), ("buy")])
def test_stoploss_order_dry_run_ftx(default_conf, mocker, side):
api_mock = MagicMock()
default_conf['dry_run'] = True
mocker.patch('freqtrade.exchange.Exchange.amount_to_precision', lambda s, x, y: y)
mocker.patch('freqtrade.exchange.Exchange.price_to_precision', lambda s, x, y: y)
exchange = get_patched_exchange(mocker, default_conf, api_mock, 'ftx')
api_mock.create_order.reset_mock()
order = exchange.stoploss(
pair='ETH/BTC',
amount=1,
stop_price=220,
order_types={},
side=side,
leverage=1.0
)
assert 'id' in order
assert 'info' in order
assert 'type' in order
assert order['type'] == STOPLOSS_ORDERTYPE
assert order['price'] == 220
assert order['amount'] == 1
@pytest.mark.parametrize('sl1,sl2,sl3,side', [
(1501, 1499, 1501, "sell"),
(1499, 1501, 1499, "buy")
])
def test_stoploss_adjust_ftx(mocker, default_conf, sl1, sl2, sl3, side):
exchange = get_patched_exchange(mocker, default_conf, id='ftx')
order = {
'type': STOPLOSS_ORDERTYPE,
'price': 1500,
}
assert exchange.stoploss_adjust(sl1, order, side=side)
assert not exchange.stoploss_adjust(sl2, order, side=side)
# Test with invalid order case ...
order['type'] = 'stop_loss_limit'
assert not exchange.stoploss_adjust(sl3, order, side=side)
@pytest.mark.usefixtures("init_persistence")
def test_fetch_stoploss_order_ftx(default_conf, mocker, limit_sell_order, limit_buy_order):
default_conf['dry_run'] = True
order = MagicMock()
order.myid = 123
exchange = get_patched_exchange(mocker, default_conf, id='ftx')
exchange._dry_run_open_orders['X'] = order
assert exchange.fetch_stoploss_order('X', 'TKN/BTC').myid == 123
with pytest.raises(InvalidOrderException, match=r'Tried to get an invalid dry-run-order.*'):
exchange.fetch_stoploss_order('Y', 'TKN/BTC')
default_conf['dry_run'] = False
api_mock = MagicMock()
api_mock.fetch_orders = MagicMock(return_value=[{'id': 'X', 'status': '456'}])
exchange = get_patched_exchange(mocker, default_conf, api_mock, id='ftx')
assert exchange.fetch_stoploss_order('X', 'TKN/BTC')['status'] == '456'
api_mock.fetch_orders = MagicMock(return_value=[{'id': 'Y', 'status': '456'}])
exchange = get_patched_exchange(mocker, default_conf, api_mock, id='ftx')
with pytest.raises(InvalidOrderException, match=r"Could not get stoploss order for id X"):
exchange.fetch_stoploss_order('X', 'TKN/BTC')['status']
# stoploss Limit order
api_mock.fetch_orders = MagicMock(return_value=[
{'id': 'X', 'status': 'closed',
'info': {
'orderId': 'mocked_limit_sell',
}}])
api_mock.fetch_order = MagicMock(return_value=limit_sell_order.copy())
# No orderId field - no call to fetch_order
resp = exchange.fetch_stoploss_order('X', 'TKN/BTC')
assert resp
assert api_mock.fetch_order.call_count == 1
assert resp['id_stop'] == 'mocked_limit_sell'
assert resp['id'] == 'X'
assert resp['type'] == 'stop'
assert resp['status_stop'] == 'triggered'
# Stoploss market order
# Contains no new Order, but "average" instead
order = {'id': 'X', 'status': 'closed', 'info': {'orderId': None}, 'average': 0.254}
api_mock.fetch_orders = MagicMock(return_value=[order])
api_mock.fetch_order.reset_mock()
api_mock.privateGetConditionalOrdersConditionalOrderIdTriggers = MagicMock(
return_value={'result': [
{'orderId': 'mocked_market_sell', 'type': 'market', 'side': 'sell', 'price': 0.254}
]})
resp = exchange.fetch_stoploss_order('X', 'TKN/BTC')
assert resp
# fetch_order not called (no regular order ID)
assert api_mock.fetch_order.call_count == 1
api_mock.privateGetConditionalOrdersConditionalOrderIdTriggers.call_count == 1
expected_resp = limit_sell_order.copy()
expected_resp.update({
'id_stop': 'X',
'id': 'X',
'type': 'stop',
'status_stop': 'triggered',
})
assert expected_resp == resp
with pytest.raises(InvalidOrderException):
api_mock.fetch_orders = MagicMock(side_effect=ccxt.InvalidOrder("Order not found"))
exchange = get_patched_exchange(mocker, default_conf, api_mock, id='ftx')
exchange.fetch_stoploss_order(order_id='_', pair='TKN/BTC')
assert api_mock.fetch_orders.call_count == 1
ccxt_exceptionhandlers(mocker, default_conf, api_mock, 'ftx',
'fetch_stoploss_order', 'fetch_orders',
retries=API_FETCH_ORDER_RETRY_COUNT + 1,
order_id='_', pair='TKN/BTC')
def test_get_order_id(mocker, default_conf):
exchange = get_patched_exchange(mocker, default_conf, id='ftx')
order = {
'type': STOPLOSS_ORDERTYPE,
'price': 1500,
'id': '1111',
'id_stop': '1234',
'info': {
}
}
assert exchange.get_order_id_conditional(order) == '1234'
order = {
'type': 'limit',
'price': 1500,
'id': '1111',
'id_stop': '1234',
'info': {
}
}
assert exchange.get_order_id_conditional(order) == '1111'

View File

@ -27,16 +27,16 @@ def is_mac() -> bool:
return "Darwin" in machine return "Darwin" in machine
@pytest.mark.parametrize('model, pca, dbscan', [ @pytest.mark.parametrize('model, pca, dbscan, float32', [
('LightGBMRegressor', True, False), ('LightGBMRegressor', True, False, True),
('XGBoostRegressor', False, True), ('XGBoostRegressor', False, True, False),
('XGBoostRFRegressor', False, False), ('XGBoostRFRegressor', False, False, False),
('CatboostRegressor', False, False), ('CatboostRegressor', False, False, False),
('ReinforcementLearner', False, True), ('ReinforcementLearner', False, True, False),
('ReinforcementLearner_multiproc', False, False), ('ReinforcementLearner_multiproc', False, False, False),
('ReinforcementLearner_test_4ac', False, False) ('ReinforcementLearner_test_4ac', False, False, False)
]) ])
def test_extract_data_and_train_model_Standard(mocker, freqai_conf, model, pca, dbscan): def test_extract_data_and_train_model_Standard(mocker, freqai_conf, model, pca, dbscan, float32):
if is_arm() and model == 'CatboostRegressor': if is_arm() and model == 'CatboostRegressor':
pytest.skip("CatBoost is not supported on ARM") pytest.skip("CatBoost is not supported on ARM")
@ -49,6 +49,17 @@ def test_extract_data_and_train_model_Standard(mocker, freqai_conf, model, pca,
freqai_conf.update({"strategy": "freqai_test_strat"}) freqai_conf.update({"strategy": "freqai_test_strat"})
freqai_conf['freqai']['feature_parameters'].update({"principal_component_analysis": pca}) freqai_conf['freqai']['feature_parameters'].update({"principal_component_analysis": pca})
freqai_conf['freqai']['feature_parameters'].update({"use_DBSCAN_to_remove_outliers": dbscan}) freqai_conf['freqai']['feature_parameters'].update({"use_DBSCAN_to_remove_outliers": dbscan})
freqai_conf.update({"reduce_df_footprint": float32})
if 'ReinforcementLearner' in model:
model_save_ext = 'zip'
freqai_conf = make_rl_config(freqai_conf)
# test the RL guardrails
freqai_conf['freqai']['feature_parameters'].update({"use_SVM_to_remove_outliers": True})
freqai_conf['freqai']['data_split_parameters'].update({'shuffle': True})
if 'test_4ac' in model:
freqai_conf["freqaimodel_path"] = str(Path(__file__).parents[1] / "freqai" / "test_models")
if 'ReinforcementLearner' in model: if 'ReinforcementLearner' in model:
model_save_ext = 'zip' model_save_ext = 'zip'
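The new float32 parameter toggles "reduce_df_footprint" in the test config. A minimal sketch of the kind of dtype downcast that option implies — plain pandas, with a hypothetical helper name (shrink_dataframe), not the actual freqtrade implementation:

import numpy as np
import pandas as pd


def shrink_dataframe(df: pd.DataFrame) -> pd.DataFrame:
    """Downcast 64-bit numeric columns to 32-bit to roughly halve memory usage."""
    df = df.copy()
    float_cols = df.select_dtypes(include=[np.float64]).columns
    int_cols = df.select_dtypes(include=[np.int64]).columns
    df[float_cols] = df[float_cols].astype(np.float32)
    df[int_cols] = df[int_cols].astype(np.int32)
    return df


df = pd.DataFrame({"close": np.random.rand(1000),
                   "volume": np.random.rand(1000)})
assert shrink_dataframe(df)["close"].dtype == np.float32
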

View File

@ -19,11 +19,6 @@ twentyfive_hours = FtPrecise(25.0)
('kraken', 0.00025, ten_mins, 0.03), ('kraken', 0.00025, ten_mins, 0.03),
('kraken', 0.00025, five_hours, 0.045), ('kraken', 0.00025, five_hours, 0.045),
('kraken', 0.00025, twentyfive_hours, 0.12), ('kraken', 0.00025, twentyfive_hours, 0.12),
# FTX
('ftx', 0.0005, ten_mins, 0.00125),
('ftx', 0.00025, ten_mins, 0.000625),
('ftx', 0.00025, five_hours, 0.003125),
('ftx', 0.00025, twentyfive_hours, 0.015625),
]) ])
def test_interest(exchange, interest_rate, hours, expected): def test_interest(exchange, interest_rate, hours, expected):
borrowed = FtPrecise(60.0) borrowed = FtPrecise(60.0)
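The kraken rows that remain are consistent with an interest model of "one opening fee plus one fee per started four-hour period" — a hedged reading of the fixture values, not a statement of Kraken's actual fee schedule. A quick check against the rows kept above, using plain floats in place of FtPrecise:

from math import ceil


def kraken_interest(borrowed: float, rate: float, hours: float) -> float:
    # opening fee + one fee for every started 4-hour period the loan is open
    return borrowed * rate * (1 + ceil(hours / 4))


assert round(kraken_interest(60.0, 0.00025, 10 / 60), 8) == 0.03   # ten minutes
assert round(kraken_interest(60.0, 0.00025, 5.0), 8) == 0.045      # five hours
assert round(kraken_interest(60.0, 0.00025, 25.0), 8) == 0.12      # twenty-five hours
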

View File

@ -612,9 +612,9 @@ def test_VolumePairList_whitelist_gen(mocker, whitelist_conf, shitcoinmarkets, t
"lookback_timeframe": "1h", "lookback_period": 2, "refresh_period": 3600}], "lookback_timeframe": "1h", "lookback_period": 2, "refresh_period": 3600}],
"BTC", "binance", ['ETH/BTC', 'LTC/BTC', 'NEO/BTC', 'TKN/BTC', 'XRP/BTC']), "BTC", "binance", ['ETH/BTC', 'LTC/BTC', 'NEO/BTC', 'TKN/BTC', 'XRP/BTC']),
# ftx data is already in Quote currency, therefore won't require conversion # ftx data is already in Quote currency, therefore won't require conversion
([{"method": "VolumePairList", "number_assets": 5, "sort_key": "quoteVolume", # ([{"method": "VolumePairList", "number_assets": 5, "sort_key": "quoteVolume",
"lookback_timeframe": "1d", "lookback_period": 1, "refresh_period": 86400}], # "lookback_timeframe": "1d", "lookback_period": 1, "refresh_period": 86400}],
"BTC", "ftx", ['HOT/BTC', 'LTC/BTC', 'ETH/BTC', 'TKN/BTC', 'XRP/BTC']), # "BTC", "ftx", ['HOT/BTC', 'LTC/BTC', 'ETH/BTC', 'TKN/BTC', 'XRP/BTC']),
]) ])
def test_VolumePairList_range(mocker, whitelist_conf, shitcoinmarkets, tickers, ohlcv_history, def test_VolumePairList_range(mocker, whitelist_conf, shitcoinmarkets, tickers, ohlcv_history,
pairlists, base_currency, exchange, volumefilter_result) -> None: pairlists, base_currency, exchange, volumefilter_result) -> None:
@ -636,8 +636,6 @@ def test_VolumePairList_range(mocker, whitelist_conf, shitcoinmarkets, tickers,
ohlcv_history_high_volume['high'] = ohlcv_history_high_volume.loc[:, 'high'] * 0.01 ohlcv_history_high_volume['high'] = ohlcv_history_high_volume.loc[:, 'high'] * 0.01
ohlcv_history_high_volume['close'] = ohlcv_history_high_volume.loc[:, 'close'] * 0.01 ohlcv_history_high_volume['close'] = ohlcv_history_high_volume.loc[:, 'close'] * 0.01
mocker.patch('freqtrade.exchange.ftx.Ftx.market_is_tradable', return_value=True)
ohlcv_data = { ohlcv_data = {
('ETH/BTC', '1d', CandleType.SPOT): ohlcv_history, ('ETH/BTC', '1d', CandleType.SPOT): ohlcv_history,
('TKN/BTC', '1d', CandleType.SPOT): ohlcv_history, ('TKN/BTC', '1d', CandleType.SPOT): ohlcv_history,
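For reference, the range-based VolumePairList entries exercised above have this shape (keys taken from the parametrization itself), written here as a Python dict:

# Pairlist entry mirroring the keys used in the parametrization above.
pairlists = [{
    "method": "VolumePairList",
    "number_assets": 5,
    "sort_key": "quoteVolume",
    "lookback_timeframe": "1h",   # aggregate volume over 1h candles ...
    "lookback_period": 2,         # ... across the last 2 candles
    "refresh_period": 3600,       # re-evaluate the whitelist once per hour
}]
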

View File

@ -1,6 +1,7 @@
# pragma pylint: disable=missing-docstring, C0103 # pragma pylint: disable=missing-docstring, C0103
# pragma pylint: disable=invalid-sequence-index, invalid-name, too-many-arguments # pragma pylint: disable=invalid-sequence-index, invalid-name, too-many-arguments
from copy import deepcopy
from datetime import datetime, timedelta, timezone from datetime import datetime, timedelta, timezone
from unittest.mock import ANY, MagicMock, PropertyMock from unittest.mock import ANY, MagicMock, PropertyMock
@ -28,27 +29,7 @@ def prec_satoshi(a, b) -> float:
# Unit tests # Unit tests
def test_rpc_trade_status(default_conf, ticker, fee, mocker) -> None: def test_rpc_trade_status(default_conf, ticker, fee, mocker) -> None:
mocker.patch('freqtrade.rpc.telegram.Telegram', MagicMock()) gen_response = {
mocker.patch.multiple(
'freqtrade.exchange.Exchange',
fetch_ticker=ticker,
get_fee=fee,
)
freqtradebot = get_patched_freqtradebot(mocker, default_conf)
patch_get_signal(freqtradebot)
rpc = RPC(freqtradebot)
freqtradebot.state = State.RUNNING
with pytest.raises(RPCException, match=r'.*no active trade*'):
rpc._rpc_trade_status()
freqtradebot.enter_positions()
trades = Trade.get_open_trades()
freqtradebot.exit_positions(trades)
results = rpc._rpc_trade_status()
assert results[0] == {
'trade_id': 1, 'trade_id': 1,
'pair': 'ETH/BTC', 'pair': 'ETH/BTC',
'base_currency': 'ETH', 'base_currency': 'ETH',
@ -127,91 +108,103 @@ def test_rpc_trade_status(default_conf, ticker, fee, mocker) -> None:
'remaining': ANY, 'status': ANY, 'ft_is_entry': True, 'remaining': ANY, 'status': ANY, 'ft_is_entry': True,
}], }],
} }
mocker.patch('freqtrade.rpc.telegram.Telegram', MagicMock())
mocker.patch.multiple(
'freqtrade.exchange.Exchange',
fetch_ticker=ticker,
get_fee=fee,
_is_dry_limit_order_filled=MagicMock(side_effect=[False, True]),
)
freqtradebot = get_patched_freqtradebot(mocker, default_conf)
patch_get_signal(freqtradebot)
rpc = RPC(freqtradebot)
freqtradebot.state = State.RUNNING
with pytest.raises(RPCException, match=r'.*no active trade*'):
rpc._rpc_trade_status()
freqtradebot.enter_positions()
# Open order...
results = rpc._rpc_trade_status()
response_unfilled = deepcopy(gen_response)
# Different from "filled" response:
response_unfilled.update({
'amount': 91.07468124,
'profit_ratio': 0.0,
'profit_pct': 0.0,
'profit_abs': 0.0,
'current_profit': 0.0,
'current_profit_pct': 0.0,
'current_profit_abs': 0.0,
'stop_loss_abs': 0.0,
'stop_loss_pct': None,
'stop_loss_ratio': None,
'stoploss_current_dist': -1.099e-05,
'stoploss_current_dist_ratio': -1.0,
'stoploss_current_dist_pct': pytest.approx(-100.0),
'stoploss_entry_dist': -0.0010025,
'stoploss_entry_dist_ratio': -1.0,
'initial_stop_loss_abs': 0.0,
'initial_stop_loss_pct': None,
'initial_stop_loss_ratio': None,
'open_order': '(limit buy rem=91.07468123)',
})
response_unfilled['orders'][0].update({
'is_open': True,
'filled': 0.0,
'remaining': 91.07468123
})
assert results[0] == response_unfilled
# Open order without remaining
trade = Trade.get_open_trades()[0]
# kucoin case (no remaining set).
trade.orders[0].remaining = None
Trade.commit()
results = rpc._rpc_trade_status()
# Reuse above object, only remaining changed.
response_unfilled['orders'][0].update({
'remaining': None
})
assert results[0] == response_unfilled
trade = Trade.get_open_trades()[0]
trade.orders[0].remaining = trade.amount
Trade.commit()
# Fill open order ...
freqtradebot.manage_open_orders()
trades = Trade.get_open_trades()
freqtradebot.exit_positions(trades)
results = rpc._rpc_trade_status()
response = deepcopy(gen_response)
assert results[0] == response
mocker.patch('freqtrade.exchange.Exchange.get_rate', mocker.patch('freqtrade.exchange.Exchange.get_rate',
MagicMock(side_effect=ExchangeError("Pair 'ETH/BTC' not available"))) MagicMock(side_effect=ExchangeError("Pair 'ETH/BTC' not available")))
results = rpc._rpc_trade_status() results = rpc._rpc_trade_status()
assert isnan(results[0]['current_profit']) assert isnan(results[0]['current_profit'])
assert isnan(results[0]['current_rate']) assert isnan(results[0]['current_rate'])
assert results[0] == { response_norate = deepcopy(gen_response)
'trade_id': 1, # Update elements that are NaN when no rate is available.
'pair': 'ETH/BTC', response_norate.update({
'base_currency': 'ETH',
'quote_currency': 'BTC',
'open_date': ANY,
'open_timestamp': ANY,
'is_open': ANY,
'fee_open': ANY,
'fee_open_cost': ANY,
'fee_open_currency': ANY,
'fee_close': fee.return_value,
'fee_close_cost': ANY,
'fee_close_currency': ANY,
'open_rate_requested': ANY,
'open_trade_value': ANY,
'close_rate_requested': ANY,
'sell_reason': ANY,
'exit_reason': ANY,
'exit_order_status': ANY,
'min_rate': ANY,
'max_rate': ANY,
'strategy': ANY,
'buy_tag': ANY,
'enter_tag': ANY,
'timeframe': ANY,
'open_order_id': ANY,
'close_date': None,
'close_timestamp': None,
'open_rate': 1.098e-05,
'close_rate': None,
'current_rate': ANY,
'amount': 91.07468123,
'amount_requested': 91.07468124,
'trade_duration': ANY,
'trade_duration_s': ANY,
'stake_amount': 0.001,
'close_profit': None,
'close_profit_pct': None,
'close_profit_abs': None,
'current_profit': ANY,
'current_profit_pct': ANY,
'current_profit_abs': ANY,
'profit_ratio': ANY,
'profit_pct': ANY,
'profit_abs': ANY,
'profit_fiat': ANY,
'stop_loss_abs': 9.89e-06,
'stop_loss_pct': -10.0,
'stop_loss_ratio': -0.1,
'stoploss_order_id': None,
'stoploss_last_update': ANY,
'stoploss_last_update_timestamp': ANY,
'initial_stop_loss_abs': 9.89e-06,
'initial_stop_loss_pct': -10.0,
'initial_stop_loss_ratio': -0.1,
'stoploss_current_dist': ANY, 'stoploss_current_dist': ANY,
'stoploss_current_dist_ratio': ANY, 'stoploss_current_dist_ratio': ANY,
'stoploss_current_dist_pct': ANY, 'stoploss_current_dist_pct': ANY,
'stoploss_entry_dist': -0.00010402, 'profit_ratio': ANY,
'stoploss_entry_dist_ratio': -0.10376381, 'profit_pct': ANY,
'open_order': None, 'profit_abs': ANY,
'exchange': 'binance', 'current_profit_abs': ANY,
'realized_profit': 0.0, 'current_profit': ANY,
'leverage': 1.0, 'current_profit_pct': ANY,
'interest_rate': 0.0, 'current_rate': ANY,
'liquidation_price': None, })
'is_short': False, assert results[0] == response_norate
'funding_fees': 0.0,
'trading_mode': TradingMode.SPOT,
'orders': [{
'amount': 91.07468123, 'average': 1.098e-05, 'safe_price': 1.098e-05,
'cost': 0.0009999999999054, 'filled': 91.07468123, 'ft_order_side': 'buy',
'order_date': ANY, 'order_timestamp': ANY, 'order_filled_date': ANY,
'order_filled_timestamp': ANY, 'order_type': 'limit', 'price': 1.098e-05,
'is_open': False, 'pair': 'ETH/BTC', 'order_id': ANY,
'remaining': ANY, 'status': ANY, 'ft_is_entry': True,
}],
}
def test_rpc_status_table(default_conf, ticker, fee, mocker) -> None: def test_rpc_status_table(default_conf, ticker, fee, mocker) -> None:
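The refactor above builds the expected RPC payload once as gen_response and derives the unfilled and no-rate variants with deepcopy plus targeted update calls. deepcopy matters here because the payload nests a list under 'orders'; a shallow copy would share that list between variants. A small illustration with generic dicts, not the freqtrade payload:

from copy import copy, deepcopy

base = {'profit_abs': 0.0, 'orders': [{'filled': 91.07, 'is_open': False}]}

shallow = copy(base)
shallow['orders'][0]['is_open'] = True
assert base['orders'][0]['is_open'] is True      # mutated through the shared inner list

deep = deepcopy(base)
deep['orders'][0]['filled'] = 0.0
assert base['orders'][0]['filled'] == 91.07      # the deep copy is fully independent
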

View File

@ -67,7 +67,7 @@ def botclient(default_conf, mocker):
def client_post(client, url, data={}): def client_post(client, url, data={}):
return client.post(url, return client.post(url,
data=data, content=data,
headers={'Authorization': _basic_auth_str(_TEST_USER, _TEST_PASS), headers={'Authorization': _basic_auth_str(_TEST_USER, _TEST_PASS),
'Origin': 'http://example.com', 'Origin': 'http://example.com',
'content-type': 'application/json' 'content-type': 'application/json'

View File

@ -3036,7 +3036,7 @@ def test_handle_cancel_enter(mocker, caplog, default_conf_usdt, limit_order, is_
@pytest.mark.parametrize("is_short", [False, True]) @pytest.mark.parametrize("is_short", [False, True])
@pytest.mark.parametrize("limit_buy_order_canceled_empty", ['binance', 'ftx', 'kraken', 'bittrex'], @pytest.mark.parametrize("limit_buy_order_canceled_empty", ['binance', 'kraken', 'bittrex'],
indirect=['limit_buy_order_canceled_empty']) indirect=['limit_buy_order_canceled_empty'])
def test_handle_cancel_enter_exchanges(mocker, caplog, default_conf_usdt, is_short, fee, def test_handle_cancel_enter_exchanges(mocker, caplog, default_conf_usdt, is_short, fee,
limit_buy_order_canceled_empty) -> None: limit_buy_order_canceled_empty) -> None:
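The indirect=['limit_buy_order_canceled_empty'] parametrization routes each exchange name through the fixture of the same name rather than into the test directly. A minimal, generic sketch of that mechanism, with hypothetical fixture and test names:

import pytest


@pytest.fixture
def canceled_order(request):
    # request.param receives each value from the parametrize list below.
    exchange = request.param
    return {'exchange': exchange, 'status': 'canceled', 'filled': 0.0}


@pytest.mark.parametrize('canceled_order', ['binance', 'kraken', 'bittrex'],
                         indirect=['canceled_order'])
def test_canceled_order_shape(canceled_order):
    assert canceled_order['status'] == 'canceled'
    assert canceled_order['filled'] == 0.0
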

View File

@ -1,4 +1,6 @@
# pragma pylint: disable=missing-docstring, C0103 # pragma pylint: disable=missing-docstring, C0103
from datetime import datetime, timezone
import arrow import arrow
import pytest import pytest
@ -8,16 +10,28 @@ from freqtrade.exceptions import OperationalException
def test_parse_timerange_incorrect(): def test_parse_timerange_incorrect():
assert TimeRange('date', None, 1274486400, 0) == TimeRange.parse_timerange('20100522-') timerange = TimeRange.parse_timerange('20100522-')
assert TimeRange(None, 'date', 0, 1274486400) == TimeRange.parse_timerange('-20100522') assert TimeRange('date', None, 1274486400, 0) == timerange
assert timerange.timerange_str == '20100522-'
timerange = TimeRange.parse_timerange('-20100522')
assert TimeRange(None, 'date', 0, 1274486400) == timerange
assert timerange.timerange_str == '-20100522'
timerange = TimeRange.parse_timerange('20100522-20150730') timerange = TimeRange.parse_timerange('20100522-20150730')
assert timerange == TimeRange('date', 'date', 1274486400, 1438214400) assert timerange == TimeRange('date', 'date', 1274486400, 1438214400)
assert timerange.timerange_str == '20100522-20150730'
assert timerange.start_fmt == '2010-05-22 00:00:00'
assert timerange.stop_fmt == '2015-07-30 00:00:00'
# Added test for unix timestamp - BTC genesis date # Added test for unix timestamp - BTC genesis date
assert TimeRange('date', None, 1231006505, 0) == TimeRange.parse_timerange('1231006505-') assert TimeRange('date', None, 1231006505, 0) == TimeRange.parse_timerange('1231006505-')
assert TimeRange(None, 'date', 0, 1233360000) == TimeRange.parse_timerange('-1233360000') assert TimeRange(None, 'date', 0, 1233360000) == TimeRange.parse_timerange('-1233360000')
timerange = TimeRange.parse_timerange('1231006505-1233360000') timerange = TimeRange.parse_timerange('1231006505-1233360000')
assert TimeRange('date', 'date', 1231006505, 1233360000) == timerange assert TimeRange('date', 'date', 1231006505, 1233360000) == timerange
assert isinstance(timerange.startdt, datetime)
assert isinstance(timerange.stopdt, datetime)
assert timerange.startdt == datetime.fromtimestamp(1231006505, tz=timezone.utc)
assert timerange.stopdt == datetime.fromtimestamp(1233360000, tz=timezone.utc)
assert timerange.timerange_str == '20090103-20090131'
timerange = TimeRange.parse_timerange('1231006505000-1233360000000') timerange = TimeRange.parse_timerange('1231006505000-1233360000000')
assert TimeRange('date', 'date', 1231006505, 1233360000) == timerange assert TimeRange('date', 'date', 1231006505, 1233360000) == timerange
@ -45,6 +59,7 @@ def test_subtract_start():
x = TimeRange(None, 'date', 0, 1438214400) x = TimeRange(None, 'date', 0, 1438214400)
x.subtract_start(300) x.subtract_start(300)
assert not x.startts assert not x.startts
assert not x.startdt
x = TimeRange('date', None, 1274486400, 0) x = TimeRange('date', None, 1274486400, 0)
x.subtract_start(300) x.subtract_start(300)
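The new assertions exercise convenience accessors on TimeRange (startdt, stopdt, timerange_str, start_fmt, stop_fmt). A rough sketch of how such properties could be derived from the stored timestamps — an illustration consistent with the asserted values, not freqtrade's actual implementation:

from datetime import datetime, timezone
from typing import Optional


class TimeRangeSketch:
    def __init__(self, startts: int = 0, stopts: int = 0):
        self.startts = startts
        self.stopts = stopts

    @property
    def startdt(self) -> Optional[datetime]:
        return datetime.fromtimestamp(self.startts, tz=timezone.utc) if self.startts else None

    @property
    def stopdt(self) -> Optional[datetime]:
        return datetime.fromtimestamp(self.stopts, tz=timezone.utc) if self.stopts else None

    @property
    def timerange_str(self) -> str:
        start = self.startdt.strftime('%Y%m%d') if self.startdt else ''
        stop = self.stopdt.strftime('%Y%m%d') if self.stopdt else ''
        return f"{start}-{stop}"


tr = TimeRangeSketch(1231006505, 1233360000)
assert tr.timerange_str == '20090103-20090131'
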