Merge branch 'freqtrade:develop' into develop

lolong 2022-10-23 10:09:08 +02:00 committed by GitHub
commit 6794bf144d
44 changed files with 561 additions and 178 deletions

View File

@ -17,7 +17,7 @@ repos:
- types-filelock==3.2.7
- types-requests==2.28.11.2
- types-tabulate==0.9.0.0
- types-python-dateutil==2.8.19
- types-python-dateutil==2.8.19.1
# stages: [push]
- repo: https://github.com/pycqa/isort

Binary file not shown.

View File

@ -18,13 +18,8 @@
"name": "binance",
"key": "",
"secret": "",
"ccxt_config": {
"enableRateLimit": true
},
"ccxt_async_config": {
"enableRateLimit": true,
"rateLimit": 200
},
"ccxt_config": {},
"ccxt_async_config": {},
"pair_whitelist": [
"1INCH/USDT",
"ALGO/USDT"

View File

@ -11,7 +11,7 @@ ENV FT_APP_ENV="docker"
# Prepare environment
RUN mkdir /freqtrade \
&& apt-get update \
&& apt-get -y install sudo libatlas3-base curl sqlite3 libhdf5-dev \
&& apt-get -y install sudo libatlas3-base curl sqlite3 libhdf5-dev libutf8proc-dev libsnappy-dev \
&& apt-get clean \
&& useradd -u 1000 -G sudo -U -m ftuser \
&& chown ftuser:ftuser /freqtrade \
@ -37,6 +37,7 @@ ENV LD_LIBRARY_PATH /usr/local/lib
COPY --chown=ftuser:ftuser requirements.txt /freqtrade/
USER ftuser
RUN pip install --user --no-cache-dir numpy \
&& pip install --user /tmp/pyarrow-*.whl \
&& pip install --user --no-cache-dir -r requirements.txt
# Copy dependencies to runtime-image

View File

@ -192,11 +192,11 @@ dataframe["target_roi"] = dataframe["&-s_close_mean"] + dataframe["&-s_close_std
dataframe["sell_roi"] = dataframe["&-s_close_mean"] - dataframe["&-s_close_std"] * 1.25
```
To consider the population of *historical predictions* for creating the dynamic target instead of information from the training as discussed above, you would set `fit_live_prediction_candles` in the config to the number of historical prediction candles you wish to use to generate target statistics.
To consider the population of *historical predictions* for creating the dynamic target instead of information from the training as discussed above, you would set `fit_live_predictions_candles` in the config to the number of historical prediction candles you wish to use to generate target statistics.
```json
"freqai": {
"fit_live_prediction_candles": 300,
"fit_live_predictions_candles": 300,
}
```

View File

@ -1,5 +1,5 @@
markdown==3.3.7
mkdocs==1.4.0
mkdocs==1.4.1
mkdocs-material==8.5.6
mdx_truly_sane_lists==1.3
pymdown-extensions==9.6

View File

@ -655,13 +655,13 @@ This is where calling `self.dp.current_whitelist()` comes in handy.
# fetch live / historical candle (OHLCV) data for the first informative pair
inf_pair, inf_timeframe = self.informative_pairs()[0]
informative = self.dp.get_pair_dataframe(pair=inf_pair,
timeframe=inf_timeframe)
timeframe=inf_timeframe)
```
!!! Warning "Warning about backtesting"
Be careful when using dataprovider in backtesting. `historic_ohlcv()` (and `get_pair_dataframe()`
for the backtesting runmode) provides the full time-range in one go,
so please be aware of it and make sure to not "look into the future" to avoid surprises when running in dry/live mode.
In backtesting, `dp.get_pair_dataframe()` behavior differs depending on where it's called.
Within `populate_*()` methods, `dp.get_pair_dataframe()` returns the full timerange. Please make sure to not "look into the future" to avoid surprises when running in dry/live mode.
Within [callbacks](strategy-callbacks.md), you'll get the full timerange up to the current (simulated) candle.
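A minimal sketch of the callback case (assuming the call happens inside a strategy callback such as `confirm_trade_entry()`; only the `dp` call is the point here):
``` python
def confirm_trade_entry(self, pair: str, order_type: str, amount: float,
                        rate: float, time_in_force: str, current_time,
                        entry_tag, side: str, **kwargs) -> bool:
    # In backtesting, calling this from a callback returns data only up to the
    # current (simulated) candle, while the same call inside populate_*() methods
    # returns the full timerange at once.
    df = self.dp.get_pair_dataframe(pair=pair, timeframe=self.timeframe)
    return not df.empty
```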
### *get_analyzed_dataframe(pair, timeframe)*
@ -670,13 +670,13 @@ It can also be used in specific callbacks to get the signal that caused the acti
``` python
# fetch current dataframe
if self.dp.runmode.value in ('live', 'dry_run'):
dataframe, last_updated = self.dp.get_analyzed_dataframe(pair=metadata['pair'],
timeframe=self.timeframe)
dataframe, last_updated = self.dp.get_analyzed_dataframe(pair=metadata['pair'],
timeframe=self.timeframe)
```
!!! Note "No data available"
Returns an empty dataframe if the requested pair was not cached.
You can check for this with `if dataframe.empty:` and handle this case accordingly.
This should not happen when using whitelisted pairs.
### *orderbook(pair, maximum)*

View File

@ -169,6 +169,43 @@ Example: Search dedicated strategy path.
freqtrade list-strategies --strategy-path ~/.freqtrade/strategies/
```
## List freqAI models
Use the `list-freqaimodels` subcommand to see all freqAI models available.
This subcommand is useful for finding problems in your environment with loading freqAI models: modules with models that contain errors and failed to load are printed in red (LOAD FAILED), while models with duplicate names are printed in yellow (DUPLICATE NAME).
```
usage: freqtrade list-freqaimodels [-h] [-v] [--logfile FILE] [-V] [-c PATH]
[-d PATH] [--userdir PATH]
[--freqaimodel-path PATH] [-1] [--no-color]
optional arguments:
-h, --help show this help message and exit
--freqaimodel-path PATH
Specify additional lookup path for freqaimodels.
-1, --one-column Print output in one column.
--no-color Disable colorization of hyperopt results. May be
useful if you are redirecting output to a file.
Common arguments:
-v, --verbose Verbose mode (-vv for more, -vvv to get all messages).
--logfile FILE Log to the file specified. Special values are:
'syslog', 'journald'. See the documentation for more
details.
-V, --version show program's version number and exit
-c PATH, --config PATH
Specify configuration file (default:
`userdir/config.json` or `config.json` whichever
exists). Multiple --config options may be used. Can be
set to `-` to read config from stdin.
-d PATH, --datadir PATH, --data-dir PATH
Path to directory with historical backtesting data.
--userdir PATH, --user-data-dir PATH
Path to userdata directory.
```
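Example: Print only the model names, one per line.
```
freqtrade list-freqaimodels -1
```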
## List Exchanges
Use the `list-exchanges` subcommand to see the exchanges available for the bot.

View File

@ -16,6 +16,6 @@ if 'dev' in __version__:
from pathlib import Path
versionfile = Path('./freqtrade_commit')
if versionfile.is_file():
__version__ = f"docker-{versionfile.read_text()[:8]}"
__version__ = f"docker-{__version__}-{versionfile.read_text()[:8]}"
except Exception:
pass
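For illustration (hypothetical base version), the override now keeps the base version in the reported string instead of replacing it:
``` python
# Sketch only: the docker build writes the short commit hash to ./freqtrade_commit.
__version__ = "2022.10.dev"                    # assumed base version
commit = "6794bf144d"                          # example contents of ./freqtrade_commit
print(f"docker-{__version__}-{commit[:8]}")    # -> docker-2022.10.dev-6794bf14
```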

View File

@ -15,9 +15,9 @@ from freqtrade.commands.db_commands import start_convert_db
from freqtrade.commands.deploy_commands import (start_create_userdir, start_install_ui,
start_new_strategy)
from freqtrade.commands.hyperopt_commands import start_hyperopt_list, start_hyperopt_show
from freqtrade.commands.list_commands import (start_list_exchanges, start_list_markets,
start_list_strategies, start_list_timeframes,
start_show_trades)
from freqtrade.commands.list_commands import (start_list_exchanges, start_list_freqAI_models,
start_list_markets, start_list_strategies,
start_list_timeframes, start_show_trades)
from freqtrade.commands.optimize_commands import (start_backtesting, start_backtesting_show,
start_edge, start_hyperopt)
from freqtrade.commands.pairlist_commands import start_test_pairlist

View File

@ -41,6 +41,8 @@ ARGS_EDGE = ARGS_COMMON_OPTIMIZE + ["stoploss_range"]
ARGS_LIST_STRATEGIES = ["strategy_path", "print_one_column", "print_colorized",
"recursive_strategy_search"]
ARGS_LIST_FREQAIMODELS = ["freqaimodel_path", "print_one_column", "print_colorized"]
ARGS_LIST_HYPEROPTS = ["hyperopt_path", "print_one_column", "print_colorized"]
ARGS_BACKTEST_SHOW = ["exportfilename", "backtest_show_pair_list"]
@ -106,8 +108,8 @@ ARGS_ANALYZE_ENTRIES_EXITS = ["exportfilename", "analysis_groups", "enter_reason
"exit_reason_list", "indicator_list"]
NO_CONF_REQURIED = ["convert-data", "convert-trade-data", "download-data", "list-timeframes",
"list-markets", "list-pairs", "list-strategies", "list-data",
"hyperopt-list", "hyperopt-show", "backtest-filter",
"list-markets", "list-pairs", "list-strategies", "list-freqaimodels",
"list-data", "hyperopt-list", "hyperopt-show", "backtest-filter",
"plot-dataframe", "plot-profit", "show-trades", "trades-to-ohlcv"]
NO_CONF_ALLOWED = ["create-userdir", "list-exchanges", "new-strategy"]
@ -192,10 +194,11 @@ class Arguments:
start_create_userdir, start_download_data, start_edge,
start_hyperopt, start_hyperopt_list, start_hyperopt_show,
start_install_ui, start_list_data, start_list_exchanges,
start_list_markets, start_list_strategies,
start_list_timeframes, start_new_config, start_new_strategy,
start_plot_dataframe, start_plot_profit, start_show_trades,
start_test_pairlist, start_trading, start_webserver)
start_list_freqAI_models, start_list_markets,
start_list_strategies, start_list_timeframes,
start_new_config, start_new_strategy, start_plot_dataframe,
start_plot_profit, start_show_trades, start_test_pairlist,
start_trading, start_webserver)
subparsers = self.parser.add_subparsers(dest='command',
# Use custom message when no subhandler is added
@ -362,6 +365,15 @@ class Arguments:
list_strategies_cmd.set_defaults(func=start_list_strategies)
self._build_args(optionlist=ARGS_LIST_STRATEGIES, parser=list_strategies_cmd)
# Add list-freqAI Models subcommand
list_freqaimodels_cmd = subparsers.add_parser(
'list-freqaimodels',
help='Print available freqAI models.',
parents=[_common_parser],
)
list_freqaimodels_cmd.set_defaults(func=start_list_freqAI_models)
self._build_args(optionlist=ARGS_LIST_FREQAIMODELS, parser=list_freqaimodels_cmd)
# Add list-timeframes subcommand
list_timeframes_cmd = subparsers.add_parser(
'list-timeframes',

View File

@ -90,6 +90,21 @@ def start_list_strategies(args: Dict[str, Any]) -> None:
_print_objs_tabular(strategy_objs, config.get('print_colorized', False))
def start_list_freqAI_models(args: Dict[str, Any]) -> None:
"""
Print files with FreqAI models custom classes available in the directory
"""
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
from freqtrade.resolvers.freqaimodel_resolver import FreqaiModelResolver
model_objs = FreqaiModelResolver.search_all_objects(config, not args['print_one_column'])
# Sort alphabetically
model_objs = sorted(model_objs, key=lambda x: x['name'])
if args['print_one_column']:
print('\n'.join([s['name'] for s in model_objs]))
else:
_print_objs_tabular(model_objs, config.get('print_colorized', False))
def start_list_timeframes(args: Dict[str, Any]) -> None:
"""
Print timeframes available on Exchange

View File

@ -410,11 +410,13 @@ class Exchange:
else:
return DataFrame()
def get_contract_size(self, pair: str) -> float:
def get_contract_size(self, pair: str) -> Optional[float]:
if self.trading_mode == TradingMode.FUTURES:
market = self.markets[pair]
market = self.markets.get(pair, {})
contract_size: float = 1.0
if market['contractSize'] is not None:
if not market:
return None
if market.get('contractSize') is not None:
# ccxt has contractSize in markets as string
contract_size = float(market['contractSize'])
return contract_size
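Since `get_contract_size()` can now return `None` for pairs missing from `markets`, callers should guard for it. A minimal sketch (hypothetical caller, assuming an initialized `exchange` and a `pair` string):
``` python
contract_size = exchange.get_contract_size(pair)
if contract_size is None:
    # Pair not present in exchange.markets (e.g. delisted) - skip contract scaling.
    contract_size = 1.0
```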
@ -1934,6 +1936,7 @@ class Exchange:
candle_limit = self.ohlcv_candle_limit(timeframe, self._config['candle_type_def'])
# Age out old candles
ohlcv_df = ohlcv_df.tail(candle_limit + self._startup_candle_count)
ohlcv_df = ohlcv_df.reset_index(drop=True)
self._klines[(pair, timeframe, c_type)] = ohlcv_df
else:
self._klines[(pair, timeframe, c_type)] = ohlcv_df
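The added `reset_index(drop=True)` matters because `DataFrame.tail()` keeps the original index, so label-based lookups such as `df.at[0, 'open']` (used in the updated cache test below) would otherwise fail. A small standalone pandas illustration:
``` python
import pandas as pd

df = pd.DataFrame({"open": range(10)})
tail = df.tail(3)           # index is 7, 8, 9
# tail.at[0, "open"]        # would raise KeyError - label 0 no longer exists
tail = tail.reset_index(drop=True)
print(tail.at[0, "open"])   # 7
```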

View File

@ -51,7 +51,7 @@ class BaseClassifierModel(IFreqaiModel):
f"{end_date} --------------------")
# split data into train/test data.
data_dictionary = dk.make_train_test_datasets(features_filtered, labels_filtered)
if not self.freqai_info.get("fit_live_predictions", 0) or not self.live:
if not self.freqai_info.get("fit_live_predictions_candles", 0) or not self.live:
dk.fit_labels()
# normalize all data based on train_dataset only
data_dictionary = dk.normalize_data(data_dictionary)

View File

@ -50,7 +50,7 @@ class BaseRegressionModel(IFreqaiModel):
f"{end_date} --------------------")
# split data into train/test data.
data_dictionary = dk.make_train_test_datasets(features_filtered, labels_filtered)
if not self.freqai_info.get("fit_live_predictions", 0) or not self.live:
if not self.freqai_info.get("fit_live_predictions_candles", 0) or not self.live:
dk.fit_labels()
# normalize all data based on train_dataset only
data_dictionary = dk.normalize_data(data_dictionary)

View File

@ -47,7 +47,7 @@ class BaseTensorFlowModel(IFreqaiModel):
f"{end_date} --------------------")
# split data into train/test data.
data_dictionary = dk.make_train_test_datasets(features_filtered, labels_filtered)
if not self.freqai_info.get("fit_live_predictions", 0) or not self.live:
if not self.freqai_info.get("fit_live_predictions_candles", 0) or not self.live:
dk.fit_labels()
# normalize all data based on train_dataset only
data_dictionary = dk.normalize_data(data_dictionary)

View File

@ -971,6 +971,9 @@ class FreqaiDataKitchen:
append_df[f"{label}_mean"] = self.data["labels_mean"][label]
append_df[f"{label}_std"] = self.data["labels_std"][label]
for extra_col in self.data["extra_returns_per_train"]:
append_df["{extra_col}"] = self.data["extra_returns_per_train"][extra_col]
append_df["do_predict"] = do_predict
if self.freqai_config["feature_parameters"].get("DI_threshold", 0) > 0:
append_df["DI_values"] = self.DI_values

View File

@ -1,4 +1,5 @@
import logging
import sys
from pathlib import Path
from typing import Any, Dict
@ -48,6 +49,7 @@ class CatboostClassifier(BaseClassifierModel):
init_model = self.get_init_model(dk.pair)
cbr.fit(X=train_data, eval_set=test_data, init_model=init_model)
cbr.fit(X=train_data, eval_set=test_data, init_model=init_model,
log_cout=sys.stdout, log_cerr=sys.stderr)
return cbr

View File

@ -1,4 +1,5 @@
import logging
import sys
from pathlib import Path
from typing import Any, Dict
@ -47,6 +48,7 @@ class CatboostRegressor(BaseRegressionModel):
**self.model_training_parameters,
)
model.fit(X=train_data, eval_set=test_data, init_model=init_model)
model.fit(X=train_data, eval_set=test_data, init_model=init_model,
log_cout=sys.stdout, log_cerr=sys.stderr)
return model
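For reference, CatBoost's `fit()` accepts explicit `log_cout`/`log_cerr` streams, which is what the change above relies on to keep training output on the real stdout/stderr. A standalone sketch (tiny toy data, not the freqtrade training path):
``` python
import sys

from catboost import CatBoostRegressor

model = CatBoostRegressor(iterations=10, verbose=5)
model.fit([[0.0], [1.0], [2.0], [3.0]], [0.0, 1.0, 2.0, 3.0],
          log_cout=sys.stdout, log_cerr=sys.stderr)
```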

View File

@ -1,4 +1,5 @@
import logging
import sys
from pathlib import Path
from typing import Any, Dict
@ -58,8 +59,10 @@ class CatboostRegressorMultiTarget(BaseRegressionModel):
fit_params = []
for i in range(len(eval_sets)):
fit_params.append(
{'eval_set': eval_sets[i], 'init_model': init_models[i]})
fit_params.append({
'eval_set': eval_sets[i], 'init_model': init_models[i],
'log_cout': sys.stdout, 'log_cerr': sys.stderr,
})
model = FreqaiMultiOutputRegressor(estimator=cbr)
thread_training = self.freqai_info.get('multitarget_parallel_training', False)

View File

@ -1471,12 +1471,13 @@ class FreqtradeBot(LoggingMixin):
)
return cancelled
def _safe_exit_amount(self, pair: str, amount: float) -> float:
def _safe_exit_amount(self, trade: Trade, pair: str, amount: float) -> float:
"""
Get sellable amount.
Should be trade.amount - but will fall back to the available amount if necessary.
This should cover cases where get_real_amount() was not able to update the amount
for whatever reason.
:param trade: Trade we're working with
:param pair: Pair we're trying to sell
:param amount: amount we expect to be available
:return: amount to sell
@ -1495,6 +1496,7 @@ class FreqtradeBot(LoggingMixin):
return amount
elif wallet_amount > amount * 0.98:
logger.info(f"{pair} - Falling back to wallet-amount {wallet_amount} -> {amount}.")
trade.amount = wallet_amount
return wallet_amount
else:
raise DependencyException(
@ -1553,7 +1555,7 @@ class FreqtradeBot(LoggingMixin):
# Emergency sells (default to market!)
order_type = self.strategy.order_types.get("emergency_exit", "market")
amount = self._safe_exit_amount(trade.pair, sub_trade_amt or trade.amount)
amount = self._safe_exit_amount(trade, trade.pair, sub_trade_amt or trade.amount)
time_in_force = self.strategy.order_time_in_force['exit']
if (exit_check.exit_type != ExitType.LIQUIDATION
@ -1828,7 +1830,7 @@ class FreqtradeBot(LoggingMixin):
never in base currency.
"""
self.wallets.update()
amount_ = amount
amount_ = trade.amount
if order_obj.ft_order_side == trade.exit_side or order_obj.ft_order_side == 'stoploss':
# check against remaining amount!
amount_ = trade.amount - amount

View File

@ -151,6 +151,8 @@ class Backtesting:
self.trading_mode: TradingMode = config.get('trading_mode', TradingMode.SPOT)
# strategies which define "can_short=True" will fail to load in Spot mode.
self._can_short = self.trading_mode != TradingMode.SPOT
self._position_stacking: bool = self.config.get('position_stacking', False)
self.enable_protections: bool = self.config.get('enable_protections', False)
self.init_backtest()
@ -924,7 +926,7 @@ class Backtesting:
Handling of left open trades at the end of backtesting
"""
for pair in open_trades.keys():
for trade in open_trades[pair]:
for trade in list(open_trades[pair]):
if trade.open_order_id and trade.nr_of_successful_entries == 0:
# Ignore trade if entry-order did not fill yet
continue
@ -959,9 +961,8 @@ class Backtesting:
return 'short'
return None
def run_protections(
self, enable_protections, pair: str, current_time: datetime, side: LongShort):
if enable_protections:
def run_protections(self, pair: str, current_time: datetime, side: LongShort):
if self.enable_protections:
self.protections.stop_per_pair(pair, current_time, side)
self.protections.global_stop(current_time, side)
@ -1067,10 +1068,78 @@ class Backtesting:
return None
return row
def backtest(self, processed: Dict, # noqa: max-complexity: 13
def backtest_loop(
self, row: Tuple, pair: str, current_time: datetime, end_date: datetime,
max_open_trades: int, open_trade_count_start: int) -> int:
"""
NOTE: This method is used by Hyperopt at each iteration. Please keep it optimized.
Backtesting processing for one candle/pair.
"""
for t in list(LocalTrade.bt_trades_open_pp[pair]):
# 1. Manage currently open orders of active trades
if self.manage_open_orders(t, current_time, row):
# Close trade
open_trade_count_start -= 1
LocalTrade.remove_bt_trade(t)
self.wallets.update()
# 2. Process entries.
# without position stacking, we can only have one open trade per pair.
# max_open_trades must be respected
# don't open on the last row
trade_dir = self.check_for_trade_entry(row)
if (
(self._position_stacking or len(LocalTrade.bt_trades_open_pp[pair]) == 0)
and self.trade_slot_available(max_open_trades, open_trade_count_start)
and current_time != end_date
and trade_dir is not None
and not PairLocks.is_pair_locked(pair, row[DATE_IDX], trade_dir)
):
trade = self._enter_trade(pair, row, trade_dir)
if trade:
# TODO: hacky workaround to avoid opening > max_open_trades
# This emulates previous behavior - not sure if this is correct
# Prevents entering if the trade-slot was freed in this candle
open_trade_count_start += 1
# logger.debug(f"{pair} - Emulate creation of new trade: {trade}.")
LocalTrade.add_bt_trade(trade)
self.wallets.update()
for trade in list(LocalTrade.bt_trades_open_pp[pair]):
# 3. Process entry orders.
order = trade.select_order(trade.entry_side, is_open=True)
if order and self._get_order_filled(order.price, row):
order.close_bt_order(current_time, trade)
trade.open_order_id = None
self.wallets.update()
# 4. Create exit orders (if any)
if not trade.open_order_id:
self._get_exit_trade_entry(trade, row) # Place exit order if necessary
# 5. Process exit orders.
order = trade.select_order(trade.exit_side, is_open=True)
if order and self._get_order_filled(order.price, row):
order.close_bt_order(current_time, trade)
trade.open_order_id = None
sub_trade = order.safe_amount_after_fee != trade.amount
if sub_trade:
order.close_bt_order(current_time, trade)
trade.recalc_trade_from_orders()
else:
trade.close_date = current_time
trade.close(order.price, show_msg=False)
# logger.debug(f"{pair} - Backtesting exit {trade}")
LocalTrade.close_bt_trade(trade)
self.wallets.update()
self.run_protections(pair, current_time, trade.trade_direction)
return open_trade_count_start
def backtest(self, processed: Dict,
start_date: datetime, end_date: datetime,
max_open_trades: int = 0, position_stacking: bool = False,
enable_protections: bool = False) -> Dict[str, Any]:
max_open_trades: int = 0) -> Dict[str, Any]:
"""
Implement backtesting functionality
@ -1083,11 +1152,9 @@ class Backtesting:
:param start_date: backtesting timerange start datetime
:param end_date: backtesting timerange end datetime
:param max_open_trades: maximum number of concurrent trades, <= 0 means unlimited
:param position_stacking: do we allow position stacking?
:param enable_protections: Should protections be enabled?
:return: DataFrame with trades (results of backtesting)
"""
self.prepare_backtest(enable_protections)
self.prepare_backtest(self.enable_protections)
# Ensure wallets are uptodate (important for --strategy-list)
self.wallets.update()
# Use dict of lists with data for performance
@ -1098,15 +1165,12 @@ class Backtesting:
indexes: Dict = defaultdict(int)
current_time = start_date + timedelta(minutes=self.timeframe_min)
open_trades: Dict[str, List[LocalTrade]] = defaultdict(list)
open_trade_count = 0
self.progress.init_step(BacktestState.BACKTEST, int(
(end_date - start_date) / timedelta(minutes=self.timeframe_min)))
# Loop timerange and get candle for each pair at that point in time
while current_time <= end_date:
open_trade_count_start = open_trade_count
open_trade_count_start = LocalTrade.bt_open_open_trade_count
self.check_abort()
for i, pair in enumerate(data):
row_index = indexes[pair]
@ -1118,78 +1182,14 @@ class Backtesting:
indexes[pair] = row_index
self.dataprovider._set_dataframe_max_index(row_index)
for t in list(open_trades[pair]):
# 1. Manage currently open orders of active trades
if self.manage_open_orders(t, current_time, row):
# Close trade
open_trade_count -= 1
open_trade_count_start -= 1
open_trades[pair].remove(t)
LocalTrade.remove_bt_trade(t)
self.wallets.update()
# 2. Process entries.
# without positionstacking, we can only have one open trade per pair.
# max_open_trades must be respected
# don't open on the last row
trade_dir = self.check_for_trade_entry(row)
if (
(position_stacking or len(open_trades[pair]) == 0)
and self.trade_slot_available(max_open_trades, open_trade_count_start)
and current_time != end_date
and trade_dir is not None
and not PairLocks.is_pair_locked(pair, row[DATE_IDX], trade_dir)
):
trade = self._enter_trade(pair, row, trade_dir)
if trade:
# TODO: hacky workaround to avoid opening > max_open_trades
# This emulates previous behavior - not sure if this is correct
# Prevents entering if the trade-slot was freed in this candle
open_trade_count_start += 1
open_trade_count += 1
# logger.debug(f"{pair} - Emulate creation of new trade: {trade}.")
open_trades[pair].append(trade)
LocalTrade.add_bt_trade(trade)
self.wallets.update()
for trade in list(open_trades[pair]):
# 3. Process entry orders.
order = trade.select_order(trade.entry_side, is_open=True)
if order and self._get_order_filled(order.price, row):
order.close_bt_order(current_time, trade)
trade.open_order_id = None
self.wallets.update()
# 4. Create exit orders (if any)
if not trade.open_order_id:
self._get_exit_trade_entry(trade, row) # Place exit order if necessary
# 5. Process exit orders.
order = trade.select_order(trade.exit_side, is_open=True)
if order and self._get_order_filled(order.price, row):
order.close_bt_order(current_time, trade)
trade.open_order_id = None
sub_trade = order.safe_amount_after_fee != trade.amount
if sub_trade:
order.close_bt_order(current_time, trade)
trade.recalc_trade_from_orders()
else:
trade.close_date = current_time
trade.close(order.price, show_msg=False)
# logger.debug(f"{pair} - Backtesting exit {trade}")
open_trade_count -= 1
open_trades[pair].remove(trade)
LocalTrade.close_bt_trade(trade)
self.wallets.update()
self.run_protections(
enable_protections, pair, current_time, trade.trade_direction)
open_trade_count_start = self.backtest_loop(
row, pair, current_time, end_date, max_open_trades, open_trade_count_start)
# Move time one configured time_interval ahead.
self.progress.increment()
current_time += timedelta(minutes=self.timeframe_min)
self.handle_left_open(open_trades, data=data)
self.handle_left_open(LocalTrade.bt_trades_open_pp, data=data)
self.wallets.update()
results = trade_list_to_dataframe(LocalTrade.trades)
@ -1245,8 +1245,6 @@ class Backtesting:
start_date=min_date,
end_date=max_date,
max_open_trades=max_open_trades,
position_stacking=self.config.get('position_stacking', False),
enable_protections=self.config.get('enable_protections', False),
)
backtest_end_time = datetime.now(timezone.utc)
results.update({

View File

@ -122,7 +122,6 @@ class Hyperopt:
else:
logger.debug('Ignoring max_open_trades (--disable-max-market-positions was used) ...')
self.max_open_trades = 0
self.position_stacking = self.config.get('position_stacking', False)
if HyperoptTools.has_space(self.config, 'sell'):
# Make sure use_exit_signal is enabled
@ -258,6 +257,7 @@ class Hyperopt:
logger.debug("Hyperopt has 'protection' space")
# Enable Protections if protection space is selected.
self.config['enable_protections'] = True
self.backtesting.enable_protections = True
self.protection_space = self.custom_hyperopt.protection_space()
if HyperoptTools.has_space(self.config, 'buy'):
@ -339,8 +339,6 @@ class Hyperopt:
start_date=self.min_date,
end_date=self.max_date,
max_open_trades=self.max_open_trades,
position_stacking=self.position_stacking,
enable_protections=self.config.get('enable_protections', False),
)
backtest_end_time = datetime.now(timezone.utc)
bt_results.update({

View File

@ -2,6 +2,7 @@
This module contains the class to persist trades into SQLite
"""
import logging
from collections import defaultdict
from datetime import datetime, timedelta, timezone
from math import isclose
from typing import Any, Dict, List, Optional
@ -255,6 +256,9 @@ class LocalTrade():
# Trades container for backtesting
trades: List['LocalTrade'] = []
trades_open: List['LocalTrade'] = []
# Copy of trades_open - but indexed by pair
bt_trades_open_pp: Dict[str, List['LocalTrade']] = defaultdict(list)
bt_open_open_trade_count: int = 0
total_profit: float = 0
realized_profit: float = 0
@ -538,6 +542,8 @@ class LocalTrade():
"""
LocalTrade.trades = []
LocalTrade.trades_open = []
LocalTrade.bt_trades_open_pp = defaultdict(list)
LocalTrade.bt_open_open_trade_count = 0
LocalTrade.total_profit = 0
def adjust_min_max_rates(self, current_price: float, current_price_low: float) -> None:
@ -1067,6 +1073,8 @@ class LocalTrade():
@staticmethod
def close_bt_trade(trade):
LocalTrade.trades_open.remove(trade)
LocalTrade.bt_trades_open_pp[trade.pair].remove(trade)
LocalTrade.bt_open_open_trade_count -= 1
LocalTrade.trades.append(trade)
LocalTrade.total_profit += trade.close_profit_abs
@ -1074,12 +1082,16 @@ class LocalTrade():
def add_bt_trade(trade):
if trade.is_open:
LocalTrade.trades_open.append(trade)
LocalTrade.bt_trades_open_pp[trade.pair].append(trade)
LocalTrade.bt_open_open_trade_count += 1
else:
LocalTrade.trades.append(trade)
@staticmethod
def remove_bt_trade(trade):
LocalTrade.trades_open.remove(trade)
LocalTrade.bt_trades_open_pp[trade.pair].remove(trade)
LocalTrade.bt_open_open_trade_count -= 1
@staticmethod
def get_open_trades() -> List[Any]:
@ -1096,7 +1108,7 @@ class LocalTrade():
if Trade.use_db:
return Trade.query.filter(Trade.is_open.is_(True)).count()
else:
return len(LocalTrade.trades_open)
return LocalTrade.bt_open_open_trade_count
@staticmethod
def stoploss_reinitialization(desired_stoploss):
@ -1508,3 +1520,87 @@ class Trade(_DECL_BASE, LocalTrade):
Order.status == 'closed'
).scalar()
return trading_volume
@staticmethod
def from_json(json_str: str) -> 'Trade':
"""
Create a Trade instance from a json string.
Used for debugging purposes - please keep.
:param json_str: json string to parse
:return: Trade instance
"""
import rapidjson
data = rapidjson.loads(json_str)
trade = Trade(
id=data["trade_id"],
pair=data["pair"],
base_currency=data["base_currency"],
stake_currency=data["quote_currency"],
is_open=data["is_open"],
exchange=data["exchange"],
amount=data["amount"],
amount_requested=data["amount_requested"],
stake_amount=data["stake_amount"],
strategy=data["strategy"],
enter_tag=data["enter_tag"],
timeframe=data["timeframe"],
fee_open=data["fee_open"],
fee_open_cost=data["fee_open_cost"],
fee_open_currency=data["fee_open_currency"],
fee_close=data["fee_close"],
fee_close_cost=data["fee_close_cost"],
fee_close_currency=data["fee_close_currency"],
open_date=datetime.fromtimestamp(data["open_timestamp"] // 1000, tz=timezone.utc),
open_rate=data["open_rate"],
open_rate_requested=data["open_rate_requested"],
open_trade_value=data["open_trade_value"],
close_date=(datetime.fromtimestamp(data["close_timestamp"] // 1000, tz=timezone.utc)
if data["close_timestamp"] else None),
realized_profit=data["realized_profit"],
close_rate=data["close_rate"],
close_rate_requested=data["close_rate_requested"],
close_profit=data["close_profit"],
close_profit_abs=data["close_profit_abs"],
exit_reason=data["exit_reason"],
exit_order_status=data["exit_order_status"],
stop_loss=data["stop_loss_abs"],
stop_loss_pct=data["stop_loss_ratio"],
stoploss_order_id=data["stoploss_order_id"],
stoploss_last_update=(datetime.fromtimestamp(data["stoploss_last_update"] // 1000,
tz=timezone.utc) if data["stoploss_last_update"] else None),
initial_stop_loss=data["initial_stop_loss_abs"],
initial_stop_loss_pct=data["initial_stop_loss_ratio"],
min_rate=data["min_rate"],
max_rate=data["max_rate"],
leverage=data["leverage"],
interest_rate=data["interest_rate"],
liquidation_price=data["liquidation_price"],
is_short=data["is_short"],
trading_mode=data["trading_mode"],
funding_fees=data["funding_fees"],
open_order_id=data["open_order_id"],
)
for order in data["orders"]:
order_obj = Order(
amount=order["amount"],
ft_order_side=order["ft_order_side"],
ft_pair=order["pair"],
ft_is_open=order["is_open"],
order_id=order["order_id"],
status=order["status"],
average=order["average"],
cost=order["cost"],
filled=order["filled"],
order_date=datetime.strptime(order["order_date"], DATETIME_PRINT_FORMAT),
order_filled_date=(datetime.fromtimestamp(
order["order_filled_timestamp"] // 1000, tz=timezone.utc)
if order["order_filled_timestamp"] else None),
order_type=order["order_type"],
price=order["price"],
remaining=order["remaining"],
)
trade.orders.append(order_obj)
return trade
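A hedged usage sketch of the new helper (hypothetical file name; the json must contain every key that `from_json()` reads - the new test further below shows a complete payload):
``` python
from pathlib import Path

from freqtrade.persistence import Trade

# Hypothetical dump of a single trade in the same shape as the test payload below.
trade_json = Path("trade_25.json").read_text()
trade = Trade.from_json(trade_json)
print(trade.pair, trade.close_profit_abs)
```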

View File

@ -26,6 +26,7 @@ class FreqaiModelResolver(IResolver):
initial_search_path = (
Path(__file__).parent.parent.joinpath("freqai/prediction_models").resolve()
)
extra_path = "freqaimodel_path"
@staticmethod
def load_freqaimodel(config: Config) -> IFreqaiModel:
@ -50,7 +51,6 @@ class FreqaiModelResolver(IResolver):
freqaimodel_name,
config,
kwargs={"config": config},
extra_dir=config.get("freqaimodel_path"),
)
return freqaimodel

View File

@ -42,6 +42,8 @@ class IResolver:
object_type_str: str
user_subdir: Optional[str] = None
initial_search_path: Optional[Path]
# Optional config setting containing a path (strategy_path, freqaimodel_path)
extra_path: Optional[str] = None
@classmethod
def build_search_paths(cls, config: Config, user_subdir: Optional[str] = None,
@ -58,6 +60,9 @@ class IResolver:
for dir in extra_dirs:
abs_paths.insert(0, Path(dir).resolve())
if cls.extra_path and (extra := config.get(cls.extra_path)):
abs_paths.insert(0, Path(extra).resolve())
return abs_paths
@classmethod

View File

@ -30,6 +30,7 @@ class StrategyResolver(IResolver):
object_type_str = "Strategy"
user_subdir = USERPATH_STRATEGIES
initial_search_path = None
extra_path = "strategy_path"
@staticmethod
def load_strategy(config: Config = None) -> IStrategy:
@ -268,14 +269,6 @@ class StrategyResolver(IResolver):
"or contains Python code errors."
)
@classmethod
def build_search_paths(cls, config: Config, user_subdir: Optional[str] = None,
extra_dirs: List[str] = []) -> List[Path]:
if 'strategy_path' in config and config['strategy_path'] not in extra_dirs:
extra_dirs = [config['strategy_path']] + extra_dirs
return super().build_search_paths(config, user_subdir, extra_dirs)
def warn_deprecated_setting(strategy: IStrategy, old: str, new: str, error=False):
if hasattr(strategy, old):

View File

@ -89,6 +89,7 @@ async def api_start_backtest(bt_settings: BacktestRequest, background_tasks: Bac
lastconfig['enable_protections'] = btconfig.get('enable_protections')
lastconfig['dry_run_wallet'] = btconfig.get('dry_run_wallet')
ApiServer._bt.enable_protections = btconfig.get('enable_protections', False)
ApiServer._bt.strategylist = [strat]
ApiServer._bt.results = {}
ApiServer._bt.load_prior_backtest()

View File

@ -1,3 +1,4 @@
import asyncio
import logging
from typing import Any, Dict
@ -89,6 +90,8 @@ async def _process_consumer_request(
for _, message in analyzed_df.items():
response = WSAnalyzedDFMessage(data=message)
await channel.send(response.dict(exclude_none=True))
# Throttle the messages to 50/s
await asyncio.sleep(0.02)
@router.websocket("/message/ws")

View File

@ -198,6 +198,10 @@ class ApiServer(RPCHandler):
logger.debug(f"Found message of type: {message.get('type')}")
# Broadcast it
await self._ws_channel_manager.broadcast(message)
# Limit messages per sec.
# Could cause problems with queue size if too low, and
# problems with network traffic if too high.
await asyncio.sleep(0.001)
except asyncio.CancelledError:
pass
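The tiny sleep mainly yields control back to the event loop between broadcasts so a fast message producer cannot starve other coroutines. A minimal standalone illustration of the pattern:
``` python
import asyncio

async def broadcast(messages) -> None:
    for message in messages:
        print("broadcast:", message)
        # Yield to the event loop between messages (caps throughput at ~1000 msg/s).
        await asyncio.sleep(0.001)

asyncio.run(broadcast(range(5)))
```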

View File

@ -1085,9 +1085,7 @@ class IStrategy(ABC, HyperStrategyMixin):
else:
logger.warning("CustomStoploss function did not return valid stoploss")
sl_lower_long = (trade.stop_loss < (low or current_rate) and not trade.is_short)
sl_higher_short = (trade.stop_loss > (high or current_rate) and trade.is_short)
if self.trailing_stop and (sl_lower_long or sl_higher_short):
if self.trailing_stop and dir_correct:
# trailing stoploss handling
sl_offset = self.trailing_stop_positive_offset
@ -1101,7 +1099,7 @@ class IStrategy(ABC, HyperStrategyMixin):
if self.trailing_stop_positive is not None and bound_profit > sl_offset:
stop_loss_value = self.trailing_stop_positive
logger.debug(f"{trade.pair} - Using positive stoploss: {stop_loss_value} "
f"offset: {sl_offset:.4g} profit: {current_profit:.2%}")
f"offset: {sl_offset:.4g} profit: {bound_profit:.2%}")
trade.adjust_stop_loss(bound or current_rate, stop_loss_value)

View File

@ -27,4 +27,4 @@ types-cachetools==5.2.1
types-filelock==3.2.7
types-requests==2.28.11.2
types-tabulate==0.9.0.0
types-python-dateutil==2.8.19
types-python-dateutil==2.8.19.1

View File

@ -5,6 +5,6 @@
scikit-learn==1.1.2
joblib==1.2.0
catboost==1.1; platform_machine != 'aarch64'
lightgbm==3.3.2
lightgbm==3.3.3
xgboost==1.6.2
tensorboard==2.10.1

View File

@ -1,14 +1,14 @@
numpy==1.23.3
numpy==1.23.4
pandas==1.5.0; platform_machine != 'armv7l'
# Piwheels doesn't have 1.5.0 yet.
pandas==1.4.3; platform_machine == 'armv7l'
pandas-ta==0.3.14b
ccxt==1.95.30
ccxt==2.0.25
# Pin cryptography for now due to rust build errors with piwheels
cryptography==38.0.1
aiohttp==3.8.3
SQLAlchemy==1.4.41
SQLAlchemy==1.4.42
python-telegram-bot==13.14
arrow==1.2.3
cachetools==4.2.2
@ -37,7 +37,7 @@ orjson==3.8.0
sdnotify==0.3.2
# API Server
fastapi==0.85.0
fastapi==0.85.1
pydantic>=1.8.0
uvicorn==0.18.3
pyjwt==2.5.0

View File

@ -18,6 +18,7 @@ from freqtrade.commands import (start_backtesting_show, start_convert_data, star
from freqtrade.commands.db_commands import start_convert_db
from freqtrade.commands.deploy_commands import (clean_ui_subdir, download_and_install_ui,
get_ui_download_url, read_ui_version)
from freqtrade.commands.list_commands import start_list_freqAI_models
from freqtrade.configuration import setup_utils_configuration
from freqtrade.enums import RunMode
from freqtrade.exceptions import OperationalException
@ -944,6 +945,34 @@ def test_start_list_strategies(capsys):
assert str(Path("broken_strats/broken_futures_strategies.py")) in captured.out
def test_start_list_freqAI_models(capsys):
args = [
"list-freqaimodels",
"-1"
]
pargs = get_args(args)
pargs['config'] = None
start_list_freqAI_models(pargs)
captured = capsys.readouterr()
assert "LightGBMClassifier" in captured.out
assert "LightGBMRegressor" in captured.out
assert "XGBoostRegressor" in captured.out
assert "<builtin>/LightGBMRegressor.py" not in captured.out
args = [
"list-freqaimodels",
]
pargs = get_args(args)
pargs['config'] = None
start_list_freqAI_models(pargs)
captured = capsys.readouterr()
assert "LightGBMClassifier" in captured.out
assert "LightGBMRegressor" in captured.out
assert "XGBoostRegressor" in captured.out
assert "<builtin>/LightGBMRegressor.py" in captured.out
def test_start_test_pairlist(mocker, caplog, tickers, default_conf, capsys):
patch_exchange(mocker, mock_markets=True)
mocker.patch.multiple('freqtrade.exchange.Exchange',

View File

@ -2196,6 +2196,9 @@ def test_refresh_latest_ohlcv_cache(mocker, default_conf, candle_type, time_mach
time_machine.move_to(start + timedelta(hours=99, minutes=30))
exchange = get_patched_exchange(mocker, default_conf)
mocker.patch("freqtrade.exchange.Exchange.ohlcv_candle_limit", return_value=100)
assert exchange._startup_candle_count == 0
exchange._api_async.fetch_ohlcv = get_mock_coro(ohlcv)
pair1 = ('IOTA/ETH', '1h', candle_type)
pair2 = ('XRP/ETH', '1h', candle_type)
@ -2236,30 +2239,36 @@ def test_refresh_latest_ohlcv_cache(mocker, default_conf, candle_type, time_mach
assert len(res) == 2
assert len(res[pair1]) == 99
assert len(res[pair2]) == 99
assert res[pair2].at[0, 'open']
assert exchange._pairs_last_refresh_time[pair1] == ohlcv[-1][0] // 1000
refresh_prior = exchange._pairs_last_refresh_time[pair1]
# New candle on exchange - only return 50 candles (but one candle further)
new_startdate = (start + timedelta(hours=51)).strftime('%Y-%m-%d %H:%M')
ohlcv = generate_test_data_raw('1h', 50, new_startdate)
# New candle on exchange - return 100 candles - but skip one candle so we actually get 2 candles
# in one go
new_startdate = (start + timedelta(hours=2)).strftime('%Y-%m-%d %H:%M')
# mocker.patch("freqtrade.exchange.Exchange.ohlcv_candle_limit", return_value=100)
ohlcv = generate_test_data_raw('1h', 100, new_startdate)
exchange._api_async.fetch_ohlcv = get_mock_coro(ohlcv)
res = exchange.refresh_latest_ohlcv(pairs)
assert exchange._api_async.fetch_ohlcv.call_count == 2
assert len(res) == 2
assert len(res[pair1]) == 100
assert len(res[pair2]) == 100
# Verify index starts at 0
assert res[pair2].at[0, 'open']
assert refresh_prior != exchange._pairs_last_refresh_time[pair1]
assert exchange._pairs_last_refresh_time[pair1] == ohlcv[-1][0] // 1000
assert exchange._pairs_last_refresh_time[pair2] == ohlcv[-1][0] // 1000
exchange._api_async.fetch_ohlcv.reset_mock()
# Retry same call - no action.
# Retry same call - from cache
res = exchange.refresh_latest_ohlcv(pairs)
assert exchange._api_async.fetch_ohlcv.call_count == 0
assert len(res) == 2
assert len(res[pair1]) == 100
assert len(res[pair2]) == 100
assert res[pair2].at[0, 'open']
# Move to distant future (so a 1 call would cause a hole in the data)
time_machine.move_to(start + timedelta(hours=2000))
@ -2272,6 +2281,7 @@ def test_refresh_latest_ohlcv_cache(mocker, default_conf, candle_type, time_mach
# Cache eviction - new data.
assert len(res[pair1]) == 99
assert len(res[pair2]) == 99
assert res[pair2].at[0, 'open']
@pytest.mark.asyncio
@ -4341,9 +4351,10 @@ def test__fetch_and_calculate_funding_fees_datetime_called(
('XLTCUSDT', 1, 'spot'),
('LTC/USD', 1, 'futures'),
('XLTCUSDT', 0.01, 'futures'),
('ETH/USDT:USDT', 10, 'futures')
('ETH/USDT:USDT', 10, 'futures'),
('TORN/USDT:USDT', None, 'futures'), # Don't fail for unavailable pairs.
])
def est__get_contract_size(mocker, default_conf, pair, expected_size, trading_mode):
def test__get_contract_size(mocker, default_conf, pair, expected_size, trading_mode):
api_mock = MagicMock()
default_conf['trading_mode'] = trading_mode
default_conf['margin_mode'] = 'isolated'

View File

@ -97,7 +97,6 @@ def _make_backtest_conf(mocker, datadir, conf=None, pair='UNITTEST/BTC'):
'start_date': min_date,
'end_date': max_date,
'max_open_trades': 10,
'position_stacking': False,
}
@ -735,7 +734,6 @@ def test_backtest_one(default_conf, fee, mocker, testdatadir) -> None:
start_date=min_date,
end_date=max_date,
max_open_trades=10,
position_stacking=False,
)
results = result['results']
assert not results.empty
@ -822,7 +820,6 @@ def test_backtest_timedout_entry_orders(default_conf, fee, mocker, testdatadir)
start_date=min_date,
end_date=max_date,
max_open_trades=1,
position_stacking=False,
)
assert result['timedout_entry_orders'] == 10
@ -848,7 +845,6 @@ def test_backtest_1min_timeframe(default_conf, fee, mocker, testdatadir) -> None
start_date=min_date,
end_date=max_date,
max_open_trades=1,
position_stacking=False,
)
assert not results['results'].empty
assert len(results['results']) == 1
@ -880,7 +876,6 @@ def test_backtest_trim_no_data_left(default_conf, fee, mocker, testdatadir) -> N
start_date=min_date,
end_date=max_date,
max_open_trades=10,
position_stacking=False,
)
@ -935,7 +930,6 @@ def test_backtest_dataprovider_analyzed_df(default_conf, fee, mocker, testdatadi
start_date=min_date,
end_date=max_date,
max_open_trades=10,
position_stacking=False,
)
assert count == 5
@ -979,8 +973,6 @@ def test_backtest_pricecontours_protections(default_conf, fee, mocker, testdatad
start_date=min_date,
end_date=max_date,
max_open_trades=1,
position_stacking=False,
enable_protections=default_conf.get('enable_protections', False),
)
assert len(results['results']) == numres
@ -1023,8 +1015,6 @@ def test_backtest_pricecontours(default_conf, fee, mocker, testdatadir,
start_date=min_date,
end_date=max_date,
max_open_trades=1,
position_stacking=False,
enable_protections=default_conf.get('enable_protections', False),
)
assert len(results['results']) == expected
@ -1136,7 +1126,6 @@ def test_backtest_multi_pair(default_conf, fee, mocker, tres, pair, testdatadir)
'start_date': min_date,
'end_date': max_date,
'max_open_trades': 3,
'position_stacking': False,
}
results = backtesting.backtest(**backtest_conf)
@ -1159,7 +1148,6 @@ def test_backtest_multi_pair(default_conf, fee, mocker, tres, pair, testdatadir)
'start_date': min_date,
'end_date': max_date,
'max_open_trades': 1,
'position_stacking': False,
}
results = backtesting.backtest(**backtest_conf)
assert len(evaluate_result_multi(results['results'], '5m', 1)) == 0

View File

@ -42,7 +42,6 @@ def test_backtest_position_adjustment(default_conf, fee, mocker, testdatadir) ->
start_date=min_date,
end_date=max_date,
max_open_trades=10,
position_stacking=False,
)
results = result['results']
assert not results.empty

View File

@ -336,7 +336,7 @@ def test_start_calls_optimizer(mocker, hyperopt_conf, capsys) -> None:
assert hasattr(hyperopt.backtesting.strategy, "advise_entry")
assert hasattr(hyperopt, "max_open_trades")
assert hyperopt.max_open_trades == hyperopt_conf['max_open_trades']
assert hasattr(hyperopt, "position_stacking")
assert hasattr(hyperopt.backtesting, "_position_stacking")
def test_hyperopt_format_results(hyperopt):
@ -704,7 +704,7 @@ def test_simplified_interface_roi_stoploss(mocker, hyperopt_conf, capsys) -> Non
assert hasattr(hyperopt.backtesting.strategy, "advise_entry")
assert hasattr(hyperopt, "max_open_trades")
assert hyperopt.max_open_trades == hyperopt_conf['max_open_trades']
assert hasattr(hyperopt, "position_stacking")
assert hasattr(hyperopt.backtesting, "_position_stacking")
def test_simplified_interface_all_failed(mocker, hyperopt_conf, caplog) -> None:
@ -778,7 +778,7 @@ def test_simplified_interface_buy(mocker, hyperopt_conf, capsys) -> None:
assert hasattr(hyperopt.backtesting.strategy, "advise_entry")
assert hasattr(hyperopt, "max_open_trades")
assert hyperopt.max_open_trades == hyperopt_conf['max_open_trades']
assert hasattr(hyperopt, "position_stacking")
assert hasattr(hyperopt.backtesting, "_position_stacking")
def test_simplified_interface_sell(mocker, hyperopt_conf, capsys) -> None:
@ -821,7 +821,7 @@ def test_simplified_interface_sell(mocker, hyperopt_conf, capsys) -> None:
assert hasattr(hyperopt.backtesting.strategy, "advise_entry")
assert hasattr(hyperopt, "max_open_trades")
assert hyperopt.max_open_trades == hyperopt_conf['max_open_trades']
assert hasattr(hyperopt, "position_stacking")
assert hasattr(hyperopt.backtesting, "_position_stacking")
@pytest.mark.parametrize("space", [

View File

@ -2404,8 +2404,10 @@ def test_Trade_object_idem():
'get_enter_tag_performance',
'get_mix_tag_performance',
'get_trading_volume',
'from_json',
)
EXCLUDES2 = ('trades', 'trades_open', 'bt_trades_open_pp', 'bt_open_open_trade_count',
'total_profit')
# Parent (LocalTrade) should have the same attributes
for item in trade:
@ -2416,7 +2418,7 @@ def test_Trade_object_idem():
# Fails if only a column is added without corresponding parent field
for item in localtrade:
if (not item.startswith('__')
and item not in ('trades', 'trades_open', 'total_profit')
and item not in EXCLUDES2
and type(getattr(LocalTrade, item)) not in (property, FunctionType)):
assert item in trade

View File

@ -0,0 +1,181 @@
from datetime import datetime, timezone
from freqtrade.persistence.trade_model import Trade
def test_trade_fromjson():
"""Test the Trade.from_json() method."""
trade_string = """{
"trade_id": 25,
"pair": "ETH/USDT",
"base_currency": "ETH",
"quote_currency": "USDT",
"is_open": false,
"exchange": "binance",
"amount": 407.0,
"amount_requested": 102.92547026,
"stake_amount": 102.7494348,
"strategy": "SampleStrategy55",
"buy_tag": "Strategy2",
"enter_tag": "Strategy2",
"timeframe": 5,
"fee_open": 0.001,
"fee_open_cost": 0.1027494,
"fee_open_currency": "ETH",
"fee_close": 0.001,
"fee_close_cost": 0.1054944,
"fee_close_currency": "USDT",
"open_date": "2022-10-18 09:12:42",
"open_timestamp": 1666084362912,
"open_rate": 0.2518998249562391,
"open_rate_requested": 0.2516,
"open_trade_value": 102.62575199,
"close_date": "2022-10-18 09:45:22",
"close_timestamp": 1666086322208,
"realized_profit": 2.76315361,
"close_rate": 0.2592,
"close_rate_requested": 0.2592,
"close_profit": 0.026865,
"close_profit_pct": 2.69,
"close_profit_abs": 2.76315361,
"trade_duration_s": 1959,
"trade_duration": 32,
"profit_ratio": 0.02686,
"profit_pct": 2.69,
"profit_abs": 2.76315361,
"sell_reason": "no longer good",
"exit_reason": "no longer good",
"exit_order_status": "closed",
"stop_loss_abs": 0.1981,
"stop_loss_ratio": -0.216,
"stop_loss_pct": -21.6,
"stoploss_order_id": null,
"stoploss_last_update": null,
"stoploss_last_update_timestamp": null,
"initial_stop_loss_abs": 0.1981,
"initial_stop_loss_ratio": -0.216,
"initial_stop_loss_pct": -21.6,
"min_rate": 0.2495,
"max_rate": 0.2592,
"leverage": 1.0,
"interest_rate": 0.0,
"liquidation_price": null,
"is_short": false,
"trading_mode": "spot",
"funding_fees": 0.0,
"open_order_id": null,
"orders": [
{
"amount": 102.0,
"safe_price": 0.2526,
"ft_order_side": "buy",
"order_filled_timestamp": 1666084370887,
"ft_is_entry": true,
"pair": "ETH/USDT",
"order_id": "78404228",
"status": "closed",
"average": 0.2526,
"cost": 25.7652,
"filled": 102.0,
"is_open": false,
"order_date": "2022-10-18 09:12:42",
"order_timestamp": 1666084362684,
"order_filled_date": "2022-10-18 09:12:50",
"order_type": "limit",
"price": 0.2526,
"remaining": 0.0
},
{
"amount": 102.0,
"safe_price": 0.2517,
"ft_order_side": "buy",
"order_filled_timestamp": 1666084379056,
"ft_is_entry": true,
"pair": "ETH/USDT",
"order_id": "78405139",
"status": "closed",
"average": 0.2517,
"cost": 25.6734,
"filled": 102.0,
"is_open": false,
"order_date": "2022-10-18 09:12:57",
"order_timestamp": 1666084377681,
"order_filled_date": "2022-10-18 09:12:59",
"order_type": "limit",
"price": 0.2517,
"remaining": 0.0
},
{
"amount": 102.0,
"safe_price": 0.2517,
"ft_order_side": "buy",
"order_filled_timestamp": 1666084389644,
"ft_is_entry": true,
"pair": "ETH/USDT",
"order_id": "78405265",
"status": "closed",
"average": 0.2517,
"cost": 25.6734,
"filled": 102.0,
"is_open": false,
"order_date": "2022-10-18 09:13:03",
"order_timestamp": 1666084383295,
"order_filled_date": "2022-10-18 09:13:09",
"order_type": "limit",
"price": 0.2517,
"remaining": 0.0
},
{
"amount": 102.0,
"safe_price": 0.2516,
"ft_order_side": "buy",
"order_filled_timestamp": 1666084723521,
"ft_is_entry": true,
"pair": "ETH/USDT",
"order_id": "78405395",
"status": "closed",
"average": 0.2516,
"cost": 25.6632,
"filled": 102.0,
"is_open": false,
"order_date": "2022-10-18 09:13:13",
"order_timestamp": 1666084393920,
"order_filled_date": "2022-10-18 09:18:43",
"order_type": "limit",
"price": 0.2516,
"remaining": 0.0
},
{
"amount": 407.0,
"safe_price": 0.2592,
"ft_order_side": "sell",
"order_filled_timestamp": 1666086322198,
"ft_is_entry": false,
"pair": "ETH/USDT",
"order_id": "78432649",
"status": "closed",
"average": 0.2592,
"cost": 105.4944,
"filled": 407.0,
"is_open": false,
"order_date": "2022-10-18 09:45:21",
"order_timestamp": 1666086321435,
"order_filled_date": "2022-10-18 09:45:22",
"order_type": "market",
"price": 0.2592,
"remaining": 0.0
}
]
}"""
trade = Trade.from_json(trade_string)
assert trade.id == 25
assert trade.pair == 'ETH/USDT'
assert trade.open_date == datetime(2022, 10, 18, 9, 12, 42, tzinfo=timezone.utc)
assert isinstance(trade.open_date, datetime)
assert trade.exit_reason == 'no longer good'
assert len(trade.orders) == 5
last_o = trade.orders[-1]
assert last_o.order_filled_date == datetime(2022, 10, 18, 9, 45, 22, tzinfo=timezone.utc)
assert isinstance(last_o.order_date, datetime)

View File

@ -3969,15 +3969,17 @@ def test__safe_exit_amount(default_conf_usdt, fee, caplog, mocker, amount_wallet
patch_get_signal(freqtrade)
if has_err:
with pytest.raises(DependencyException, match=r"Not enough amount to exit trade."):
assert freqtrade._safe_exit_amount(trade.pair, trade.amount)
assert freqtrade._safe_exit_amount(trade, trade.pair, trade.amount)
else:
wallet_update.reset_mock()
assert freqtrade._safe_exit_amount(trade.pair, trade.amount) == amount_wallet
assert trade.amount != amount_wallet
assert freqtrade._safe_exit_amount(trade, trade.pair, trade.amount) == amount_wallet
assert log_has_re(r'.*Falling back to wallet-amount.', caplog)
assert trade.amount == amount_wallet
assert wallet_update.call_count == 1
caplog.clear()
wallet_update.reset_mock()
assert freqtrade._safe_exit_amount(trade.pair, amount_wallet) == amount_wallet
assert freqtrade._safe_exit_amount(trade, trade.pair, amount_wallet) == amount_wallet
assert not log_has_re(r'.*Falling back to wallet-amount.', caplog)
assert wallet_update.call_count == 1

View File

@ -420,7 +420,7 @@ def test_dca_order_adjust(default_conf_usdt, ticker_usdt, leverage, fee, mocker)
assert trade.open_order_id is None
# Open rate is not adjusted yet
assert trade.open_rate == 1.99
assert trade.stake_amount == 60
assert pytest.approx(trade.stake_amount) == 60
assert trade.stop_loss_pct == -0.1
assert pytest.approx(trade.stop_loss) == 1.99 * (1 - 0.1 / leverage)
assert pytest.approx(trade.initial_stop_loss) == 1.99 * (1 - 0.1 / leverage)
@ -446,7 +446,7 @@ def test_dca_order_adjust(default_conf_usdt, ticker_usdt, leverage, fee, mocker)
assert len(trade.orders) == 4
assert trade.open_order_id is not None
assert trade.open_rate == 1.99
assert trade.stake_amount == 60
assert pytest.approx(trade.stake_amount) == 60
assert trade.orders[-1].price == 1.95
assert pytest.approx(trade.orders[-1].cost) == 120 * leverage