Merge branch 'develop' into pr/froggleston/7861
freqtrade/__init__.py
@@ -1,19 +1,20 @@
 """ Freqtrade bot """
-__version__ = '2022.12.dev'
+__version__ = '2023.3.dev'

 if 'dev' in __version__:
+    from pathlib import Path
     try:
         import subprocess
+        freqtrade_basedir = Path(__file__).parent

         __version__ = __version__ + '-' + subprocess.check_output(
             ['git', 'log', '--format="%h"', '-n 1'],
-            stderr=subprocess.DEVNULL).decode("utf-8").rstrip().strip('"')
+            stderr=subprocess.DEVNULL, cwd=freqtrade_basedir).decode("utf-8").rstrip().strip('"')

     except Exception:  # pragma: no cover
         # git not available, ignore
         try:
             # Try Fallback to freqtrade_commit file (created by CI while building docker image)
-            from pathlib import Path
             versionfile = Path('./freqtrade_commit')
             if versionfile.is_file():
                 __version__ = f"docker-{__version__}-{versionfile.read_text()[:8]}"
freqtrade/__main__.py (0 changes) Normal file → Executable file
freqtrade/commands/__init__.py
@@ -22,5 +22,6 @@ from freqtrade.commands.optimize_commands import (start_backtesting, start_backt
     start_edge, start_hyperopt)
 from freqtrade.commands.pairlist_commands import start_test_pairlist
 from freqtrade.commands.plot_commands import start_plot_dataframe, start_plot_profit
+from freqtrade.commands.strategy_utils_commands import start_strategy_update
 from freqtrade.commands.trade_commands import start_trading
 from freqtrade.commands.webserver_commands import start_webserver
freqtrade/commands/analyze_commands.py (0 changes) Executable file → Normal file
freqtrade/commands/arguments.py
@@ -112,10 +112,13 @@ ARGS_ANALYZE_ENTRIES_EXITS = ["exportfilename", "analysis_groups", "enter_reason
 NO_CONF_REQURIED = ["convert-data", "convert-trade-data", "download-data", "list-timeframes",
                     "list-markets", "list-pairs", "list-strategies", "list-freqaimodels",
                     "list-data", "hyperopt-list", "hyperopt-show", "backtest-filter",
-                    "plot-dataframe", "plot-profit", "show-trades", "trades-to-ohlcv"]
+                    "plot-dataframe", "plot-profit", "show-trades", "trades-to-ohlcv",
+                    "strategy-updater"]

 NO_CONF_ALLOWED = ["create-userdir", "list-exchanges", "new-strategy"]

+ARGS_STRATEGY_UTILS = ["strategy_list", "strategy_path", "recursive_strategy_search"]
+

 class Arguments:
     """
@@ -199,8 +202,8 @@ class Arguments:
                                        start_list_freqAI_models, start_list_markets,
                                        start_list_strategies, start_list_timeframes,
                                        start_new_config, start_new_strategy, start_plot_dataframe,
-                                       start_plot_profit, start_show_trades, start_test_pairlist,
-                                       start_trading, start_webserver)
+                                       start_plot_profit, start_show_trades, start_strategy_update,
+                                       start_test_pairlist, start_trading, start_webserver)

         subparsers = self.parser.add_subparsers(dest='command',
                                                 # Use custom message when no subhandler is added
@@ -441,3 +444,11 @@ class Arguments:
                                               parents=[_common_parser])
         webserver_cmd.set_defaults(func=start_webserver)
         self._build_args(optionlist=ARGS_WEBSERVER, parser=webserver_cmd)
+
+        # Add strategy_updater subcommand
+        strategy_updater_cmd = subparsers.add_parser('strategy-updater',
+                                                     help='updates outdated strategy'
+                                                     'files to the current version',
+                                                     parents=[_common_parser])
+        strategy_updater_cmd.set_defaults(func=start_strategy_update)
+        self._build_args(optionlist=ARGS_STRATEGY_UTILS, parser=strategy_updater_cmd)
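Note (not part of the diff): the new `strategy-updater` subcommand is wired up with the same argparse pattern the other commands use: `set_defaults(func=...)` binds the subparser to its handler, and the top-level dispatcher later calls `args.func`. A minimal, self-contained sketch of that dispatch pattern (names are illustrative, not freqtrade's real wiring):

    import argparse


    def start_strategy_update(args):
        # Stand-in handler; freqtrade passes the parsed args to the real one.
        print(f"updating strategies: {args.strategy_list}")


    parser = argparse.ArgumentParser(prog="freqtrade")
    subparsers = parser.add_subparsers(dest="command")
    cmd = subparsers.add_parser("strategy-updater",
                                help="updates outdated strategy files to the current version")
    cmd.add_argument("--strategy-list", nargs="+", dest="strategy_list", default=None)
    cmd.set_defaults(func=start_strategy_update)

    args = parser.parse_args(["strategy-updater", "--strategy-list", "MyStrategy"])
    args.func(args)  # dispatches to start_strategy_update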
freqtrade/commands/build_config_commands.py
@@ -108,7 +108,7 @@ def ask_user_config() -> Dict[str, Any]:
                 "binance",
                 "binanceus",
                 "bittrex",
-                "gateio",
+                "gate",
                 "huobi",
                 "kraken",
                 "kucoin",
@@ -123,7 +123,7 @@ def ask_user_config() -> Dict[str, Any]:
            "message": "Do you want to trade Perpetual Swaps (perpetual futures)?",
            "default": False,
            "filter": lambda val: 'futures' if val else 'spot',
-           "when": lambda x: x["exchange_name"] in ['binance', 'gateio', 'okx'],
+           "when": lambda x: x["exchange_name"] in ['binance', 'gate', 'okx'],
        },
        {
            "type": "autocomplete",
freqtrade/commands/cli_options.py
@@ -251,7 +251,8 @@ AVAILABLE_CLI_OPTIONS = {
     "spaces": Arg(
         '--spaces',
         help='Specify which parameters to hyperopt. Space-separated list.',
-        choices=['all', 'buy', 'sell', 'roi', 'stoploss', 'trailing', 'protection', 'default'],
+        choices=['all', 'buy', 'sell', 'roi', 'stoploss',
+                 'trailing', 'protection', 'trades', 'default'],
         nargs='+',
         default='default',
     ),
@@ -632,10 +633,11 @@ AVAILABLE_CLI_OPTIONS = {
               "1: by enter_tag, "
               "2: by enter_tag and exit_tag, "
               "3: by pair and enter_tag, "
-              "4: by pair, enter_ and exit_tag (this can get quite large)"),
+              "4: by pair, enter_ and exit_tag (this can get quite large), "
+              "5: by exit_tag"),
         nargs='+',
         default=[],
-        choices=['0', '1', '2', '3', '4'],
+        choices=['0', '1', '2', '3', '4', '5'],
     ),
     "enter_reason_list": Arg(
         "--enter-reason-list",
freqtrade/commands/data_commands.py
@@ -5,7 +5,7 @@ from datetime import datetime, timedelta
 from typing import Any, Dict, List

 from freqtrade.configuration import TimeRange, setup_utils_configuration
-from freqtrade.constants import DATETIME_PRINT_FORMAT
+from freqtrade.constants import DATETIME_PRINT_FORMAT, Config
 from freqtrade.data.converter import convert_ohlcv_format, convert_trades_format
 from freqtrade.data.history import (convert_trades_to_ohlcv, refresh_backtest_ohlcv_data,
                                     refresh_backtest_trades_data)
@@ -14,20 +14,30 @@ from freqtrade.exceptions import OperationalException
 from freqtrade.exchange import market_is_active, timeframe_to_minutes
 from freqtrade.plugins.pairlist.pairlist_helpers import dynamic_expand_pairlist, expand_pairlist
 from freqtrade.resolvers import ExchangeResolver
+from freqtrade.util.binance_mig import migrate_binance_futures_data


 logger = logging.getLogger(__name__)


+def _data_download_sanity(config: Config) -> None:
+    if 'days' in config and 'timerange' in config:
+        raise OperationalException("--days and --timerange are mutually exclusive. "
+                                   "You can only specify one or the other.")
+
+    if 'pairs' not in config:
+        raise OperationalException(
+            "Downloading data requires a list of pairs. "
+            "Please check the documentation on how to configure this.")
+
+
 def start_download_data(args: Dict[str, Any]) -> None:
     """
     Download data (former download_backtest_data.py script)
     """
     config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE)

-    if 'days' in config and 'timerange' in config:
-        raise OperationalException("--days and --timerange are mutually exclusive. "
-                                   "You can only specify one or the other.")
+    _data_download_sanity(config)
     timerange = TimeRange()
     if 'days' in config:
         time_since = (datetime.now() - timedelta(days=config['days'])).strftime("%Y%m%d")
@@ -39,11 +49,6 @@ def start_download_data(args: Dict[str, Any]) -> None:
     # Remove stake-currency to skip checks which are not relevant for datadownload
     config['stake_currency'] = ''

-    if 'pairs' not in config:
-        raise OperationalException(
-            "Downloading data requires a list of pairs. "
-            "Please check the documentation on how to configure this.")
-
     pairs_not_available: List[str] = []

     # Init exchange
@@ -86,6 +91,7 @@ def start_download_data(args: Dict[str, Any]) -> None:
                     "Please use `--dl-trades` instead for this exchange "
                     "(will unfortunately take a long time)."
                 )
+            migrate_binance_futures_data(config)
             pairs_not_available = refresh_backtest_ohlcv_data(
                 exchange, pairs=expanded_pairs, timeframes=config['timeframes'],
                 datadir=config['datadir'], timerange=timerange,
@@ -145,6 +151,7 @@ def start_convert_data(args: Dict[str, Any], ohlcv: bool = True) -> None:
     """
     config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
     if ohlcv:
+        migrate_binance_futures_data(config)
         candle_types = [CandleType.from_string(ct) for ct in config.get('candle_types', ['spot'])]
         for candle_type in candle_types:
             convert_ohlcv_format(config,
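Note (not part of the diff): the refactor only moves two pre-existing checks into `_data_download_sanity` so they run before any other setup. A minimal sketch of the same guard logic, assuming a plain dict config and a stand-in exception class:

    class OperationalException(Exception):
        # Stand-in for freqtrade.exceptions.OperationalException
        pass


    def _data_download_sanity(config: dict) -> None:
        if 'days' in config and 'timerange' in config:
            raise OperationalException("--days and --timerange are mutually exclusive. "
                                       "You can only specify one or the other.")
        if 'pairs' not in config:
            raise OperationalException("Downloading data requires a list of pairs.")


    _data_download_sanity({'days': 30, 'pairs': ['BTC/USDT']})       # passes
    # _data_download_sanity({'days': 30, 'timerange': '20230101-'})  # would raise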
freqtrade/commands/db_commands.py
@@ -1,7 +1,7 @@
 import logging
 from typing import Any, Dict

-from sqlalchemy import func
+from sqlalchemy import func, select

 from freqtrade.configuration.config_setup import setup_utils_configuration
 from freqtrade.enums import RunMode
@@ -20,7 +20,7 @@ def start_convert_db(args: Dict[str, Any]) -> None:
     config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

     init_db(config['db_url'])
-    session_target = Trade._session
+    session_target = Trade.session
     init_db(config['db_url_from'])
     logger.info("Starting db migration.")

@@ -36,16 +36,16 @@ def start_convert_db(args: Dict[str, Any]) -> None:

     session_target.commit()

-    for pairlock in PairLock.query:
+    for pairlock in PairLock.get_all_locks():
         pairlock_count += 1
         make_transient(pairlock)
         session_target.add(pairlock)
     session_target.commit()

     # Update sequences
-    max_trade_id = session_target.query(func.max(Trade.id)).scalar()
-    max_order_id = session_target.query(func.max(Order.id)).scalar()
-    max_pairlock_id = session_target.query(func.max(PairLock.id)).scalar()
+    max_trade_id = session_target.scalar(select(func.max(Trade.id)))
+    max_order_id = session_target.scalar(select(func.max(Order.id)))
+    max_pairlock_id = session_target.scalar(select(func.max(PairLock.id)))

     set_sequence_ids(session_target.get_bind(),
                      trade_id=max_trade_id,
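Note (not part of the diff): `query(...).scalar()` → `scalar(select(...))` is the SQLAlchemy 2.0-style API. A self-contained comparison of the two forms against an in-memory SQLite database, with an illustrative stand-in model:

    from sqlalchemy import Column, Integer, create_engine, func, select
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()


    class Trade(Base):  # illustrative stand-in for freqtrade's Trade model
        __tablename__ = 'trades'
        id = Column(Integer, primary_key=True)


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add_all([Trade(), Trade(), Trade()])
        session.commit()
        max_id_legacy = session.query(func.max(Trade.id)).scalar()  # 1.x style
        max_id_new = session.scalar(select(func.max(Trade.id)))     # 2.0 style, as in the diff
        assert max_id_legacy == max_id_new == 3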
freqtrade/commands/hyperopt_commands.py (0 changes) Executable file → Normal file
freqtrade/commands/strategy_utils_commands.py (new file, 55 lines)
@@ -0,0 +1,55 @@
+import logging
+import sys
+import time
+from pathlib import Path
+from typing import Any, Dict
+
+from freqtrade.configuration import setup_utils_configuration
+from freqtrade.enums import RunMode
+from freqtrade.resolvers import StrategyResolver
+from freqtrade.strategy.strategyupdater import StrategyUpdater
+
+
+logger = logging.getLogger(__name__)
+
+
+def start_strategy_update(args: Dict[str, Any]) -> None:
+    """
+    Start the strategy updating script
+    :param args: Cli args from Arguments()
+    :return: None
+    """
+
+    if sys.version_info == (3, 8):  # pragma: no cover
+        sys.exit("Freqtrade strategy updater requires Python version >= 3.9")
+
+    config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
+
+    strategy_objs = StrategyResolver.search_all_objects(
+        config, enum_failed=False, recursive=config.get('recursive_strategy_search', False))
+
+    filtered_strategy_objs = []
+    if args['strategy_list']:
+        filtered_strategy_objs = [
+            strategy_obj for strategy_obj in strategy_objs
+            if strategy_obj['name'] in args['strategy_list']
+        ]
+
+    else:
+        # Use all available entries.
+        filtered_strategy_objs = strategy_objs
+
+    processed_locations = set()
+    for strategy_obj in filtered_strategy_objs:
+        if strategy_obj['location'] not in processed_locations:
+            processed_locations.add(strategy_obj['location'])
+            start_conversion(strategy_obj, config)
+
+
+def start_conversion(strategy_obj, config):
+    print(f"Conversion of {Path(strategy_obj['location']).name} started.")
+    instance_strategy_updater = StrategyUpdater()
+    start = time.perf_counter()
+    instance_strategy_updater.start(config, strategy_obj)
+    elapsed = time.perf_counter() - start
+    print(f"Conversion of {Path(strategy_obj['location']).name} took {elapsed:.1f} seconds.")
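Note (not part of the diff): `start_strategy_update` deduplicates by file location, presumably because one strategy file can hold several strategy classes and the updater works on whole files. Sketch of that loop with plain dicts standing in for freqtrade's resolver objects:

    strategy_objs = [
        {'name': 'StratA', 'location': 'user_data/strategies/strat.py'},
        {'name': 'StratB', 'location': 'user_data/strategies/strat.py'},
        {'name': 'StratC', 'location': 'user_data/strategies/other.py'},
    ]

    processed_locations = set()
    for strategy_obj in strategy_objs:
        if strategy_obj['location'] not in processed_locations:
            processed_locations.add(strategy_obj['location'])
            print(f"would convert {strategy_obj['location']}")
    # strat.py is printed once, although it holds two strategies.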
freqtrade/commands/trade_commands.py
@@ -1,4 +1,5 @@
 import logging
+import signal
 from typing import Any, Dict


@@ -12,15 +13,20 @@ def start_trading(args: Dict[str, Any]) -> int:
     # Import here to avoid loading worker module when it's not used
     from freqtrade.worker import Worker

+    def term_handler(signum, frame):
+        # Raise KeyboardInterrupt - so we can handle it in the same way as Ctrl-C
+        raise KeyboardInterrupt()
+
     # Create and run worker
     worker = None
     try:
+        signal.signal(signal.SIGTERM, term_handler)
        worker = Worker(args)
        worker.run()
     except Exception as e:
         logger.error(str(e))
         logger.exception("Fatal exception!")
-    except KeyboardInterrupt:
+    except (KeyboardInterrupt):
         logger.info('SIGINT received, aborting ...')
     finally:
         if worker:
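Note (not part of the diff): routing SIGTERM into KeyboardInterrupt lets a single except branch handle both Ctrl-C and e.g. `docker stop`. Minimal POSIX sketch of the same trick:

    import os
    import signal


    def term_handler(signum, frame):
        # Re-raise as KeyboardInterrupt so SIGTERM and Ctrl-C share one code path.
        raise KeyboardInterrupt()


    signal.signal(signal.SIGTERM, term_handler)

    try:
        os.kill(os.getpid(), signal.SIGTERM)  # simulate `docker stop`
    except KeyboardInterrupt:
        print("SIGINT/SIGTERM received, aborting ...")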
freqtrade/configuration/config_validation.py
@@ -355,6 +355,13 @@ def _validate_freqai_include_timeframes(conf: Dict[str, Any]) -> None:
             f"Main timeframe of {main_tf} must be smaller or equal to FreqAI "
             f"`include_timeframes`.Offending include-timeframes: {', '.join(offending_lines)}")

+    # Ensure that the base timeframe is included in the include_timeframes list
+    if main_tf not in freqai_include_timeframes:
+        feature_parameters = conf.get('freqai', {}).get('feature_parameters', {})
+        include_timeframes = [main_tf] + freqai_include_timeframes
+        conf.get('freqai', {}).get('feature_parameters', {}) \
+            .update({**feature_parameters, 'include_timeframes': include_timeframes})
+

 def _validate_freqai_backtest(conf: Dict[str, Any]) -> None:
     if conf.get('runmode', RunMode.OTHER) == RunMode.BACKTEST:
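Note (not part of the diff): the added block guarantees the strategy's main timeframe always leads `include_timeframes`. The same dict update, runnable in isolation:

    conf = {
        'timeframe': '5m',
        'freqai': {'feature_parameters': {'include_timeframes': ['1h', '4h']}},
    }

    main_tf = conf['timeframe']
    freqai_include_timeframes = conf['freqai']['feature_parameters']['include_timeframes']

    if main_tf not in freqai_include_timeframes:
        feature_parameters = conf.get('freqai', {}).get('feature_parameters', {})
        include_timeframes = [main_tf] + freqai_include_timeframes
        conf.get('freqai', {}).get('feature_parameters', {}) \
            .update({**feature_parameters, 'include_timeframes': include_timeframes})

    assert conf['freqai']['feature_parameters']['include_timeframes'] == ['5m', '1h', '4h']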
freqtrade/configuration/configuration.py
@@ -28,7 +28,7 @@ class Configuration:
     Reuse this class for the bot, backtesting, hyperopt and every script that required configuration
     """

-    def __init__(self, args: Dict[str, Any], runmode: RunMode = None) -> None:
+    def __init__(self, args: Dict[str, Any], runmode: Optional[RunMode] = None) -> None:
         self.args = args
         self.config: Optional[Config] = None
         self.runmode = runmode
freqtrade/configuration/environment_vars.py
@@ -32,7 +32,7 @@ def flat_vars_to_nested_dict(env_dict: Dict[str, Any], prefix: str) -> Dict[str,
     :param prefix: Prefix to consider (usually FREQTRADE__)
     :return: Nested dict based on available and relevant variables.
     """
-    no_convert = ['CHAT_ID']
+    no_convert = ['CHAT_ID', 'PASSWORD']
     relevant_vars: Dict[str, Any] = {}

     for env_var, val in sorted(env_dict.items()):
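Note (not part of the diff): `flat_vars_to_nested_dict` maps `FREQTRADE__SECTION__KEY` environment variables onto nested config keys; names in `no_convert` (now also PASSWORD) keep their raw string value instead of being type-coerced. A simplified sketch of the nesting convention, not the real implementation:

    def flat_vars_to_nested_dict(env_dict: dict, prefix: str) -> dict:
        # Simplified: the real version also type-converts values, except for
        # names in no_convert (e.g. CHAT_ID, PASSWORD stay strings).
        relevant = {k[len(prefix):]: v for k, v in env_dict.items() if k.startswith(prefix)}
        result: dict = {}
        for flat_key, val in relevant.items():
            node = result
            *parents, leaf = flat_key.lower().split('__')
            for part in parents:
                node = node.setdefault(part, {})
            node[leaf] = val
        return result


    env = {'FREQTRADE__TELEGRAM__CHAT_ID': '87654321'}
    print(flat_vars_to_nested_dict(env, 'FREQTRADE__'))
    # {'telegram': {'chat_id': '87654321'}}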
freqtrade/configuration/load_config.py
@@ -6,7 +6,7 @@ import re
 import sys
 from copy import deepcopy
 from pathlib import Path
-from typing import Any, Dict, List
+from typing import Any, Dict, List, Optional

 import rapidjson

@@ -58,7 +58,7 @@ def load_config_file(path: str) -> Dict[str, Any]:
     """
     try:
         # Read config from stdin if requested in the options
-        with open(path) if path != '-' else sys.stdin as file:
+        with Path(path).open() if path != '-' else sys.stdin as file:
             config = rapidjson.load(file, parse_mode=CONFIG_PARSE_MODE)
     except FileNotFoundError:
         raise OperationalException(
@@ -75,7 +75,8 @@ def load_config_file(path: str) -> Dict[str, Any]:
     return config


-def load_from_files(files: List[str], base_path: Path = None, level: int = 0) -> Dict[str, Any]:
+def load_from_files(
+        files: List[str], base_path: Optional[Path] = None, level: int = 0) -> Dict[str, Any]:
     """
     Recursively load configuration files if specified.
     Sub-files are assumed to be relative to the initial config.
freqtrade/constants.py
@@ -5,7 +5,7 @@ bot constants
 """
 from typing import Any, Dict, List, Literal, Tuple

-from freqtrade.enums import CandleType, RPCMessageType
+from freqtrade.enums import CandleType, PriceType, RPCMessageType


 DEFAULT_CONFIG = 'config.json'
@@ -25,13 +25,14 @@ PRICING_SIDES = ['ask', 'bid', 'same', 'other']
 ORDERTYPE_POSSIBILITIES = ['limit', 'market']
 _ORDERTIF_POSSIBILITIES = ['GTC', 'FOK', 'IOC', 'PO']
 ORDERTIF_POSSIBILITIES = _ORDERTIF_POSSIBILITIES + [t.lower() for t in _ORDERTIF_POSSIBILITIES]
+STOPLOSS_PRICE_TYPES = [p for p in PriceType]
 HYPEROPT_LOSS_BUILTIN = ['ShortTradeDurHyperOptLoss', 'OnlyProfitHyperOptLoss',
                          'SharpeHyperOptLoss', 'SharpeHyperOptLossDaily',
                          'SortinoHyperOptLoss', 'SortinoHyperOptLossDaily',
                          'CalmarHyperOptLoss',
                          'MaxDrawDownHyperOptLoss', 'MaxDrawDownRelativeHyperOptLoss',
                          'ProfitDrawDownHyperOptLoss']
-AVAILABLE_PAIRLISTS = ['StaticPairList', 'VolumePairList', 'ProducerPairList',
+AVAILABLE_PAIRLISTS = ['StaticPairList', 'VolumePairList', 'ProducerPairList', 'RemotePairList',
                        'AgeFilter', 'OffsetFilter', 'PerformanceFilter',
                        'PrecisionFilter', 'PriceFilter', 'RangeStabilityFilter',
                        'ShuffleFilter', 'SpreadFilter', 'VolatilityFilter']
@@ -61,6 +62,7 @@ USERPATH_FREQAIMODELS = 'freqaimodels'

 TELEGRAM_SETTING_OPTIONS = ['on', 'off', 'silent']
 WEBHOOK_FORMAT_OPTIONS = ['form', 'json', 'raw']
+FULL_DATAFRAME_THRESHOLD = 100

 ENV_VAR_PREFIX = 'FREQTRADE__'

@@ -228,6 +230,7 @@ CONF_SCHEMA = {
                              'default': 'market'},
                 'stoploss': {'type': 'string', 'enum': ORDERTYPE_POSSIBILITIES},
                 'stoploss_on_exchange': {'type': 'boolean'},
+                'stoploss_price_type': {'type': 'string', 'enum': STOPLOSS_PRICE_TYPES},
                 'stoploss_on_exchange_interval': {'type': 'number'},
                 'stoploss_on_exchange_limit_ratio': {'type': 'number', 'minimum': 0.0,
                                                      'maximum': 1.0}
@@ -543,7 +546,7 @@ CONF_SCHEMA = {
                 "enabled": {"type": "boolean", "default": False},
                 "keras": {"type": "boolean", "default": False},
                 "write_metrics_to_disk": {"type": "boolean", "default": False},
-                "purge_old_models": {"type": "boolean", "default": True},
+                "purge_old_models": {"type": ["boolean", "number"], "default": 2},
                 "conv_width": {"type": "integer", "default": 1},
                 "train_period_days": {"type": "integer", "default": 0},
                 "backtest_period_days": {"type": "number", "default": 7},
@@ -565,7 +568,9 @@ CONF_SCHEMA = {
                                 "shuffle": {"type": "boolean", "default": False},
                                 "nu": {"type": "number", "default": 0.1}
                                 },
-                            }
+                            },
+                        "shuffle_after_split": {"type": "boolean", "default": False},
+                        "buffer_train_data_candles": {"type": "integer", "default": 0}
                     },
                     "required": ["include_timeframes", "include_corr_pairlist", ]
                 },
@@ -583,6 +588,7 @@ CONF_SCHEMA = {
                 "rl_config": {
                     "type": "object",
                     "properties": {
+                        "drop_ohlc_from_features": {"type": "boolean", "default": False},
                         "train_cycles": {"type": "integer"},
                         "max_trade_duration_candles": {"type": "integer"},
                         "add_state_info": {"type": "boolean", "default": False},
@@ -608,9 +614,8 @@ CONF_SCHEMA = {
                 "backtest_period_days",
                 "identifier",
                 "feature_parameters",
-                "data_split_parameters",
-                "model_training_parameters"
+                "data_split_parameters"
             ]
             },
         },
     }
@@ -636,7 +641,6 @@ SCHEMA_TRADE_REQUIRED = [

 SCHEMA_BACKTEST_REQUIRED = [
     'exchange',
-    'max_open_trades',
     'stake_currency',
     'stake_amount',
     'dry_run_wallet',
@@ -646,6 +650,7 @@ SCHEMA_BACKTEST_REQUIRED = [
 SCHEMA_BACKTEST_REQUIRED_FINAL = SCHEMA_BACKTEST_REQUIRED + [
     'stoploss',
     'minimal_roi',
+    'max_open_trades'
 ]

 SCHEMA_MINIMAL_REQUIRED = [
@@ -679,5 +684,7 @@ EntryExit = Literal['entry', 'exit']
 BuySell = Literal['buy', 'sell']
 MakerTaker = Literal['maker', 'taker']
+BidAsk = Literal['bid', 'ask']
+OBLiteral = Literal['asks', 'bids']

 Config = Dict[str, Any]
 IntOrInf = float
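Note (not part of the diff): `STOPLOSS_PRICE_TYPES = [p for p in PriceType]` simply materialises every member of the new enum for the JSON schema, and `IntOrInf = float` is an annotation alias signalling that a value may be an int count or `float('inf')` for "unlimited"; Python's `float` annotation covers both. Tiny illustration:

    IntOrInf = float  # annotation alias: a plain int count or float('inf')


    def over_limit(open_trades: int, max_open_trades: IntOrInf) -> bool:
        # float('inf') compares cleanly against ints, so one code path covers both
        return open_trades > max_open_trades


    print(over_limit(3, 2))             # True
    print(over_limit(3, float('inf')))  # False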
freqtrade/data/btanalysis.py
@@ -10,7 +10,7 @@ from typing import Any, Dict, List, Optional, Union
 import numpy as np
 import pandas as pd

-from freqtrade.constants import LAST_BT_RESULT_FN
+from freqtrade.constants import LAST_BT_RESULT_FN, IntOrInf
 from freqtrade.exceptions import OperationalException
 from freqtrade.misc import json_load
 from freqtrade.optimize.backtest_caching import get_backtest_metadata_filename
@@ -20,8 +20,8 @@ from freqtrade.persistence import LocalTrade, Trade, init_db
 logger = logging.getLogger(__name__)

 # Newest format
-BT_DATA_COLUMNS = ['pair', 'stake_amount', 'amount', 'open_date', 'close_date',
-                   'open_rate', 'close_rate',
+BT_DATA_COLUMNS = ['pair', 'stake_amount', 'max_stake_amount', 'amount',
+                   'open_date', 'close_date', 'open_rate', 'close_rate',
                    'fee_open', 'fee_close', 'trade_duration',
                    'profit_ratio', 'profit_abs', 'exit_reason',
                    'initial_stop_loss_abs', 'initial_stop_loss_ratio', 'stop_loss_abs',
@@ -90,7 +90,8 @@ def get_latest_hyperopt_filename(directory: Union[Path, str]) -> str:
     return 'hyperopt_results.pickle'


-def get_latest_hyperopt_file(directory: Union[Path, str], predef_filename: str = None) -> Path:
+def get_latest_hyperopt_file(
+        directory: Union[Path, str], predef_filename: Optional[str] = None) -> Path:
     """
     Get latest hyperopt export based on '.last_result.json'.
     :param directory: Directory to search for last result
@@ -193,7 +194,7 @@ def get_backtest_resultlist(dirname: Path):


 def find_existing_backtest_stats(dirname: Union[Path, str], run_ids: Dict[str, str],
-                                 min_backtest_date: datetime = None) -> Dict[str, Any]:
+                                 min_backtest_date: Optional[datetime] = None) -> Dict[str, Any]:
     """
     Find existing backtest stats that match specified run IDs and load them.
     :param dirname: pathlib.Path object, or string pointing to the file.
@@ -241,6 +242,33 @@ def find_existing_backtest_stats(dirname: Union[Path, str], run_ids: Dict[str, s
     return results


+def _load_backtest_data_df_compatibility(df: pd.DataFrame) -> pd.DataFrame:
+    """
+    Compatibility support for older backtest data.
+    """
+    df['open_date'] = pd.to_datetime(df['open_date'],
+                                     utc=True,
+                                     infer_datetime_format=True
+                                     )
+    df['close_date'] = pd.to_datetime(df['close_date'],
+                                      utc=True,
+                                      infer_datetime_format=True
+                                      )
+    # Compatibility support for pre short Columns
+    if 'is_short' not in df.columns:
+        df['is_short'] = False
+    if 'leverage' not in df.columns:
+        df['leverage'] = 1.0
+    if 'enter_tag' not in df.columns:
+        df['enter_tag'] = df['buy_tag']
+        df = df.drop(['buy_tag'], axis=1)
+    if 'max_stake_amount' not in df.columns:
+        df['max_stake_amount'] = df['stake_amount']
+    if 'orders' not in df.columns:
+        df['orders'] = None
+    return df
+
+
 def load_backtest_data(filename: Union[Path, str], strategy: Optional[str] = None) -> pd.DataFrame:
     """
     Load backtest data file.
@@ -269,24 +297,7 @@ def load_backtest_data(filename: Union[Path, str], strategy: Optional[str] = Non
             data = data['strategy'][strategy]['trades']
             df = pd.DataFrame(data)
             if not df.empty:
-                df['open_date'] = pd.to_datetime(df['open_date'],
-                                                 utc=True,
-                                                 infer_datetime_format=True
-                                                 )
-                df['close_date'] = pd.to_datetime(df['close_date'],
-                                                  utc=True,
-                                                  infer_datetime_format=True
-                                                  )
-                # Compatibility support for pre short Columns
-                if 'is_short' not in df.columns:
-                    df['is_short'] = 0
-                if 'leverage' not in df.columns:
-                    df['leverage'] = 1.0
-                if 'enter_tag' not in df.columns:
-                    df['enter_tag'] = df['buy_tag']
-                    df = df.drop(['buy_tag'], axis=1)
-                if 'orders' not in df.columns:
-                    df['orders'] = None
+                df = _load_backtest_data_df_compatibility(df)

         else:
             # old format - only with lists.
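Note (not part of the diff): `_load_backtest_data_df_compatibility` centralises the defaulting of columns that older backtest exports lack, so the same logic can be reused outside `load_backtest_data`. A trimmed-down sketch on a toy frame:

    import pandas as pd


    def add_compat_defaults(df: pd.DataFrame) -> pd.DataFrame:
        # Same idea as _load_backtest_data_df_compatibility, reduced to two columns.
        df['open_date'] = pd.to_datetime(df['open_date'], utc=True)
        if 'is_short' not in df.columns:
            df['is_short'] = False
        if 'max_stake_amount' not in df.columns:
            df['max_stake_amount'] = df['stake_amount']
        return df


    old_export = pd.DataFrame({'open_date': ['2023-01-01 00:00:00'], 'stake_amount': [100.0]})
    print(add_compat_defaults(old_export)[['is_short', 'max_stake_amount']])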
@@ -322,7 +333,7 @@ def analyze_trade_parallelism(results: pd.DataFrame, timeframe: str) -> pd.DataF


 def evaluate_result_multi(results: pd.DataFrame, timeframe: str,
-                          max_open_trades: int) -> pd.DataFrame:
+                          max_open_trades: IntOrInf) -> pd.DataFrame:
     """
     Find overlapping trades by expanding each trade once per period it was open
     and then counting overlaps
@@ -335,7 +346,7 @@ def evaluate_result_multi(results: pd.DataFrame, timeframe: str,
     return df_final[df_final['open_trades'] > max_open_trades]


-def trade_list_to_dataframe(trades: List[LocalTrade]) -> pd.DataFrame:
+def trade_list_to_dataframe(trades: Union[List[Trade], List[LocalTrade]]) -> pd.DataFrame:
     """
     Convert list of Trade objects to pandas Dataframe
     :param trades: List of trade objects
@@ -362,7 +373,7 @@ def load_trades_from_db(db_url: str, strategy: Optional[str] = None) -> pd.DataF
     filters = []
     if strategy:
         filters.append(Trade.strategy == strategy)
-    trades = trade_list_to_dataframe(Trade.get_trades(filters).all())
+    trades = trade_list_to_dataframe(list(Trade.get_trades(filters).all()))

     return trades
freqtrade/data/dataprovider.py
@@ -9,14 +9,17 @@ from collections import deque
 from datetime import datetime, timezone
 from typing import Any, Dict, List, Optional, Tuple

-from pandas import DataFrame
+from pandas import DataFrame, Timedelta, Timestamp, to_timedelta

 from freqtrade.configuration import TimeRange
-from freqtrade.constants import Config, ListPairsWithTimeframes, PairWithTimeframe
+from freqtrade.constants import (FULL_DATAFRAME_THRESHOLD, Config, ListPairsWithTimeframes,
+                                 PairWithTimeframe)
 from freqtrade.data.history import load_pair_history
 from freqtrade.enums import CandleType, RPCMessageType, RunMode
 from freqtrade.exceptions import ExchangeError, OperationalException
 from freqtrade.exchange import Exchange, timeframe_to_seconds
+from freqtrade.exchange.types import OrderBook
+from freqtrade.misc import append_candles_to_dataframe
 from freqtrade.rpc import RPCManager
 from freqtrade.util import PeriodicCache
@@ -120,7 +123,7 @@ class DataProvider:
                 'type': RPCMessageType.ANALYZED_DF,
                 'data': {
                     'key': pair_key,
-                    'df': dataframe,
+                    'df': dataframe.tail(1),
                     'la': datetime.now(timezone.utc)
                 }
             }
@@ -131,7 +134,7 @@ class DataProvider:
             'data': pair_key,
         })

-    def _add_external_df(
+    def _replace_external_df(
         self,
         pair: str,
         dataframe: DataFrame,
@@ -157,6 +160,87 @@ class DataProvider:
         self.__producer_pairs_df[producer_name][pair_key] = (dataframe, _last_analyzed)
         logger.debug(f"External DataFrame for {pair_key} from {producer_name} added.")

+    def _add_external_df(
+        self,
+        pair: str,
+        dataframe: DataFrame,
+        last_analyzed: datetime,
+        timeframe: str,
+        candle_type: CandleType,
+        producer_name: str = "default"
+    ) -> Tuple[bool, int]:
+        """
+        Append a candle to the existing external dataframe. The incoming dataframe
+        must have at least 1 candle.
+
+        :param pair: pair to get the data for
+        :param timeframe: Timeframe to get data for
+        :param candle_type: Any of the enum CandleType (must match trading mode!)
+        :returns: False if the candle could not be appended, or the int number of missing candles.
+        """
+        pair_key = (pair, timeframe, candle_type)
+
+        if dataframe.empty:
+            # The incoming dataframe must have at least 1 candle
+            return (False, 0)
+
+        if len(dataframe) >= FULL_DATAFRAME_THRESHOLD:
+            # This is likely a full dataframe
+            # Add the dataframe to the dataprovider
+            self._replace_external_df(
+                pair,
+                dataframe,
+                last_analyzed=last_analyzed,
+                timeframe=timeframe,
+                candle_type=candle_type,
+                producer_name=producer_name
+            )
+            return (True, 0)
+
+        if (producer_name not in self.__producer_pairs_df
+           or pair_key not in self.__producer_pairs_df[producer_name]):
+            # We don't have data from this producer yet,
+            # or we don't have data for this pair_key
+            # return False and 1000 for the full df
+            return (False, 1000)
+
+        existing_df, _ = self.__producer_pairs_df[producer_name][pair_key]
+
+        # CHECK FOR MISSING CANDLES
+        # Convert the timeframe to a timedelta for pandas
+        timeframe_delta: Timedelta = to_timedelta(timeframe)
+        local_last: Timestamp = existing_df.iloc[-1]['date']  # We want the last date from our copy
+        # We want the first date from the incoming
+        incoming_first: Timestamp = dataframe.iloc[0]['date']
+
+        # Remove existing candles that are newer than the incoming first candle
+        existing_df1 = existing_df[existing_df['date'] < incoming_first]
+
+        candle_difference = (incoming_first - local_last) / timeframe_delta
+
+        # If the difference divided by the timeframe is 1, then this
+        # is the candle we want and the incoming data isn't missing any.
+        # If the candle_difference is more than 1, that means
+        # we missed some candles between our data and the incoming
+        # so return False and candle_difference.
+        if candle_difference > 1:
+            return (False, int(candle_difference))
+        if existing_df1.empty:
+            appended_df = dataframe
+        else:
+            appended_df = append_candles_to_dataframe(existing_df1, dataframe)
+
+        # Everything is good, we appended
+        self._replace_external_df(
+            pair,
+            appended_df,
+            last_analyzed=last_analyzed,
+            timeframe=timeframe,
+            candle_type=candle_type,
+            producer_name=producer_name
+        )
+        return (True, 0)
+
     def get_producer_df(
         self,
         pair: str,
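Note (not part of the diff): the gap check in `_add_external_df` leans on pandas timedelta division: a quotient of exactly 1 means the incoming candle directly follows the one we hold, anything larger means candles are missing. Worked example:

    from pandas import Timestamp, to_timedelta

    timeframe_delta = to_timedelta('5m')
    local_last = Timestamp('2023-03-01 12:00', tz='UTC')      # last candle we hold
    incoming_first = Timestamp('2023-03-01 12:15', tz='UTC')  # first incoming candle

    candle_difference = (incoming_first - local_last) / timeframe_delta
    print(candle_difference)  # 3.0 -> two candles missing, so reject and report the gap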
@@ -200,7 +284,7 @@ class DataProvider:
     def historic_ohlcv(
         self,
         pair: str,
-        timeframe: str = None,
+        timeframe: Optional[str] = None,
         candle_type: str = ''
     ) -> DataFrame:
         """
@@ -252,7 +336,7 @@ class DataProvider:
     def get_pair_dataframe(
         self,
         pair: str,
-        timeframe: str = None,
+        timeframe: Optional[str] = None,
         candle_type: str = ''
     ) -> DataFrame:
         """
@@ -334,16 +418,14 @@ class DataProvider:

     def refresh(self,
                 pairlist: ListPairsWithTimeframes,
-                helping_pairs: ListPairsWithTimeframes = None) -> None:
+                helping_pairs: Optional[ListPairsWithTimeframes] = None) -> None:
         """
         Refresh data, called with each cycle
         """
         if self._exchange is None:
             raise OperationalException(NO_EXCHANGE_EXCEPTION)
-        if helping_pairs:
-            self._exchange.refresh_latest_ohlcv(pairlist + helping_pairs)
-        else:
-            self._exchange.refresh_latest_ohlcv(pairlist)
+        final_pairs = (pairlist + helping_pairs) if helping_pairs else pairlist
+        self._exchange.refresh_latest_ohlcv(final_pairs)

     @property
     def available_pairs(self) -> ListPairsWithTimeframes:
@@ -358,7 +440,7 @@ class DataProvider:
     def ohlcv(
         self,
         pair: str,
-        timeframe: str = None,
+        timeframe: Optional[str] = None,
         copy: bool = True,
         candle_type: str = ''
     ) -> DataFrame:
@@ -406,7 +488,7 @@ class DataProvider:
         except ExchangeError:
             return {}

-    def orderbook(self, pair: str, maximum: int) -> Dict[str, List]:
+    def orderbook(self, pair: str, maximum: int) -> OrderBook:
         """
         Fetch latest l2 orderbook data
         Warning: Does a network request - so use with common sense.
freqtrade/data/entryexitanalysis.py (17 changes) Executable file → Normal file
@@ -24,11 +24,12 @@ def _load_backtest_analysis_data(backtest_dir: Path, name: str):
     scpf = Path(backtest_dir.parent / f"{backtest_dir.stem}_{name}.pkl")

     try:
-        scp = open(scpf, "rb")
-        loaded_data = joblib.load(scp)
-        logger.info(f"Loaded {name} data: {str(scpf)}")
+        with scpf.open("rb") as scp:
+            loaded_data = joblib.load(scp)
+            logger.info(f"Loaded {name} candles: {str(scpf)}")
+
     except Exception as e:
         logger.error(f"Cannot load {name} data from pickled results: ", e)
         return None

     return loaded_data
@@ -60,7 +61,7 @@ def _process_candles_and_indicators(pairlist, strategy_name, trades, signal_cand
     return analysed_trades_dict


-def _analyze_candles_and_indicators(pair, trades, signal_candles):
+def _analyze_candles_and_indicators(pair, trades: pd.DataFrame, signal_candles: pd.DataFrame):
     buyf = signal_candles

     if len(buyf) > 0:
@@ -129,7 +130,7 @@ def _do_group_table_output(bigdf, glist, to_csv=False, csv_path=None):

     else:
         agg_mask = {'profit_abs': ['count', 'sum', 'median', 'mean'],
-                    'profit_ratio': ['sum', 'median', 'mean']}
+                    'profit_ratio': ['median', 'mean', 'sum']}
         agg_cols = ['num_buys', 'profit_abs_sum', 'profit_abs_median',
                     'profit_abs_mean', 'median_profit_pct', 'mean_profit_pct',
                     'total_profit_pct']
@@ -150,6 +151,12 @@ def _do_group_table_output(bigdf, glist, to_csv=False, csv_path=None):
         # 4: profit summaries grouped by pair, enter_ and exit_tag (this can get quite large)
         if g == "4":
             group_mask = ['pair', 'enter_reason', 'exit_reason']
+
+        # 5: profit summaries grouped by exit_tag
+        if g == "5":
+            group_mask = ['exit_reason']
+            sortcols = ['exit_reason']
+
         if group_mask:
             new = bigdf.groupby(group_mask).agg(agg_mask).reset_index()
             new.columns = group_mask + agg_cols
freqtrade/data/history/history_utils.py
@@ -28,8 +28,8 @@ def load_pair_history(pair: str,
                       fill_up_missing: bool = True,
                       drop_incomplete: bool = False,
                       startup_candles: int = 0,
-                      data_format: str = None,
-                      data_handler: IDataHandler = None,
+                      data_format: Optional[str] = None,
+                      data_handler: Optional[IDataHandler] = None,
                       candle_type: CandleType = CandleType.SPOT
                       ) -> DataFrame:
     """
@@ -69,7 +69,7 @@ def load_data(datadir: Path,
              fail_without_data: bool = False,
              data_format: str = 'json',
              candle_type: CandleType = CandleType.SPOT,
-             user_futures_funding_rate: int = None,
+             user_futures_funding_rate: Optional[int] = None,
              ) -> Dict[str, DataFrame]:
     """
     Load ohlcv history data for a list of pairs.
@@ -116,7 +116,7 @@ def refresh_data(*, datadir: Path,
                  timeframe: str,
                  pairs: List[str],
                  exchange: Exchange,
-                 data_format: str = None,
+                 data_format: Optional[str] = None,
                  timerange: Optional[TimeRange] = None,
                  candle_type: CandleType,
                  ) -> None:
@@ -189,7 +189,7 @@ def _download_pair_history(pair: str, *,
                            timeframe: str = '5m',
                            process: str = '',
                            new_pairs_days: int = 30,
-                           data_handler: IDataHandler = None,
+                           data_handler: Optional[IDataHandler] = None,
                            timerange: Optional[TimeRange] = None,
                            candle_type: CandleType,
                            erase: bool = False,
@@ -272,7 +272,7 @@ def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes
                                 datadir: Path, trading_mode: str,
                                 timerange: Optional[TimeRange] = None,
                                 new_pairs_days: int = 30, erase: bool = False,
-                                data_format: str = None,
+                                data_format: Optional[str] = None,
                                 prepend: bool = False,
                                 ) -> List[str]:
     """
freqtrade/data/history/idatahandler.py
@@ -308,7 +308,7 @@ class IDataHandler(ABC):
                                    timerange=timerange_startup,
                                    candle_type=candle_type
                                    )
-        if self._check_empty_df(pairdf, pair, timeframe, candle_type, warn_no_data, True):
+        if self._check_empty_df(pairdf, pair, timeframe, candle_type, warn_no_data):
             return pairdf
         else:
             enddate = pairdf.iloc[-1]['date']
@@ -316,7 +316,7 @@ class IDataHandler(ABC):
             if timerange_startup:
                 self._validate_pairdata(pair, pairdf, timeframe, candle_type, timerange_startup)
                 pairdf = trim_dataframe(pairdf, timerange_startup)
-                if self._check_empty_df(pairdf, pair, timeframe, candle_type, warn_no_data):
+                if self._check_empty_df(pairdf, pair, timeframe, candle_type, warn_no_data, True):
                     return pairdf

             # incomplete candles should only be dropped if we didn't trim the end beforehand.
@@ -374,6 +374,21 @@ class IDataHandler(ABC):
             logger.warning(f"{pair}, {candle_type}, {timeframe}, "
                            f"data ends at {pairdata.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}")

+    def rename_futures_data(
+            self, pair: str, new_pair: str, timeframe: str, candle_type: CandleType):
+        """
+        Temporary method to migrate data from old naming to new naming (BTC/USDT -> BTC/USDT:USDT)
+        Only used for binance to support the binance futures naming unification.
+        """
+
+        file_old = self._pair_data_filename(self._datadir, pair, timeframe, candle_type)
+        file_new = self._pair_data_filename(self._datadir, new_pair, timeframe, candle_type)
+        # print(file_old, file_new)
+        if file_new.exists():
+            logger.warning(f"{file_new} exists already, can't migrate {pair}.")
+            return
+        file_old.rename(file_new)
+

 def get_datahandlerclass(datatype: str) -> Type[IDataHandler]:
     """
@@ -403,8 +418,8 @@ def get_datahandlerclass(datatype: str) -> Type[IDataHandler]:
     raise ValueError(f"No datahandler for datatype {datatype} available.")


-def get_datahandler(datadir: Path, data_format: str = None,
-                    data_handler: IDataHandler = None) -> IDataHandler:
+def get_datahandler(datadir: Path, data_format: Optional[str] = None,
+                    data_handler: Optional[IDataHandler] = None) -> IDataHandler:
     """
     :param datadir: Folder to save data
     :param data_format: dataformat to use
freqtrade/data/metrics.py
@@ -1,4 +1,6 @@
 import logging
+import math
+from datetime import datetime
 from typing import Dict, Tuple

 import numpy as np
@@ -190,3 +192,119 @@ def calculate_cagr(days_passed: int, starting_balance: float, final_balance: flo
     :return: CAGR
     """
     return (final_balance / starting_balance) ** (1 / (days_passed / 365)) - 1
+
+
+def calculate_expectancy(trades: pd.DataFrame) -> float:
+    """
+    Calculate expectancy
+    :param trades: DataFrame containing trades (requires columns close_date and profit_abs)
+    :return: expectancy
+    """
+    if len(trades) == 0:
+        return 0
+
+    expectancy = 1
+
+    profit_sum = trades.loc[trades['profit_abs'] > 0, 'profit_abs'].sum()
+    loss_sum = abs(trades.loc[trades['profit_abs'] < 0, 'profit_abs'].sum())
+    nb_win_trades = len(trades.loc[trades['profit_abs'] > 0])
+    nb_loss_trades = len(trades.loc[trades['profit_abs'] < 0])
+
+    if (nb_win_trades > 0) and (nb_loss_trades > 0):
+        average_win = profit_sum / nb_win_trades
+        average_loss = loss_sum / nb_loss_trades
+        risk_reward_ratio = average_win / average_loss
+        winrate = nb_win_trades / len(trades)
+        expectancy = ((1 + risk_reward_ratio) * winrate) - 1
+    elif nb_win_trades == 0:
+        expectancy = 0
+
+    return expectancy
+
+
+def calculate_sortino(trades: pd.DataFrame, min_date: datetime, max_date: datetime,
+                      starting_balance: float) -> float:
+    """
+    Calculate sortino
+    :param trades: DataFrame containing trades (requires columns profit_abs)
+    :return: sortino
+    """
+    if (len(trades) == 0) or (min_date is None) or (max_date is None) or (min_date == max_date):
+        return 0
+
+    total_profit = trades['profit_abs'] / starting_balance
+    days_period = max(1, (max_date - min_date).days)
+
+    expected_returns_mean = total_profit.sum() / days_period
+
+    down_stdev = np.std(trades.loc[trades['profit_abs'] < 0, 'profit_abs'] / starting_balance)
+
+    if down_stdev != 0 and not np.isnan(down_stdev):
+        sortino_ratio = expected_returns_mean / down_stdev * np.sqrt(365)
+    else:
+        # Define high (negative) sortino ratio to be clear that this is NOT optimal.
+        sortino_ratio = -100
+
+    # print(expected_returns_mean, down_stdev, sortino_ratio)
+    return sortino_ratio
+
+
+def calculate_sharpe(trades: pd.DataFrame, min_date: datetime, max_date: datetime,
+                     starting_balance: float) -> float:
+    """
+    Calculate sharpe
+    :param trades: DataFrame containing trades (requires column profit_abs)
+    :return: sharpe
+    """
+    if (len(trades) == 0) or (min_date is None) or (max_date is None) or (min_date == max_date):
+        return 0
+
+    total_profit = trades['profit_abs'] / starting_balance
+    days_period = max(1, (max_date - min_date).days)
+
+    expected_returns_mean = total_profit.sum() / days_period
+    up_stdev = np.std(total_profit)
+
+    if up_stdev != 0:
+        sharp_ratio = expected_returns_mean / up_stdev * np.sqrt(365)
+    else:
+        # Define high (negative) sharpe ratio to be clear that this is NOT optimal.
+        sharp_ratio = -100
+
+    # print(expected_returns_mean, up_stdev, sharp_ratio)
+    return sharp_ratio
+
+
+def calculate_calmar(trades: pd.DataFrame, min_date: datetime, max_date: datetime,
+                     starting_balance: float) -> float:
+    """
+    Calculate calmar
+    :param trades: DataFrame containing trades (requires columns close_date and profit_abs)
+    :return: calmar
+    """
+    if (len(trades) == 0) or (min_date is None) or (max_date is None) or (min_date == max_date):
+        return 0
+
+    total_profit = trades['profit_abs'].sum() / starting_balance
+    days_period = max(1, (max_date - min_date).days)
+
+    # adding slippage of 0.1% per trade
+    # total_profit = total_profit - 0.0005
+    expected_returns_mean = total_profit / days_period * 100
+
+    # calculate max drawdown
+    try:
+        _, _, _, _, _, max_drawdown = calculate_max_drawdown(
+            trades, value_col="profit_abs", starting_balance=starting_balance
+        )
+    except ValueError:
+        max_drawdown = 0
+
+    if max_drawdown != 0:
+        calmar_ratio = expected_returns_mean / max_drawdown * math.sqrt(365)
+    else:
+        # Define high (negative) calmar ratio to be clear that this is NOT optimal.
+        calmar_ratio = -100
+
+    # print(expected_returns_mean, max_drawdown, calmar_ratio)
+    return calmar_ratio
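Note (not part of the diff): a quick sanity check of the new expectancy formula on a toy trade set; two wins of 10 and one loss of 5 give a risk/reward ratio of 2 and a winrate of 2/3:

    import pandas as pd

    trades = pd.DataFrame({'profit_abs': [10.0, 10.0, -5.0]})

    profit_sum = trades.loc[trades['profit_abs'] > 0, 'profit_abs'].sum()     # 20.0
    loss_sum = abs(trades.loc[trades['profit_abs'] < 0, 'profit_abs'].sum())  # 5.0
    risk_reward_ratio = (profit_sum / 2) / (loss_sum / 1)                     # 2.0
    winrate = 2 / 3
    expectancy = ((1 + risk_reward_ratio) * winrate) - 1
    print(expectancy)  # 1.0 -> each unit risked returned one unit of profit on average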
freqtrade/edge/__init__.py
@@ -195,7 +195,7 @@ class Edge:

     def stake_amount(self, pair: str, free_capital: float,
                      total_capital: float, capital_in_trade: float) -> float:
-        stoploss = self.stoploss(pair)
+        stoploss = self.get_stoploss(pair)
         available_capital = (total_capital + capital_in_trade) * self._capital_ratio
         allowed_capital_at_risk = available_capital * self._allowed_risk
         max_position_size = abs(allowed_capital_at_risk / stoploss)
@@ -214,7 +214,7 @@ class Edge:
         )
         return round(position_size, 15)

-    def stoploss(self, pair: str) -> float:
+    def get_stoploss(self, pair: str) -> float:
         if pair in self._cached_pairs:
             return self._cached_pairs[pair].stoploss
         else:
freqtrade/enums/__init__.py
@@ -5,7 +5,9 @@ from freqtrade.enums.exitchecktuple import ExitCheckTuple
 from freqtrade.enums.exittype import ExitType
 from freqtrade.enums.hyperoptstate import HyperoptState
 from freqtrade.enums.marginmode import MarginMode
+from freqtrade.enums.marketstatetype import MarketDirection
 from freqtrade.enums.ordertypevalue import OrderTypeValues
+from freqtrade.enums.pricetype import PriceType
 from freqtrade.enums.rpcmessagetype import NO_ECHO_MESSAGES, RPCMessageType, RPCRequestType
 from freqtrade.enums.runmode import NON_UTIL_MODES, OPTIMIZE_MODES, TRADING_MODES, RunMode
 from freqtrade.enums.signaltype import SignalDirection, SignalTagType, SignalType
freqtrade/enums/candletype.py
@@ -13,6 +13,9 @@ class CandleType(str, Enum):
     FUNDING_RATE = "funding_rate"
     # BORROW_RATE = "borrow_rate" # * unimplemented

+    def __str__(self):
+        return f"{self.name.lower()}"
+
     @staticmethod
     def from_string(value: str) -> 'CandleType':
         if not value:
freqtrade/enums/marketstatetype.py (new file, 15 lines)
@@ -0,0 +1,15 @@
+from enum import Enum
+
+
+class MarketDirection(Enum):
+    """
+    Enum for various market directions.
+    """
+    LONG = "long"
+    SHORT = "short"
+    EVEN = "even"
+    NONE = "none"
+
+    def __str__(self):
+        # convert to string
+        return self.value
freqtrade/enums/pricetype.py (new file, 8 lines)
@@ -0,0 +1,8 @@
+from enum import Enum
+
+
+class PriceType(str, Enum):
+    """Enum to distinguish possible trigger prices for stoplosses"""
+    LAST = "last"
+    MARK = "mark"
+    INDEX = "index"
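Note (not part of the diff): subclassing `str, Enum` (as `PriceType` and several other freqtrade enums do) makes members compare equal to their raw strings, which is what allows them to be used directly as mapping keys and serialised values. Illustration with a mirror of the new enum:

    from enum import Enum


    class PriceType(str, Enum):
        """Mirror of the new enum, for illustration only."""
        LAST = "last"
        MARK = "mark"
        INDEX = "index"


    assert PriceType.LAST == "last"             # str mixin: equal to the plain string
    mapping = {PriceType.LAST: "CONTRACT_PRICE"}
    print(mapping[PriceType.LAST])              # usable as dict keys, cf. the exchange maps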
freqtrade/enums/rpcmessagetype.py
@@ -4,6 +4,7 @@ from enum import Enum
 class RPCMessageType(str, Enum):
     STATUS = 'status'
     WARNING = 'warning'
+    EXCEPTION = 'exception'
     STARTUP = 'startup'

     ENTRY = 'entry'
@@ -37,5 +38,8 @@ class RPCRequestType(str, Enum):
     WHITELIST = 'whitelist'
     ANALYZED_DF = 'analyzed_df'

+    def __str__(self):
+        return self.value
+

 NO_ECHO_MESSAGES = (RPCMessageType.ANALYZED_DF, RPCMessageType.WHITELIST, RPCMessageType.NEW_CANDLE)
freqtrade/enums/signaltype.py
@@ -10,6 +10,9 @@ class SignalType(Enum):
     ENTER_SHORT = "enter_short"
     EXIT_SHORT = "exit_short"

+    def __str__(self):
+        return f"{self.name.lower()}"
+

 class SignalTagType(Enum):
     """
@@ -18,7 +21,13 @@ class SignalTagType(Enum):
     ENTER_TAG = "enter_tag"
     EXIT_TAG = "exit_tag"

+    def __str__(self):
+        return f"{self.name.lower()}"
+

 class SignalDirection(str, Enum):
     LONG = 'long'
     SHORT = 'short'
+
+    def __str__(self):
+        return f"{self.name.lower()}"
freqtrade/exchange/__init__.py
@@ -3,7 +3,6 @@
 from freqtrade.exchange.common import remove_credentials, MAP_EXCHANGE_CHILDCLASS
 from freqtrade.exchange.exchange import Exchange
 # isort: on
-from freqtrade.exchange.bibox import Bibox
 from freqtrade.exchange.binance import Binance
 from freqtrade.exchange.bitpanda import Bitpanda
 from freqtrade.exchange.bittrex import Bittrex
@@ -18,7 +17,7 @@ from freqtrade.exchange.exchange_utils import (amount_to_contract_precision, amo
                                                timeframe_to_next_date, timeframe_to_prev_date,
                                                timeframe_to_seconds, validate_exchange,
                                                validate_exchanges)
-from freqtrade.exchange.gateio import Gateio
+from freqtrade.exchange.gate import Gate
 from freqtrade.exchange.hitbtc import Hitbtc
 from freqtrade.exchange.huobi import Huobi
 from freqtrade.exchange.kraken import Kraken
freqtrade/exchange/bibox.py (deleted)
@@ -1,28 +0,0 @@
-""" Bibox exchange subclass """
-import logging
-from typing import Dict
-
-from freqtrade.exchange import Exchange
-
-
-logger = logging.getLogger(__name__)
-
-
-class Bibox(Exchange):
-    """
-    Bibox exchange class. Contains adjustments needed for Freqtrade to work
-    with this exchange.
-
-    Please note that this exchange is not included in the list of exchanges
-    officially supported by the Freqtrade development team. So some features
-    may still not work as expected.
-    """
-
-    # fetchCurrencies API point requires authentication for Bibox,
-    # so switch it off for Freqtrade load_markets()
-    @property
-    def _ccxt_config(self) -> Dict:
-        # Parameters to add directly to ccxt sync/async initialization.
-        config = {"has": {"fetchCurrencies": False}}
-        config.update(super()._ccxt_config)
-        return config
freqtrade/exchange/binance.py
@@ -7,11 +7,11 @@ from typing import Dict, List, Optional, Tuple
 import arrow
 import ccxt

-from freqtrade.enums import CandleType, MarginMode, TradingMode
+from freqtrade.enums import CandleType, MarginMode, PriceType, TradingMode
 from freqtrade.exceptions import DDosProtection, OperationalException, TemporaryError
 from freqtrade.exchange import Exchange
 from freqtrade.exchange.common import retrier
-from freqtrade.exchange.types import Tickers
+from freqtrade.exchange.types import OHLCVResponse, Tickers
 from freqtrade.misc import deep_merge_dicts, json_load


@@ -28,11 +28,16 @@ class Binance(Exchange):
         "trades_pagination": "id",
         "trades_pagination_arg": "fromId",
         "l2_limit_range": [5, 10, 20, 50, 100, 500, 1000],
         "ccxt_futures_name": "future"
     }
     _ft_has_futures: Dict = {
-        "stoploss_order_types": {"limit": "limit", "market": "market"},
+        "stoploss_order_types": {"limit": "stop", "market": "stop_market"},
         "tickers_have_price": False,
         "floor_leverage": True,
+        "stop_price_type_field": "workingType",
+        "stop_price_type_value_mapping": {
+            PriceType.LAST: "CONTRACT_PRICE",
+            PriceType.MARK: "MARK_PRICE",
+        },
     }

     _supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
@@ -78,33 +83,9 @@ class Binance(Exchange):
             raise DDosProtection(e) from e
         except (ccxt.NetworkError, ccxt.ExchangeError) as e:
             raise TemporaryError(
-                f'Could not set leverage due to {e.__class__.__name__}. Message: {e}') from e
+                f'Error in additional_exchange_init due to {e.__class__.__name__}. Message: {e}'
+            ) from e
         except ccxt.BaseError as e:
             raise OperationalException(e) from e
-
-    @retrier
-    def _set_leverage(
-        self,
-        leverage: float,
-        pair: Optional[str] = None,
-        trading_mode: Optional[TradingMode] = None
-    ):
-        """
-        Set's the leverage before making a trade, in order to not
-        have the same leverage on every trade
-        """
-        trading_mode = trading_mode or self.trading_mode
-
-        if self._config['dry_run'] or trading_mode != TradingMode.FUTURES:
-            return
-
-        try:
-            self._api.set_leverage(symbol=pair, leverage=round(leverage))
-        except ccxt.DDoSProtection as e:
-            raise DDosProtection(e) from e
-        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
-            raise TemporaryError(
-                f'Could not set leverage due to {e.__class__.__name__}. Message: {e}') from e
-        except ccxt.BaseError as e:
-            raise OperationalException(e) from e
@@ -112,7 +93,7 @@ class Binance(Exchange):
                            since_ms: int, candle_type: CandleType,
                            is_new_pair: bool = False, raise_: bool = False,
                            until_ms: Optional[int] = None
-                           ) -> Tuple[str, str, str, List]:
+                           ) -> OHLCVResponse:
         """
         Overwrite to introduce "fast new pair" functionality by detecting the pair's listing date
         Does not work for other exchanges, which don't return the earliest data when called with "0"
@@ -150,6 +131,7 @@ class Binance(Exchange):
                                  is_short: bool,
                                  amount: float,
                                  stake_amount: float,
+                                 leverage: float,
                                  wallet_balance: float,  # Or margin balance
                                  mm_ex_1: float = 0.0,  # (Binance) Cross only
                                  upnl_ex_1: float = 0.0,  # (Binance) Cross only
@@ -159,11 +141,12 @@ class Binance(Exchange):
         MARGIN: https://www.binance.com/en/support/faq/f6b010588e55413aa58b7d63ee0125ed
         PERPETUAL: https://www.binance.com/en/support/faq/b3c689c1f50a44cabb3a84e663b81d93

-        :param exchange_name:
         :param pair: Pair to calculate liquidation price for
         :param open_rate: Entry price of position
         :param is_short: True if the trade is a short, false otherwise
         :param amount: Absolute value of position size incl. leverage (in base currency)
+        :param stake_amount: Stake amount - Collateral in settle currency.
+        :param leverage: Leverage used for this position.
         :param trading_mode: SPOT, MARGIN, FUTURES, etc.
         :param margin_mode: Either ISOLATED or CROSS
         :param wallet_balance: Amount of margin_mode in the wallet being used to trade
@@ -212,7 +195,7 @@ class Binance(Exchange):
             leverage_tiers_path = (
                 Path(__file__).parent / 'binance_leverage_tiers.json'
             )
-            with open(leverage_tiers_path) as json_file:
+            with leverage_tiers_path.open() as json_file:
                 return json_load(json_file)
         else:
             try:
[File diff suppressed because it is too large]
freqtrade/exchange/bybit.py
@@ -1,9 +1,16 @@
 """ Bybit exchange subclass """
 import logging
-from typing import Dict, List, Tuple
+from datetime import datetime
+from typing import Any, Dict, List, Optional, Tuple

-from freqtrade.enums import MarginMode, TradingMode
+import ccxt
+
+from freqtrade.constants import BuySell
+from freqtrade.enums import MarginMode, PriceType, TradingMode
+from freqtrade.exceptions import DDosProtection, OperationalException, TemporaryError
 from freqtrade.exchange import Exchange
+from freqtrade.exchange.common import retrier
+from freqtrade.exchange.exchange_utils import timeframe_to_msecs


 logger = logging.getLogger(__name__)
@@ -20,18 +27,27 @@ class Bybit(Exchange):
     """

     _ft_has: Dict = {
-        "ohlcv_candle_limit": 1000,
-        "ccxt_futures_name": "linear",
+        "ohlcv_candle_limit": 200,
+        "ohlcv_has_history": False,
     }
     _ft_has_futures: Dict = {
+        "ohlcv_has_history": True,
         "mark_ohlcv_timeframe": "4h",
         "funding_fee_timeframe": "8h",
+        "stoploss_on_exchange": True,
+        "stoploss_order_types": {"limit": "limit", "market": "market"},
+        "stop_price_type_field": "triggerBy",
+        "stop_price_type_value_mapping": {
+            PriceType.LAST: "LastPrice",
+            PriceType.MARK: "MarkPrice",
+            PriceType.INDEX: "IndexPrice",
+        },
     }

     _supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
         # TradingMode.SPOT always supported and not required in this list
         # (TradingMode.FUTURES, MarginMode.CROSS),
-        # (TradingMode.FUTURES, MarginMode.ISOLATED)
+        (TradingMode.FUTURES, MarginMode.ISOLATED)
     ]

     @property
@@ -47,3 +63,158 @@ class Bybit(Exchange):
             })
         config.update(super()._ccxt_config)
         return config
+
+    def market_is_future(self, market: Dict[str, Any]) -> bool:
+        main = super().market_is_future(market)
+        # For ByBit, we'll only support USDT markets for now.
+        return (
+            main and market['settle'] == 'USDT'
+        )
+
+    @retrier
+    def additional_exchange_init(self) -> None:
+        """
+        Additional exchange initialization logic.
+        .api will be available at this point.
+        Must be overridden in child methods if required.
+        """
+        try:
+            if self.trading_mode == TradingMode.FUTURES and not self._config['dry_run']:
+                position_mode = self._api.set_position_mode(False)
+                self._log_exchange_response('set_position_mode', position_mode)
+        except ccxt.DDoSProtection as e:
+            raise DDosProtection(e) from e
+        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
+            raise TemporaryError(
+                f'Error in additional_exchange_init due to {e.__class__.__name__}. Message: {e}'
+            ) from e
+        except ccxt.BaseError as e:
+            raise OperationalException(e) from e
+
+    async def _fetch_funding_rate_history(
+        self,
+        pair: str,
+        timeframe: str,
+        limit: int,
+        since_ms: Optional[int] = None,
+    ) -> List[List]:
+        """
+        Fetch funding rate history
+        Necessary workaround until https://github.com/ccxt/ccxt/issues/15990 is fixed.
+        """
+        params = {}
+        if since_ms:
+            until = since_ms + (timeframe_to_msecs(timeframe) * self._ft_has['ohlcv_candle_limit'])
+            params.update({'until': until})
+        # Funding rate
+        data = await self._api_async.fetch_funding_rate_history(
+            pair, since=since_ms,
+            params=params)
+        # Convert funding rate to candle pattern
+        data = [[x['timestamp'], x['fundingRate'], 0, 0, 0, 0] for x in data]
+        return data
+
+    def _lev_prep(self, pair: str, leverage: float, side: BuySell):
+        if self.trading_mode != TradingMode.SPOT:
+            params = {'leverage': leverage}
+            self.set_margin_mode(pair, self.margin_mode, accept_fail=True, params=params)
+            self._set_leverage(leverage, pair, accept_fail=True)
+
+    def _get_params(
+        self,
+        side: BuySell,
+        ordertype: str,
+        leverage: float,
+        reduceOnly: bool,
+        time_in_force: str = 'GTC',
+    ) -> Dict:
+        params = super()._get_params(
+            side=side,
+            ordertype=ordertype,
+            leverage=leverage,
+            reduceOnly=reduceOnly,
+            time_in_force=time_in_force,
+        )
+        if self.trading_mode == TradingMode.FUTURES and self.margin_mode:
+            params['position_idx'] = 0
+        return params
+
+    def dry_run_liquidation_price(
+        self,
+        pair: str,
+        open_rate: float,  # Entry price of position
+        is_short: bool,
+        amount: float,
+        stake_amount: float,
+        leverage: float,
+        wallet_balance: float,  # Or margin balance
+        mm_ex_1: float = 0.0,  # (Binance) Cross only
+        upnl_ex_1: float = 0.0,  # (Binance) Cross only
+    ) -> Optional[float]:
+        """
+        Important: Must be fetching data from cached values as this is used by backtesting!
+        PERPETUAL:
+         bybit:
+          https://www.bybithelp.com/HelpCenterKnowledge/bybitHC_Article?language=en_US&id=000001067
+
+        Long:
+        Liquidation Price = (
+            Entry Price * (1 - Initial Margin Rate + Maintenance Margin Rate)
+            - Extra Margin Added/ Contract)
+        Short:
+        Liquidation Price = (
+            Entry Price * (1 + Initial Margin Rate - Maintenance Margin Rate)
+            + Extra Margin Added/ Contract)
+
+        Implementation Note: Extra margin is currently not used.
+
+        :param pair: Pair to calculate liquidation price for
+        :param open_rate: Entry price of position
+        :param is_short: True if the trade is a short, false otherwise
+        :param amount: Absolute value of position size incl. leverage (in base currency)
+        :param stake_amount: Stake amount - Collateral in settle currency.
+        :param leverage: Leverage used for this position.
+        :param trading_mode: SPOT, MARGIN, FUTURES, etc.
|
||||
:param margin_mode: Either ISOLATED or CROSS
|
||||
:param wallet_balance: Amount of margin_mode in the wallet being used to trade
|
||||
Cross-Margin Mode: crossWalletBalance
|
||||
Isolated-Margin Mode: isolatedWalletBalance
|
||||
"""
|
||||
|
||||
market = self.markets[pair]
|
||||
mm_ratio, _ = self.get_maintenance_ratio_and_amt(pair, stake_amount)
|
||||
|
||||
if self.trading_mode == TradingMode.FUTURES and self.margin_mode == MarginMode.ISOLATED:
|
||||
|
||||
if market['inverse']:
|
||||
raise OperationalException(
|
||||
"Freqtrade does not yet support inverse contracts")
|
||||
initial_margin_rate = 1 / leverage
|
||||
|
||||
# See docstring - ignores extra margin!
|
||||
if is_short:
|
||||
return open_rate * (1 + initial_margin_rate - mm_ratio)
|
||||
else:
|
||||
return open_rate * (1 - initial_margin_rate + mm_ratio)
|
||||
|
||||
else:
|
||||
raise OperationalException(
|
||||
"Freqtrade only supports isolated futures for leverage trading")
|
||||
|
||||
def get_funding_fees(
|
||||
self, pair: str, amount: float, is_short: bool, open_date: datetime) -> float:
|
||||
"""
|
||||
Fetch funding fees, either from the exchange (live) or calculates them
|
||||
based on funding rate/mark price history
|
||||
:param pair: The quote/base pair of the trade
|
||||
:param is_short: trade direction
|
||||
:param amount: Trade amount
|
||||
:param open_date: Open date of the trade
|
||||
:return: funding fee since open_date
|
||||
:raises: ExchangeError if something goes wrong.
|
||||
"""
|
||||
# Bybit does not provide "applied" funding fees per position.
|
||||
if self.trading_mode == TradingMode.FUTURES:
|
||||
return self._fetch_and_calculate_funding_fees(
|
||||
pair, amount, is_short, open_date)
|
||||
return 0.0
|
||||
|
@@ -46,13 +46,13 @@ MAP_EXCHANGE_CHILDCLASS = {
|
||||
'binanceje': 'binance',
|
||||
'binanceusdm': 'binance',
|
||||
'okex': 'okx',
|
||||
'gate': 'gateio',
|
||||
'gateio': 'gate',
|
||||
}
|
||||
|
||||
SUPPORTED_EXCHANGES = [
|
||||
'binance',
|
||||
'bittrex',
|
||||
'gateio',
|
||||
'gate',
|
||||
'huobi',
|
||||
'kraken',
|
||||
'okx',
|
||||
|
@@ -3,11 +3,11 @@
|
||||
Cryptocurrency Exchanges support
|
||||
"""
|
||||
import asyncio
|
||||
import http
|
||||
import inspect
|
||||
import logging
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from math import floor
|
||||
from threading import Lock
|
||||
from typing import Any, Coroutine, Dict, List, Literal, Optional, Tuple, Union
|
||||
|
||||
@@ -21,9 +21,10 @@ from pandas import DataFrame, concat
|
||||
|
||||
from freqtrade.constants import (DEFAULT_AMOUNT_RESERVE_PERCENT, NON_OPEN_EXCHANGE_STATES, BidAsk,
|
||||
BuySell, Config, EntryExit, ListPairsWithTimeframes, MakerTaker,
|
||||
PairWithTimeframe)
|
||||
OBLiteral, PairWithTimeframe)
|
||||
from freqtrade.data.converter import clean_ohlcv_dataframe, ohlcv_to_dataframe, trades_dict_to_list
|
||||
from freqtrade.enums import OPTIMIZE_MODES, CandleType, MarginMode, TradingMode
|
||||
from freqtrade.enums.pricetype import PriceType
|
||||
from freqtrade.exceptions import (DDosProtection, ExchangeError, InsufficientFundsError,
|
||||
InvalidOrderException, OperationalException, PricingError,
|
||||
RetryableOrderError, TemporaryError)
|
||||
@@ -36,7 +37,7 @@ from freqtrade.exchange.exchange_utils import (CcxtModuleType, amount_to_contrac
|
||||
price_to_precision, timeframe_to_minutes,
|
||||
timeframe_to_msecs, timeframe_to_next_date,
|
||||
timeframe_to_prev_date, timeframe_to_seconds)
|
||||
from freqtrade.exchange.types import Ticker, Tickers
|
||||
from freqtrade.exchange.types import OHLCVResponse, OrderBook, Ticker, Tickers
|
||||
from freqtrade.misc import (chunks, deep_merge_dicts, file_dump_json, file_load_json,
|
||||
safe_value_fallback2)
|
||||
from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
|
||||
@@ -45,12 +46,6 @@ from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Workaround for adding samesite support to pre 3.8 python
|
||||
# Only applies to python3.7, and only on certain exchanges (kraken)
|
||||
# Replicates the fix from starlette (which is actually causing this problem)
|
||||
http.cookies.Morsel._reserved["samesite"] = "SameSite" # type: ignore
|
||||
|
||||
|
||||
class Exchange:
|
||||
|
||||
# Parameters to add directly to buy/sell calls (like agreeing to trading agreement)
|
||||
@@ -74,6 +69,7 @@ class Exchange:
|
||||
# Check https://github.com/ccxt/ccxt/issues/10767 for removal of ohlcv_volume_currency
|
||||
"ohlcv_volume_currency": "base", # "base" or "quote"
|
||||
"tickers_have_quoteVolume": True,
|
||||
"tickers_have_bid_ask": True, # bid / ask empty for fetch_tickers
|
||||
"tickers_have_price": True,
|
||||
"trades_pagination": "time", # Possible are "time" or "id"
|
||||
"trades_pagination_arg": "since",
|
||||
@@ -474,7 +470,7 @@ class Exchange:
|
||||
try:
|
||||
if self._api_async:
|
||||
self.loop.run_until_complete(
|
||||
self._api_async.load_markets(reload=reload))
|
||||
self._api_async.load_markets(reload=reload, params={}))
|
||||
|
||||
except (asyncio.TimeoutError, ccxt.BaseError) as e:
|
||||
logger.warning('Could not load async markets. Reason: %s', e)
|
||||
@@ -483,7 +479,7 @@ class Exchange:
|
||||
def _load_markets(self) -> None:
|
||||
""" Initialize markets both sync and async """
|
||||
try:
|
||||
self._markets = self._api.load_markets()
|
||||
self._markets = self._api.load_markets(params={})
|
||||
self._load_async_markets()
|
||||
self._last_markets_refresh = arrow.utcnow().int_timestamp
|
||||
if self._ft_has['needs_trading_fees']:
|
||||
@@ -501,7 +497,7 @@ class Exchange:
|
||||
return None
|
||||
logger.debug("Performing scheduled market reload..")
|
||||
try:
|
||||
self._markets = self._api.load_markets(reload=True)
|
||||
self._markets = self._api.load_markets(reload=True, params={})
|
||||
# Also reload async markets to avoid issues with newly listed pairs
|
||||
self._load_async_markets(reload=True)
|
||||
self._last_markets_refresh = arrow.utcnow().int_timestamp
|
||||
@@ -606,12 +602,27 @@ class Exchange:
|
||||
if not self.exchange_has('createMarketOrder'):
|
||||
raise OperationalException(
|
||||
f'Exchange {self.name} does not support market orders.')
|
||||
self.validate_stop_ordertypes(order_types)
|
||||
|
||||
def validate_stop_ordertypes(self, order_types: Dict) -> None:
|
||||
"""
|
||||
Validate stoploss order types
|
||||
"""
|
||||
if (order_types.get("stoploss_on_exchange")
|
||||
and not self._ft_has.get("stoploss_on_exchange", False)):
|
||||
raise OperationalException(
|
||||
f'On exchange stoploss is not supported for {self.name}.'
|
||||
)
|
||||
if self.trading_mode == TradingMode.FUTURES:
|
||||
price_mapping = self._ft_has.get('stop_price_type_value_mapping', {}).keys()
|
||||
if (
|
||||
order_types.get("stoploss_on_exchange", False) is True
|
||||
and 'stoploss_price_type' in order_types
|
||||
and order_types['stoploss_price_type'] not in price_mapping
|
||||
):
|
||||
raise OperationalException(
|
||||
f'On exchange stoploss price type is not supported for {self.name}.'
|
||||
)
|
||||
|
||||
def validate_pricing(self, pricing: Dict) -> None:
|
||||
if pricing.get('use_order_book', False) and not self.exchange_has('fetchL2OrderBook'):
|
||||
@@ -682,7 +693,7 @@ class Exchange:
|
||||
f"Freqtrade does not support {mm_value} {trading_mode.value} on {self.name}"
|
||||
)
|
||||
|
||||
def get_option(self, param: str, default: Any = None) -> Any:
|
||||
def get_option(self, param: str, default: Optional[Any] = None) -> Any:
|
||||
"""
|
||||
Get parameter value from _ft_has
|
||||
"""
|
||||
@@ -840,7 +851,7 @@ class Exchange:
|
||||
'remaining': _amount,
|
||||
'datetime': arrow.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
|
||||
'timestamp': arrow.utcnow().int_timestamp * 1000,
|
||||
'status': "closed" if ordertype == "market" and not stop_loss else "open",
|
||||
'status': "open",
|
||||
'fee': None,
|
||||
'info': {},
|
||||
'leverage': leverage
|
||||
@@ -850,20 +861,33 @@ class Exchange:
|
||||
dry_order["stopPrice"] = dry_order["price"]
|
||||
# Workaround to avoid filling stoploss orders immediately
|
||||
dry_order["ft_order_type"] = "stoploss"
|
||||
orderbook: Optional[OrderBook] = None
|
||||
if self.exchange_has('fetchL2OrderBook'):
|
||||
orderbook = self.fetch_l2_order_book(pair, 20)
|
||||
if ordertype == "limit" and orderbook:
|
||||
# Allow a 3% price difference
|
||||
allowed_diff = 0.03
|
||||
if self._dry_is_price_crossed(pair, side, rate, orderbook, allowed_diff):
|
||||
logger.info(
|
||||
f"Converted order {pair} to market order due to price {rate} crossing spread "
|
||||
f"by more than {allowed_diff:.2%}.")
|
||||
dry_order["type"] = "market"
|
||||
|
||||
if dry_order["type"] == "market" and not dry_order.get("ft_order_type"):
|
||||
# Update market order pricing
|
||||
average = self.get_dry_market_fill_price(pair, side, amount, rate)
|
||||
average = self.get_dry_market_fill_price(pair, side, amount, rate, orderbook)
|
||||
dry_order.update({
|
||||
'average': average,
|
||||
'filled': _amount,
|
||||
'remaining': 0.0,
|
||||
'status': "closed",
|
||||
'cost': (dry_order['amount'] * average) / leverage
|
||||
})
|
||||
# market orders will always incurr taker fees
|
||||
dry_order = self.add_dry_order_fee(pair, dry_order, 'taker')
|
||||
|
||||
dry_order = self.check_dry_limit_order_filled(dry_order, immediate=True)
|
||||
dry_order = self.check_dry_limit_order_filled(
|
||||
dry_order, immediate=True, orderbook=orderbook)
|
||||
|
||||
self._dry_run_open_orders[dry_order["id"]] = dry_order
|
||||
# Copy order and close it - so the returned order is open unless it's a market order
|
||||
@@ -885,20 +909,22 @@ class Exchange:
|
||||
})
|
||||
return dry_order
|
||||
|
||||
def get_dry_market_fill_price(self, pair: str, side: str, amount: float, rate: float) -> float:
|
||||
def get_dry_market_fill_price(self, pair: str, side: str, amount: float, rate: float,
|
||||
orderbook: Optional[OrderBook]) -> float:
|
||||
"""
|
||||
Get the market order fill price based on orderbook interpolation
|
||||
"""
|
||||
if self.exchange_has('fetchL2OrderBook'):
|
||||
ob = self.fetch_l2_order_book(pair, 20)
|
||||
ob_type = 'asks' if side == 'buy' else 'bids'
|
||||
if not orderbook:
|
||||
orderbook = self.fetch_l2_order_book(pair, 20)
|
||||
ob_type: OBLiteral = 'asks' if side == 'buy' else 'bids'
|
||||
slippage = 0.05
|
||||
max_slippage_val = rate * ((1 + slippage) if side == 'buy' else (1 - slippage))
|
||||
|
||||
remaining_amount = amount
|
||||
filled_amount = 0.0
|
||||
book_entry_price = 0.0
|
||||
for book_entry in ob[ob_type]:
|
||||
for book_entry in orderbook[ob_type]:
|
||||
book_entry_price = book_entry[0]
|
||||
book_entry_coin_volume = book_entry[1]
|
||||
if remaining_amount > 0:
|
||||
@@ -926,20 +952,20 @@ class Exchange:
|
||||
|
||||
return rate
|
||||
|
||||
def _is_dry_limit_order_filled(self, pair: str, side: str, limit: float) -> bool:
|
||||
def _dry_is_price_crossed(self, pair: str, side: str, limit: float,
|
||||
orderbook: Optional[OrderBook] = None, offset: float = 0.0) -> bool:
|
||||
if not self.exchange_has('fetchL2OrderBook'):
|
||||
return True
|
||||
ob = self.fetch_l2_order_book(pair, 1)
|
||||
if not orderbook:
|
||||
orderbook = self.fetch_l2_order_book(pair, 1)
|
||||
try:
|
||||
if side == 'buy':
|
||||
price = ob['asks'][0][0]
|
||||
logger.debug(f"{pair} checking dry buy-order: price={price}, limit={limit}")
|
||||
if limit >= price:
|
||||
price = orderbook['asks'][0][0]
|
||||
if limit * (1 - offset) >= price:
|
||||
return True
|
||||
else:
|
||||
price = ob['bids'][0][0]
|
||||
logger.debug(f"{pair} checking dry sell-order: price={price}, limit={limit}")
|
||||
if limit <= price:
|
||||
price = orderbook['bids'][0][0]
|
||||
if limit * (1 + offset) <= price:
|
||||
return True
|
||||
except IndexError:
|
||||
# Ignore empty orderbooks when filling - can be filled with the next iteration.
|
||||
@@ -947,7 +973,8 @@ class Exchange:
|
||||
return False
|
||||
|
||||
def check_dry_limit_order_filled(
|
||||
self, order: Dict[str, Any], immediate: bool = False) -> Dict[str, Any]:
|
||||
self, order: Dict[str, Any], immediate: bool = False,
|
||||
orderbook: Optional[OrderBook] = None) -> Dict[str, Any]:
|
||||
"""
|
||||
Check dry-run limit order fill and update fee (if it filled).
|
||||
"""
|
||||
@@ -955,7 +982,7 @@ class Exchange:
|
||||
and order['type'] in ["limit"]
|
||||
and not order.get('ft_order_type')):
|
||||
pair = order['symbol']
|
||||
if self._is_dry_limit_order_filled(pair, order['side'], order['price']):
|
||||
if self._dry_is_price_crossed(pair, order['side'], order['price'], orderbook):
|
||||
order.update({
|
||||
'status': 'closed',
|
||||
'filled': order['amount'],
|
||||
@@ -1060,7 +1087,7 @@ class Exchange:
|
||||
f'Tried to {side} amount {amount} at rate {rate}.'
|
||||
f'Message: {e}') from e
|
||||
except ccxt.InvalidOrder as e:
|
||||
raise ExchangeError(
|
||||
raise InvalidOrderException(
|
||||
f'Could not create {ordertype} {side} order on market {pair}. '
|
||||
f'Tried to {side} amount {amount} at rate {rate}. '
|
||||
f'Message: {e}') from e
|
||||
@@ -1111,7 +1138,10 @@ class Exchange:
|
||||
# Ensure rate is less than stop price
|
||||
if bad_stop_price:
|
||||
raise OperationalException(
|
||||
'In stoploss limit order, stop price should be more than limit price')
|
||||
"In stoploss limit order, stop price should be more than limit price. "
|
||||
f"Stop price: {stop_price}, Limit price: {limit_rate}, "
|
||||
f"Limit Price pct: {limit_price_pct}"
|
||||
)
|
||||
return limit_rate
|
||||
|
||||
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:
|
||||
@@ -1121,8 +1151,8 @@ class Exchange:
|
||||
return params
|
||||
|
||||
@retrier(retries=0)
|
||||
def stoploss(self, pair: str, amount: float, stop_price: float, order_types: Dict,
|
||||
side: BuySell, leverage: float) -> Dict:
|
||||
def create_stoploss(self, pair: str, amount: float, stop_price: float, order_types: Dict,
|
||||
side: BuySell, leverage: float) -> Dict:
|
||||
"""
|
||||
creates a stoploss order.
|
||||
requires `_ft_has['stoploss_order_types']` to be set as a dict mapping limit and market
|
||||
@@ -1167,6 +1197,10 @@ class Exchange:
|
||||
stop_price=stop_price_norm)
|
||||
if self.trading_mode == TradingMode.FUTURES:
|
||||
params['reduceOnly'] = True
|
||||
if 'stoploss_price_type' in order_types and 'stop_price_type_field' in self._ft_has:
|
||||
price_type = self._ft_has['stop_price_type_value_mapping'][
|
||||
order_types.get('stoploss_price_type', PriceType.LAST)]
|
||||
params[self._ft_has['stop_price_type_field']] = price_type
|
||||
|
||||
amount = self.amount_to_precision(pair, self._amount_to_contracts(pair, amount))
|
||||
|
||||
@@ -1357,7 +1391,7 @@ class Exchange:
|
||||
raise OperationalException(e) from e
|
||||
|
||||
@retrier
|
||||
def fetch_positions(self, pair: str = None) -> List[Dict]:
|
||||
def fetch_positions(self, pair: Optional[str] = None) -> List[Dict]:
|
||||
"""
|
||||
Fetch positions from the exchange.
|
||||
If no pair is given, all positions are returned.
|
||||
@@ -1497,7 +1531,7 @@ class Exchange:
|
||||
return result
|
||||
|
||||
@retrier
|
||||
def fetch_l2_order_book(self, pair: str, limit: int = 100) -> dict:
|
||||
def fetch_l2_order_book(self, pair: str, limit: int = 100) -> OrderBook:
|
||||
"""
|
||||
Get L2 order book from exchange.
|
||||
Can be limited to a certain amount (if supported).
|
||||
@@ -1540,7 +1574,7 @@ class Exchange:
|
||||
|
||||
def get_rate(self, pair: str, refresh: bool,
|
||||
side: EntryExit, is_short: bool,
|
||||
order_book: Optional[dict] = None, ticker: Optional[Ticker] = None) -> float:
|
||||
order_book: Optional[OrderBook] = None, ticker: Optional[Ticker] = None) -> float:
|
||||
"""
|
||||
Calculates bid/ask target
|
||||
bid rate - between current ask price and last price
|
||||
@@ -1578,7 +1612,8 @@ class Exchange:
|
||||
logger.debug('order_book %s', order_book)
|
||||
# top 1 = index 0
|
||||
try:
|
||||
rate = order_book[f"{price_side}s"][order_book_top - 1][0]
|
||||
obside: OBLiteral = 'bids' if price_side == 'bid' else 'asks'
|
||||
rate = order_book[obside][order_book_top - 1][0]
|
||||
except (IndexError, KeyError) as e:
|
||||
logger.warning(
|
||||
f"{pair} - {name} Price at location {order_book_top} from orderbook "
|
||||
@@ -1705,7 +1740,7 @@ class Exchange:
|
||||
return self._config['fee']
|
||||
# validate that markets are loaded before trying to get fee
|
||||
if self._api.markets is None or len(self._api.markets) == 0:
|
||||
self._api.load_markets()
|
||||
self._api.load_markets(params={})
|
||||
|
||||
return self._api.calculate_fee(symbol=symbol, type=type, side=side, amount=amount,
|
||||
price=price, takerOrMaker=taker_or_maker)['rate']
|
||||
@@ -1801,7 +1836,7 @@ class Exchange:
|
||||
def get_historic_ohlcv(self, pair: str, timeframe: str,
|
||||
since_ms: int, candle_type: CandleType,
|
||||
is_new_pair: bool = False,
|
||||
until_ms: int = None) -> List:
|
||||
until_ms: Optional[int] = None) -> List:
|
||||
"""
|
||||
Get candle history using asyncio and returns the list of candles.
|
||||
Handles all async work for this.
|
||||
@@ -1813,32 +1848,18 @@ class Exchange:
|
||||
:param candle_type: '', mark, index, premiumIndex, or funding_rate
|
||||
:return: List with candle (OHLCV) data
|
||||
"""
|
||||
pair, _, _, data = self.loop.run_until_complete(
|
||||
pair, _, _, data, _ = self.loop.run_until_complete(
|
||||
self._async_get_historic_ohlcv(pair=pair, timeframe=timeframe,
|
||||
since_ms=since_ms, until_ms=until_ms,
|
||||
is_new_pair=is_new_pair, candle_type=candle_type))
|
||||
logger.info(f"Downloaded data for {pair} with length {len(data)}.")
|
||||
return data
|
||||
|
||||
def get_historic_ohlcv_as_df(self, pair: str, timeframe: str,
|
||||
since_ms: int, candle_type: CandleType) -> DataFrame:
|
||||
"""
|
||||
Minimal wrapper around get_historic_ohlcv - converting the result into a dataframe
|
||||
:param pair: Pair to download
|
||||
:param timeframe: Timeframe to get data for
|
||||
:param since_ms: Timestamp in milliseconds to get history from
|
||||
:param candle_type: Any of the enum CandleType (must match trading mode!)
|
||||
:return: OHLCV DataFrame
|
||||
"""
|
||||
ticks = self.get_historic_ohlcv(pair, timeframe, since_ms=since_ms, candle_type=candle_type)
|
||||
return ohlcv_to_dataframe(ticks, timeframe, pair=pair, fill_missing=True,
|
||||
drop_incomplete=self._ohlcv_partial_candle)
|
||||
|
||||
async def _async_get_historic_ohlcv(self, pair: str, timeframe: str,
|
||||
since_ms: int, candle_type: CandleType,
|
||||
is_new_pair: bool = False, raise_: bool = False,
|
||||
until_ms: Optional[int] = None
|
||||
) -> Tuple[str, str, str, List]:
|
||||
) -> OHLCVResponse:
|
||||
"""
|
||||
Download historic ohlcv
|
||||
:param is_new_pair: used by binance subclass to allow "fast" new pair downloading
|
||||
@@ -1869,15 +1890,16 @@ class Exchange:
|
||||
continue
|
||||
else:
|
||||
# Deconstruct tuple if it's not an exception
|
||||
p, _, c, new_data = res
|
||||
p, _, c, new_data, _ = res
|
||||
if p == pair and c == candle_type:
|
||||
data.extend(new_data)
|
||||
# Sort data again after extending the result - above calls return in "async order"
|
||||
data = sorted(data, key=lambda x: x[0])
|
||||
return pair, timeframe, candle_type, data
|
||||
return pair, timeframe, candle_type, data, self._ohlcv_partial_candle
|
||||
|
||||
def _build_coroutine(self, pair: str, timeframe: str, candle_type: CandleType,
|
||||
since_ms: Optional[int], cache: bool) -> Coroutine:
|
||||
def _build_coroutine(
|
||||
self, pair: str, timeframe: str, candle_type: CandleType,
|
||||
since_ms: Optional[int], cache: bool) -> Coroutine[Any, Any, OHLCVResponse]:
|
||||
not_all_data = cache and self.required_candle_call_count > 1
|
||||
if cache and (pair, timeframe, candle_type) in self._klines:
|
||||
candle_limit = self.ohlcv_candle_limit(timeframe, candle_type)
|
||||
@@ -1914,7 +1936,7 @@ class Exchange:
|
||||
"""
|
||||
Build Coroutines to execute as part of refresh_latest_ohlcv
|
||||
"""
|
||||
input_coroutines = []
|
||||
input_coroutines: List[Coroutine[Any, Any, OHLCVResponse]] = []
|
||||
cached_pairs = []
|
||||
for pair, timeframe, candle_type in set(pair_list):
|
||||
if (timeframe not in self.timeframes
|
||||
@@ -1943,7 +1965,8 @@ class Exchange:
|
||||
cache: bool, drop_incomplete: bool) -> DataFrame:
|
||||
# keeping last candle time as last refreshed time of the pair
|
||||
if ticks and cache:
|
||||
self._pairs_last_refresh_time[(pair, timeframe, c_type)] = ticks[-1][0] // 1000
|
||||
idx = -2 if drop_incomplete and len(ticks) > 1 else -1
|
||||
self._pairs_last_refresh_time[(pair, timeframe, c_type)] = ticks[idx][0] // 1000
|
||||
# keeping parsed dataframe in cache
|
||||
ohlcv_df = ohlcv_to_dataframe(ticks, timeframe, pair=pair, fill_missing=True,
|
||||
drop_incomplete=drop_incomplete)
|
||||
@@ -1978,7 +2001,6 @@ class Exchange:
|
||||
:return: Dict of [{(pair, timeframe): Dataframe}]
|
||||
"""
|
||||
logger.debug("Refreshing candle (OHLCV) data for %d pairs", len(pair_list))
|
||||
drop_incomplete = self._ohlcv_partial_candle if drop_incomplete is None else drop_incomplete
|
||||
|
||||
# Gather coroutines to run
|
||||
input_coroutines, cached_pairs = self._build_ohlcv_dl_jobs(pair_list, since_ms, cache)
|
||||
@@ -1996,10 +2018,11 @@ class Exchange:
|
||||
if isinstance(res, Exception):
|
||||
logger.warning(f"Async code raised an exception: {repr(res)}")
|
||||
continue
|
||||
# Deconstruct tuple (has 4 elements)
|
||||
pair, timeframe, c_type, ticks = res
|
||||
# Deconstruct tuple (has 5 elements)
|
||||
pair, timeframe, c_type, ticks, drop_hint = res
|
||||
drop_incomplete_ = drop_hint if drop_incomplete is None else drop_incomplete
|
||||
ohlcv_df = self._process_ohlcv_df(
|
||||
pair, timeframe, c_type, ticks, cache, drop_incomplete)
|
||||
pair, timeframe, c_type, ticks, cache, drop_incomplete_)
|
||||
|
||||
results_df[(pair, timeframe, c_type)] = ohlcv_df
|
||||
|
||||
@@ -2016,7 +2039,9 @@ class Exchange:
|
||||
# Timeframe in seconds
|
||||
interval_in_sec = timeframe_to_seconds(timeframe)
|
||||
plr = self._pairs_last_refresh_time.get((pair, timeframe, candle_type), 0) + interval_in_sec
|
||||
return plr < arrow.utcnow().int_timestamp
|
||||
# current,active candle open date
|
||||
now = int(timeframe_to_prev_date(timeframe).timestamp())
|
||||
return plr < now
|
||||
|
||||
@retrier_async
|
||||
async def _async_get_candle_history(
|
||||
@@ -2025,7 +2050,7 @@ class Exchange:
|
||||
timeframe: str,
|
||||
candle_type: CandleType,
|
||||
since_ms: Optional[int] = None,
|
||||
) -> Tuple[str, str, str, List]:
|
||||
) -> OHLCVResponse:
|
||||
"""
|
||||
Asynchronously get candle history data using fetch_ohlcv
|
||||
:param candle_type: '', mark, index, premiumIndex, or funding_rate
|
||||
@@ -2035,8 +2060,8 @@ class Exchange:
|
||||
# Fetch OHLCV asynchronously
|
||||
s = '(' + arrow.get(since_ms // 1000).isoformat() + ') ' if since_ms is not None else ''
|
||||
logger.debug(
|
||||
"Fetching pair %s, interval %s, since %s %s...",
|
||||
pair, timeframe, since_ms, s
|
||||
"Fetching pair %s, %s, interval %s, since %s %s...",
|
||||
pair, candle_type, timeframe, since_ms, s
|
||||
)
|
||||
params = deepcopy(self._ft_has.get('ohlcv_params', {}))
|
||||
candle_limit = self.ohlcv_candle_limit(
|
||||
@@ -2050,11 +2075,12 @@ class Exchange:
|
||||
limit=candle_limit, params=params)
|
||||
else:
|
||||
# Funding rate
|
||||
data = await self._api_async.fetch_funding_rate_history(
|
||||
pair, since=since_ms,
|
||||
limit=candle_limit)
|
||||
# Convert funding rate to candle pattern
|
||||
data = [[x['timestamp'], x['fundingRate'], 0, 0, 0, 0] for x in data]
|
||||
data = await self._fetch_funding_rate_history(
|
||||
pair=pair,
|
||||
timeframe=timeframe,
|
||||
limit=candle_limit,
|
||||
since_ms=since_ms,
|
||||
)
|
||||
# Some exchanges sort OHLCV in ASC order and others in DESC.
|
||||
# Ex: Bittrex returns the list of OHLCV in ASC order (oldest first, newest last)
|
||||
# while GDAX returns the list of OHLCV in DESC order (newest first, oldest last)
|
||||
@@ -2064,9 +2090,9 @@ class Exchange:
|
||||
data = sorted(data, key=lambda x: x[0])
|
||||
except IndexError:
|
||||
logger.exception("Error loading %s. Result was %s.", pair, data)
|
||||
return pair, timeframe, candle_type, []
|
||||
return pair, timeframe, candle_type, [], self._ohlcv_partial_candle
|
||||
logger.debug("Done fetching pair %s, interval %s ...", pair, timeframe)
|
||||
return pair, timeframe, candle_type, data
|
||||
return pair, timeframe, candle_type, data, self._ohlcv_partial_candle
|
||||
|
||||
except ccxt.NotSupported as e:
|
||||
raise OperationalException(
|
||||
@@ -2082,6 +2108,24 @@ class Exchange:
|
||||
raise OperationalException(f'Could not fetch historical candle (OHLCV) data '
|
||||
f'for pair {pair}. Message: {e}') from e
|
||||
|
||||
async def _fetch_funding_rate_history(
|
||||
self,
|
||||
pair: str,
|
||||
timeframe: str,
|
||||
limit: int,
|
||||
since_ms: Optional[int] = None,
|
||||
) -> List[List]:
|
||||
"""
|
||||
Fetch funding rate history - used to selectively override this by subclasses.
|
||||
"""
|
||||
# Funding rate
|
||||
data = await self._api_async.fetch_funding_rate_history(
|
||||
pair, since=since_ms,
|
||||
limit=limit)
|
||||
# Convert funding rate to candle pattern
|
||||
data = [[x['timestamp'], x['fundingRate'], 0, 0, 0, 0] for x in data]
|
||||
return data
|
||||
|
||||
# Fetch historic trades
|
||||
|
||||
@retrier_async
|
||||
@@ -2485,7 +2529,8 @@ class Exchange:
|
||||
self,
|
||||
leverage: float,
|
||||
pair: Optional[str] = None,
|
||||
trading_mode: Optional[TradingMode] = None
|
||||
trading_mode: Optional[TradingMode] = None,
|
||||
accept_fail: bool = False,
|
||||
):
|
||||
"""
|
||||
Set's the leverage before making a trade, in order to not
|
||||
@@ -2494,12 +2539,18 @@ class Exchange:
|
||||
if self._config['dry_run'] or not self.exchange_has("setLeverage"):
|
||||
# Some exchanges only support one margin_mode type
|
||||
return
|
||||
|
||||
if self._ft_has.get('floor_leverage', False) is True:
|
||||
# Rounding for binance ...
|
||||
leverage = floor(leverage)
|
||||
try:
|
||||
res = self._api.set_leverage(symbol=pair, leverage=leverage)
|
||||
self._log_exchange_response('set_leverage', res)
|
||||
except ccxt.DDoSProtection as e:
|
||||
raise DDosProtection(e) from e
|
||||
except ccxt.BadRequest as e:
|
||||
if not accept_fail:
|
||||
raise TemporaryError(
|
||||
f'Could not set leverage due to {e.__class__.__name__}. Message: {e}') from e
|
||||
except (ccxt.NetworkError, ccxt.ExchangeError) as e:
|
||||
raise TemporaryError(
|
||||
f'Could not set leverage due to {e.__class__.__name__}. Message: {e}') from e
|
||||
@@ -2521,7 +2572,8 @@ class Exchange:
|
||||
return open_date.minute > 0 or open_date.second > 0
|
||||
|
||||
@retrier
|
||||
def set_margin_mode(self, pair: str, margin_mode: MarginMode, params: dict = {}):
|
||||
def set_margin_mode(self, pair: str, margin_mode: MarginMode, accept_fail: bool = False,
|
||||
params: dict = {}):
|
||||
"""
|
||||
Set's the margin mode on the exchange to cross or isolated for a specific pair
|
||||
:param pair: base/quote currency pair (e.g. "ADA/USDT")
|
||||
@@ -2535,6 +2587,10 @@ class Exchange:
|
||||
self._log_exchange_response('set_margin_mode', res)
|
||||
except ccxt.DDoSProtection as e:
|
||||
raise DDosProtection(e) from e
|
||||
except ccxt.BadRequest as e:
|
||||
if not accept_fail:
|
||||
raise TemporaryError(
|
||||
f'Could not set margin mode due to {e.__class__.__name__}. Message: {e}') from e
|
||||
except (ccxt.NetworkError, ccxt.ExchangeError) as e:
|
||||
raise TemporaryError(
|
||||
f'Could not set margin mode due to {e.__class__.__name__}. Message: {e}') from e
|
||||
@@ -2668,7 +2724,7 @@ class Exchange:
|
||||
:param amount: Trade amount
|
||||
:param open_date: Open date of the trade
|
||||
:return: funding fee since open_date
|
||||
:raies: ExchangeError if something goes wrong.
|
||||
:raises: ExchangeError if something goes wrong.
|
||||
"""
|
||||
if self.trading_mode == TradingMode.FUTURES:
|
||||
if self._config['dry_run']:
|
||||
@@ -2688,6 +2744,7 @@ class Exchange:
|
||||
is_short: bool,
|
||||
amount: float, # Absolute value of position size
|
||||
stake_amount: float,
|
||||
leverage: float,
|
||||
wallet_balance: float,
|
||||
mm_ex_1: float = 0.0, # (Binance) Cross only
|
||||
upnl_ex_1: float = 0.0, # (Binance) Cross only
|
||||
@@ -2701,14 +2758,15 @@ class Exchange:
|
||||
raise OperationalException(
|
||||
f"{self.name} does not support {self.margin_mode} {self.trading_mode}")
|
||||
|
||||
isolated_liq = None
|
||||
liquidation_price = None
|
||||
if self._config['dry_run'] or not self.exchange_has("fetchPositions"):
|
||||
|
||||
isolated_liq = self.dry_run_liquidation_price(
|
||||
liquidation_price = self.dry_run_liquidation_price(
|
||||
pair=pair,
|
||||
open_rate=open_rate,
|
||||
is_short=is_short,
|
||||
amount=amount,
|
||||
leverage=leverage,
|
||||
stake_amount=stake_amount,
|
||||
wallet_balance=wallet_balance,
|
||||
mm_ex_1=mm_ex_1,
|
||||
@@ -2718,16 +2776,16 @@ class Exchange:
|
||||
positions = self.fetch_positions(pair)
|
||||
if len(positions) > 0:
|
||||
pos = positions[0]
|
||||
isolated_liq = pos['liquidationPrice']
|
||||
liquidation_price = pos['liquidationPrice']
|
||||
|
||||
if isolated_liq:
|
||||
buffer_amount = abs(open_rate - isolated_liq) * self.liquidation_buffer
|
||||
isolated_liq = (
|
||||
isolated_liq - buffer_amount
|
||||
if liquidation_price is not None:
|
||||
buffer_amount = abs(open_rate - liquidation_price) * self.liquidation_buffer
|
||||
liquidation_price_buffer = (
|
||||
liquidation_price - buffer_amount
|
||||
if is_short else
|
||||
isolated_liq + buffer_amount
|
||||
liquidation_price + buffer_amount
|
||||
)
|
||||
return isolated_liq
|
||||
return max(liquidation_price_buffer, 0.0)
|
||||
else:
|
||||
return None
|
||||
|
||||
@@ -2738,6 +2796,7 @@ class Exchange:
|
||||
is_short: bool,
|
||||
amount: float,
|
||||
stake_amount: float,
|
||||
leverage: float,
|
||||
wallet_balance: float, # Or margin balance
|
||||
mm_ex_1: float = 0.0, # (Binance) Cross only
|
||||
upnl_ex_1: float = 0.0, # (Binance) Cross only
|
||||
@@ -2745,22 +2804,28 @@ class Exchange:
|
||||
"""
|
||||
Important: Must be fetching data from cached values as this is used by backtesting!
|
||||
PERPETUAL:
|
||||
gateio: https://www.gate.io/help/futures/perpetual/22160/calculation-of-liquidation-price
|
||||
gate: https://www.gate.io/help/futures/futures/27724/liquidation-price-bankruptcy-price
|
||||
> Liquidation Price = (Entry Price ± Margin / Contract Multiplier / Size) /
|
||||
[ 1 ± (Maintenance Margin Ratio + Taker Rate)]
|
||||
Wherein, "+" or "-" depends on whether the contract goes long or short:
|
||||
"-" for long, and "+" for short.
|
||||
|
||||
okex: https://www.okex.com/support/hc/en-us/articles/
|
||||
360053909592-VI-Introduction-to-the-isolated-mode-of-Single-Multi-currency-Portfolio-margin
|
||||
|
||||
:param exchange_name:
|
||||
:param pair: Pair to calculate liquidation price for
|
||||
:param open_rate: Entry price of position
|
||||
:param is_short: True if the trade is a short, false otherwise
|
||||
:param amount: Absolute value of position size incl. leverage (in base currency)
|
||||
:param stake_amount: Stake amount - Collateral in settle currency.
|
||||
:param leverage: Leverage used for this position.
|
||||
:param trading_mode: SPOT, MARGIN, FUTURES, etc.
|
||||
:param margin_mode: Either ISOLATED or CROSS
|
||||
:param wallet_balance: Amount of margin_mode in the wallet being used to trade
|
||||
Cross-Margin Mode: crossWalletBalance
|
||||
Isolated-Margin Mode: isolatedWalletBalance
|
||||
|
||||
# * Not required by Gateio or OKX
|
||||
# * Not required by Gate or OKX
|
||||
:param mm_ex_1:
|
||||
:param upnl_ex_1:
|
||||
"""
|
||||
@@ -2789,7 +2854,7 @@ class Exchange:
|
||||
def get_maintenance_ratio_and_amt(
|
||||
self,
|
||||
pair: str,
|
||||
nominal_value: float = 0.0,
|
||||
nominal_value: float,
|
||||
) -> Tuple[float, Optional[float]]:
|
||||
"""
|
||||
Important: Must be fetching data from cached values as this is used by backtesting!
|
||||
|
@@ -15,18 +15,19 @@ from freqtrade.util import FtPrecise
|
||||
CcxtModuleType = Any
|
||||
|
||||
|
||||
def is_exchange_known_ccxt(exchange_name: str, ccxt_module: CcxtModuleType = None) -> bool:
|
||||
def is_exchange_known_ccxt(
|
||||
exchange_name: str, ccxt_module: Optional[CcxtModuleType] = None) -> bool:
|
||||
return exchange_name in ccxt_exchanges(ccxt_module)
|
||||
|
||||
|
||||
def ccxt_exchanges(ccxt_module: CcxtModuleType = None) -> List[str]:
|
||||
def ccxt_exchanges(ccxt_module: Optional[CcxtModuleType] = None) -> List[str]:
|
||||
"""
|
||||
Return the list of all exchanges known to ccxt
|
||||
"""
|
||||
return ccxt_module.exchanges if ccxt_module is not None else ccxt.exchanges
|
||||
|
||||
|
||||
def available_exchanges(ccxt_module: CcxtModuleType = None) -> List[str]:
|
||||
def available_exchanges(ccxt_module: Optional[CcxtModuleType] = None) -> List[str]:
|
||||
"""
|
||||
Return exchanges available to the bot, i.e. non-bad exchanges in the ccxt list
|
||||
"""
|
||||
@@ -86,7 +87,7 @@ def timeframe_to_msecs(timeframe: str) -> int:
|
||||
return ccxt.Exchange.parse_timeframe(timeframe) * 1000
|
||||
|
||||
|
||||
def timeframe_to_prev_date(timeframe: str, date: datetime = None) -> datetime:
|
||||
def timeframe_to_prev_date(timeframe: str, date: Optional[datetime] = None) -> datetime:
|
||||
"""
|
||||
Use Timeframe and determine the candle start date for this date.
|
||||
Does not round when given a candle start date.
|
||||
@@ -102,7 +103,7 @@ def timeframe_to_prev_date(timeframe: str, date: datetime = None) -> datetime:
|
||||
return datetime.fromtimestamp(new_timestamp, tz=timezone.utc)
|
||||
|
||||
|
||||
def timeframe_to_next_date(timeframe: str, date: datetime = None) -> datetime:
|
||||
def timeframe_to_next_date(timeframe: str, date: Optional[datetime] = None) -> datetime:
|
||||
"""
|
||||
Use Timeframe and determine next candle.
|
||||
:param timeframe: timeframe in string format (e.g. "5m")
|
||||
|
@@ -4,7 +4,7 @@ from datetime import datetime
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
|
||||
from freqtrade.constants import BuySell
|
||||
from freqtrade.enums import MarginMode, TradingMode
|
||||
from freqtrade.enums import MarginMode, PriceType, TradingMode
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.exchange import Exchange
|
||||
from freqtrade.misc import safe_value_fallback2
|
||||
@@ -13,7 +13,7 @@ from freqtrade.misc import safe_value_fallback2
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Gateio(Exchange):
|
||||
class Gate(Exchange):
|
||||
"""
|
||||
Gate.io exchange class. Contains adjustments needed for Freqtrade to work
|
||||
with this exchange.
|
||||
@@ -32,8 +32,15 @@ class Gateio(Exchange):
|
||||
|
||||
_ft_has_futures: Dict = {
|
||||
"needs_trading_fees": True,
|
||||
"tickers_have_bid_ask": False,
|
||||
"fee_cost_in_contracts": False, # Set explicitly to false for clarity
|
||||
"order_props_in_contracts": ['amount', 'filled', 'remaining'],
|
||||
"stop_price_type_field": "price_type",
|
||||
"stop_price_type_value_mapping": {
|
||||
PriceType.LAST: 0,
|
||||
PriceType.MARK: 1,
|
||||
PriceType.INDEX: 2,
|
||||
},
|
||||
}
|
||||
|
||||
_supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
|
||||
@@ -49,6 +56,7 @@ class Gateio(Exchange):
|
||||
if any(v == 'market' for k, v in order_types.items()):
|
||||
raise OperationalException(
|
||||
f'Exchange {self.name} does not support market orders.')
|
||||
super().validate_stop_ordertypes(order_types)
|
||||
|
||||
def _get_params(
|
||||
self,
|
||||
@@ -77,7 +85,7 @@ class Gateio(Exchange):
|
||||
|
||||
if self.trading_mode == TradingMode.FUTURES:
|
||||
# Futures usually don't contain fees in the response.
|
||||
# As such, futures orders on gateio will not contain a fee, which causes
|
||||
# As such, futures orders on gate will not contain a fee, which causes
|
||||
# a repeated "update fee" cycle and wrong calculations.
|
||||
# Therefore we patch the response with fees if it's not available.
|
||||
# An alternative also contianing fees would be
|
@@ -19,5 +19,4 @@ class Hitbtc(Exchange):
|
||||
|
||||
_ft_has: Dict = {
|
||||
"ohlcv_candle_limit": 1000,
|
||||
"ohlcv_params": {"sort": "DESC"}
|
||||
}
|
||||
|
@@ -97,8 +97,8 @@ class Kraken(Exchange):
|
||||
))
|
||||
|
||||
@retrier(retries=0)
|
||||
def stoploss(self, pair: str, amount: float, stop_price: float,
|
||||
order_types: Dict, side: BuySell, leverage: float) -> Dict:
|
||||
def create_stoploss(self, pair: str, amount: float, stop_price: float,
|
||||
order_types: Dict, side: BuySell, leverage: float) -> Dict:
|
||||
"""
|
||||
Creates a stoploss market order.
|
||||
Stoploss market orders is the only stoploss type supported by kraken.
|
||||
@@ -158,7 +158,8 @@ class Kraken(Exchange):
|
||||
self,
|
||||
leverage: float,
|
||||
pair: Optional[str] = None,
|
||||
trading_mode: Optional[TradingMode] = None
|
||||
trading_mode: Optional[TradingMode] = None,
|
||||
accept_fail: bool = False,
|
||||
):
|
||||
"""
|
||||
Kraken set's the leverage as an option in the order object, so we need to
|
||||
|
@@ -36,3 +36,35 @@ class Kucoin(Exchange):
|
||||
'stop': 'loss'
|
||||
})
|
||||
return params
|
||||
|
||||
def create_order(
|
||||
self,
|
||||
*,
|
||||
pair: str,
|
||||
ordertype: str,
|
||||
side: BuySell,
|
||||
amount: float,
|
||||
rate: float,
|
||||
leverage: float,
|
||||
reduceOnly: bool = False,
|
||||
time_in_force: str = 'GTC',
|
||||
) -> Dict:
|
||||
|
||||
res = super().create_order(
|
||||
pair=pair,
|
||||
ordertype=ordertype,
|
||||
side=side,
|
||||
amount=amount,
|
||||
rate=rate,
|
||||
leverage=leverage,
|
||||
reduceOnly=reduceOnly,
|
||||
time_in_force=time_in_force,
|
||||
)
|
||||
# Kucoin returns only the order-id.
|
||||
# ccxt returns status = 'closed' at the moment - which is information ccxt invented.
|
||||
# Since we rely on status heavily, we must set it to 'open' here.
|
||||
# ref: https://github.com/ccxt/ccxt/pull/16674, (https://github.com/ccxt/ccxt/pull/16553)
|
||||
if not self._config['dry_run']:
|
||||
res['type'] = ordertype
|
||||
res['status'] = 'open'
|
||||
return res
|
||||
|
@@ -5,6 +5,7 @@ import ccxt
|
||||
|
||||
from freqtrade.constants import BuySell
|
||||
from freqtrade.enums import CandleType, MarginMode, TradingMode
|
||||
from freqtrade.enums.pricetype import PriceType
|
||||
from freqtrade.exceptions import DDosProtection, OperationalException, TemporaryError
|
||||
from freqtrade.exchange import Exchange, date_minus_candles
|
||||
from freqtrade.exchange.common import retrier
|
||||
@@ -27,6 +28,12 @@ class Okx(Exchange):
|
||||
_ft_has_futures: Dict = {
|
||||
"tickers_have_quoteVolume": False,
|
||||
"fee_cost_in_contracts": True,
|
||||
"stop_price_type_field": "tpTriggerPxType",
|
||||
"stop_price_type_value_mapping": {
|
||||
PriceType.LAST: "last",
|
||||
PriceType.MARK: "index",
|
||||
PriceType.INDEX: "mark",
|
||||
},
|
||||
}
|
||||
|
||||
_supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
|
||||
@@ -118,13 +125,15 @@ class Okx(Exchange):
|
||||
if self.trading_mode != TradingMode.SPOT and self.margin_mode is not None:
|
||||
try:
|
||||
# TODO-lev: Test me properly (check mgnMode passed)
|
||||
self._api.set_leverage(
|
||||
res = self._api.set_leverage(
|
||||
leverage=leverage,
|
||||
symbol=pair,
|
||||
params={
|
||||
"mgnMode": self.margin_mode.value,
|
||||
"posSide": self._get_posSide(side, False),
|
||||
})
|
||||
self._log_exchange_response('set_leverage', res)
|
||||
|
||||
except ccxt.DDoSProtection as e:
|
||||
raise DDosProtection(e) from e
|
||||
except (ccxt.NetworkError, ccxt.ExchangeError) as e:
|
||||
|
@@ -1,4 +1,6 @@
|
||||
from typing import Dict, Optional, TypedDict
|
||||
from typing import Dict, List, Optional, Tuple, TypedDict
|
||||
|
||||
from freqtrade.enums import CandleType
|
||||
|
||||
|
||||
class Ticker(TypedDict):
|
||||
@@ -13,4 +15,16 @@ class Ticker(TypedDict):
|
||||
# Several more - only listing required.
|
||||
|
||||
|
||||
class OrderBook(TypedDict):
|
||||
symbol: str
|
||||
bids: List[Tuple[float, float]]
|
||||
asks: List[Tuple[float, float]]
|
||||
timestamp: Optional[int]
|
||||
datetime: Optional[str]
|
||||
nonce: Optional[int]
|
||||
|
||||
|
||||
Tickers = Dict[str, Ticker]
|
||||
|
||||
# pair, timeframe, candleType, OHLCV, drop last?,
|
||||
OHLCVResponse = Tuple[str, str, CandleType, List, bool]
|
||||
|
125
freqtrade/freqai/RL/Base3ActionRLEnv.py
Normal file
125
freqtrade/freqai/RL/Base3ActionRLEnv.py
Normal file
@@ -0,0 +1,125 @@
|
||||
import logging
|
||||
from enum import Enum
|
||||
|
||||
from gym import spaces
|
||||
|
||||
from freqtrade.freqai.RL.BaseEnvironment import BaseEnvironment, Positions
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Actions(Enum):
|
||||
Neutral = 0
|
||||
Buy = 1
|
||||
Sell = 2
|
||||
|
||||
|
||||
class Base3ActionRLEnv(BaseEnvironment):
|
||||
"""
|
||||
Base class for a 3 action environment
|
||||
"""
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self.actions = Actions
|
||||
|
||||
def set_action_space(self):
|
||||
self.action_space = spaces.Discrete(len(Actions))
|
||||
|
||||
def step(self, action: int):
|
||||
"""
|
||||
Logic for a single step (incrementing one candle in time)
|
||||
by the agent
|
||||
:param: action: int = the action type that the agent plans
|
||||
to take for the current step.
|
||||
:returns:
|
||||
observation = current state of environment
|
||||
step_reward = the reward from `calculate_reward()`
|
||||
_done = if the agent "died" or if the candles finished
|
||||
info = dict passed back to openai gym lib
|
||||
"""
|
||||
self._done = False
|
||||
self._current_tick += 1
|
||||
|
||||
if self._current_tick == self._end_tick:
|
||||
self._done = True
|
||||
|
||||
self._update_unrealized_total_profit()
|
||||
step_reward = self.calculate_reward(action)
|
||||
self.total_reward += step_reward
|
||||
self.tensorboard_log(self.actions._member_names_[action], category="actions")
|
||||
|
||||
trade_type = None
|
||||
if self.is_tradesignal(action):
|
||||
if action == Actions.Buy.value:
|
||||
if self._position == Positions.Short:
|
||||
self._update_total_profit()
|
||||
self._position = Positions.Long
|
||||
trade_type = "long"
|
||||
self._last_trade_tick = self._current_tick
|
||||
elif action == Actions.Sell.value and self.can_short:
|
||||
if self._position == Positions.Long:
|
||||
self._update_total_profit()
|
||||
self._position = Positions.Short
|
||||
trade_type = "short"
|
||||
self._last_trade_tick = self._current_tick
|
||||
elif action == Actions.Sell.value and not self.can_short:
|
||||
self._update_total_profit()
|
||||
self._position = Positions.Neutral
|
||||
trade_type = "neutral"
|
||||
self._last_trade_tick = None
|
||||
else:
|
||||
print("case not defined")
|
||||
|
||||
if trade_type is not None:
|
||||
self.trade_history.append(
|
||||
{'price': self.current_price(), 'index': self._current_tick,
|
||||
'type': trade_type})
|
||||
|
||||
if (self._total_profit < self.max_drawdown or
|
||||
self._total_unrealized_profit < self.max_drawdown):
|
||||
self._done = True
|
||||
|
||||
self._position_history.append(self._position)
|
||||
|
||||
info = dict(
|
||||
tick=self._current_tick,
|
||||
action=action,
|
||||
total_reward=self.total_reward,
|
||||
total_profit=self._total_profit,
|
||||
position=self._position.value,
|
||||
trade_duration=self.get_trade_duration(),
|
||||
current_profit_pct=self.get_unrealized_profit()
|
||||
)
|
||||
|
||||
observation = self._get_observation()
|
||||
|
||||
self._update_history(info)
|
||||
|
||||
return observation, step_reward, self._done, info
|
||||
|
||||
def is_tradesignal(self, action: int) -> bool:
|
||||
"""
|
||||
Determine if the signal is a trade signal
|
||||
e.g.: agent wants a Actions.Buy while it is in a Positions.short
|
||||
"""
|
||||
return (
|
||||
(action == Actions.Buy.value and self._position == Positions.Neutral)
|
||||
or (action == Actions.Sell.value and self._position == Positions.Long)
|
||||
or (action == Actions.Sell.value and self._position == Positions.Neutral
|
||||
and self.can_short)
|
||||
or (action == Actions.Buy.value and self._position == Positions.Short
|
||||
and self.can_short)
|
||||
)
|
||||
|
||||
def _is_valid(self, action: int) -> bool:
|
||||
"""
|
||||
Determine if the signal is valid.
|
||||
e.g.: agent wants a Actions.Sell while it is in a Positions.Long
|
||||
"""
|
||||
if self.can_short:
|
||||
return action in [Actions.Buy.value, Actions.Sell.value, Actions.Neutral.value]
|
||||
else:
|
||||
if action == Actions.Sell.value and self._position != Positions.Long:
|
||||
return False
|
||||
return True
|
@@ -46,9 +46,9 @@ class Base4ActionRLEnv(BaseEnvironment):
|
||||
self._done = True
|
||||
|
||||
self._update_unrealized_total_profit()
|
||||
|
||||
step_reward = self.calculate_reward(action)
|
||||
self.total_reward += step_reward
|
||||
self.tensorboard_log(self.actions._member_names_[action], category="actions")
|
||||
|
||||
trade_type = None
|
||||
if self.is_tradesignal(action):
|
||||
@@ -88,7 +88,8 @@ class Base4ActionRLEnv(BaseEnvironment):
|
||||
{'price': self.current_price(), 'index': self._current_tick,
|
||||
'type': trade_type})
|
||||
|
||||
if self._total_profit < 1 - self.rl_config.get('max_training_drawdown_pct', 0.8):
|
||||
if (self._total_profit < self.max_drawdown or
|
||||
self._total_unrealized_profit < self.max_drawdown):
|
||||
self._done = True
|
||||
|
||||
self._position_history.append(self._position)
|
||||
|
@@ -49,6 +49,7 @@ class Base5ActionRLEnv(BaseEnvironment):
|
||||
self._update_unrealized_total_profit()
|
||||
step_reward = self.calculate_reward(action)
|
||||
self.total_reward += step_reward
|
||||
self.tensorboard_log(self.actions._member_names_[action], category="actions")
|
||||
|
||||
trade_type = None
|
||||
if self.is_tradesignal(action):
|
||||
|
@@ -2,7 +2,7 @@ import logging
|
||||
import random
|
||||
from abc import abstractmethod
|
||||
from enum import Enum
|
||||
from typing import Optional, Type
|
||||
from typing import Optional, Type, Union
|
||||
|
||||
import gym
|
||||
import numpy as np
|
||||
@@ -11,9 +11,6 @@ from gym import spaces
|
||||
from gym.utils import seeding
|
||||
from pandas import DataFrame
|
||||
|
||||
from freqtrade.data.dataprovider import DataProvider
|
||||
from freqtrade.enums import RunMode
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -47,8 +44,9 @@ class BaseEnvironment(gym.Env):
|
||||
|
||||
def __init__(self, df: DataFrame = DataFrame(), prices: DataFrame = DataFrame(),
|
||||
reward_kwargs: dict = {}, window_size=10, starting_point=True,
|
||||
id: str = 'baseenv-1', seed: int = 1, config: dict = {},
|
||||
dp: Optional[DataProvider] = None):
|
||||
id: str = 'baseenv-1', seed: int = 1, config: dict = {}, live: bool = False,
|
||||
fee: float = 0.0015, can_short: bool = False, pair: str = "",
|
||||
df_raw: DataFrame = DataFrame()):
|
||||
"""
|
||||
Initializes the training/eval environment.
|
||||
:param df: dataframe of features
|
||||
@@ -59,32 +57,33 @@ class BaseEnvironment(gym.Env):
|
||||
:param id: string id of the environment (used in backend for multiprocessed env)
|
||||
:param seed: Sets the seed of the environment higher in the gym.Env object
|
||||
:param config: Typical user configuration file
|
||||
:param dp: dataprovider from freqtrade
|
||||
:param live: Whether or not this environment is active in dry/live/backtesting
|
||||
:param fee: The fee to use for environmental interactions.
|
||||
:param can_short: Whether or not the environment can short
|
||||
"""
|
||||
self.config = config
|
||||
self.rl_config = config['freqai']['rl_config']
|
||||
self.add_state_info = self.rl_config.get('add_state_info', False)
|
||||
self.id = id
|
||||
self.seed(seed)
|
||||
self.reset_env(df, prices, window_size, reward_kwargs, starting_point)
|
||||
self.max_drawdown = 1 - self.rl_config.get('max_training_drawdown_pct', 0.8)
|
||||
self.compound_trades = config['stake_amount'] == 'unlimited'
|
||||
self.config: dict = config
|
||||
self.rl_config: dict = config['freqai']['rl_config']
|
||||
self.add_state_info: bool = self.rl_config.get('add_state_info', False)
|
||||
self.id: str = id
|
||||
self.max_drawdown: float = 1 - self.rl_config.get('max_training_drawdown_pct', 0.8)
|
||||
self.compound_trades: bool = config['stake_amount'] == 'unlimited'
|
||||
self.pair: str = pair
|
||||
self.raw_features: DataFrame = df_raw
|
||||
if self.config.get('fee', None) is not None:
|
||||
self.fee = self.config['fee']
|
||||
elif dp is not None:
|
||||
self.fee = dp._exchange.get_fee(symbol=dp.current_whitelist()[0]) # type: ignore
|
||||
else:
|
||||
self.fee = 0.0015
|
||||
self.fee = fee
|
||||
|
||||
# set here to default 5Ac, but all children envs can override this
|
||||
self.actions: Type[Enum] = BaseActions
|
||||
self.custom_info: dict = {}
|
||||
self.live: bool = False
|
||||
if dp:
|
||||
self.live = dp.runmode in (RunMode.DRY_RUN, RunMode.LIVE)
|
||||
self.tensorboard_metrics: dict = {}
|
||||
self.can_short: bool = can_short
|
||||
self.live: bool = live
|
||||
if not self.live and self.add_state_info:
|
||||
self.add_state_info = False
|
||||
logger.warning("add_state_info is not available in backtesting. Deactivating.")
|
||||
self.seed(seed)
|
||||
self.reset_env(df, prices, window_size, reward_kwargs, starting_point)
|
||||
|
||||
def reset_env(self, df: DataFrame, prices: DataFrame, window_size: int,
|
||||
reward_kwargs: dict, starting_point=True):
|
||||
@@ -97,13 +96,12 @@ class BaseEnvironment(gym.Env):
|
||||
:param reward_kwargs: extra config settings assigned by user in `rl_config`
|
||||
:param starting_point: start at edge of window or not
|
||||
"""
|
||||
self.df = df
|
||||
self.signal_features = self.df
|
||||
self.prices = prices
|
||||
self.window_size = window_size
|
||||
self.starting_point = starting_point
|
||||
self.rr = reward_kwargs["rr"]
|
||||
self.profit_aim = reward_kwargs["profit_aim"]
|
||||
self.signal_features: DataFrame = df
|
||||
self.prices: DataFrame = prices
|
||||
self.window_size: int = window_size
|
||||
self.starting_point: bool = starting_point
|
||||
self.rr: float = reward_kwargs["rr"]
|
||||
self.profit_aim: float = reward_kwargs["profit_aim"]
|
||||
|
||||
# # spaces
|
||||
if self.add_state_info:
|
||||
@@ -139,20 +137,46 @@ class BaseEnvironment(gym.Env):
|
||||
self.np_random, seed = seeding.np_random(seed)
|
||||
return [seed]
|
||||
|
||||
def tensorboard_log(self, metric: str, value: Optional[Union[int, float]] = None,
|
||||
inc: Optional[bool] = None, category: str = "custom"):
|
||||
"""
|
||||
Function builds the tensorboard_metrics dictionary
|
||||
to be parsed by the TensorboardCallback. This
|
||||
function is designed for tracking incremented objects,
|
||||
events, actions inside the training environment.
|
||||
For example, a user can call this to track the
|
||||
frequency of occurence of an `is_valid` call in
|
||||
their `calculate_reward()`:
|
||||
|
||||
def calculate_reward(self, action: int) -> float:
|
||||
if not self._is_valid(action):
|
||||
self.tensorboard_log("invalid")
|
||||
return -2
|
||||
|
||||
:param metric: metric to be tracked and incremented
|
||||
:param value: `metric` value
|
||||
:param inc: (deprecated) sets whether the `value` is incremented or not
|
||||
:param category: `metric` category
|
||||
"""
|
||||
increment = True if value is None else False
|
||||
value = 1 if increment else value
|
||||
|
||||
if category not in self.tensorboard_metrics:
|
||||
self.tensorboard_metrics[category] = {}
|
||||
|
||||
if not increment or metric not in self.tensorboard_metrics[category]:
|
||||
self.tensorboard_metrics[category][metric] = value
|
||||
else:
|
||||
self.tensorboard_metrics[category][metric] += value
|

    def reset_tensorboard_log(self):
        self.tensorboard_metrics = {}

    def reset(self):
        """
        Reset is called at the beginning of every episode
        """
        # custom_info is used for episodic reports and tensorboard logging
        self.custom_info["Invalid"] = 0
        self.custom_info["Hold"] = 0
        self.custom_info["Unknown"] = 0
        self.custom_info["pnl_factor"] = 0
        self.custom_info["duration_factor"] = 0
        self.custom_info["reward_exit"] = 0
        self.custom_info["reward_hold"] = 0
        for action in self.actions:
            self.custom_info[f"{action.name}"] = 0
        self.reset_tensorboard_log()

        self._done = False

@@ -195,7 +219,7 @@ class BaseEnvironment(gym.Env):
        """
        features_window = self.signal_features[(
            self._current_tick - self.window_size):self._current_tick]
        if self.add_state_info and self.live:
        if self.add_state_info:
            features_and_state = DataFrame(np.zeros((len(features_window), 3)),
                                           columns=['current_profit_pct',
                                                    'position',

@@ -1,3 +1,4 @@
import copy
import importlib
import logging
from abc import abstractmethod
@@ -50,6 +51,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
        self.eval_callback: Optional[EvalCallback] = None
        self.model_type = self.freqai_info['rl_config']['model_type']
        self.rl_config = self.freqai_info['rl_config']
        self.df_raw: DataFrame = DataFrame()
        self.continual_learning = self.freqai_info.get('continual_learning', False)
        if self.model_type in SB3_MODELS:
            import_str = 'stable_baselines3'
@@ -107,10 +109,12 @@ class BaseReinforcementLearningModel(IFreqaiModel):

        data_dictionary: Dict[str, Any] = dk.make_train_test_datasets(
            features_filtered, labels_filtered)
        self.df_raw = copy.deepcopy(data_dictionary["train_features"])
        dk.fit_labels()  # FIXME useless for now, but just satiating append methods

        # normalize all data based on train_dataset only
        prices_train, prices_test = self.build_ohlc_price_dataframes(dk.data_dictionary, pair, dk)

        data_dictionary = dk.normalize_data(data_dictionary)

        # data cleaning/analysis
@@ -143,18 +147,10 @@ class BaseReinforcementLearningModel(IFreqaiModel):
        train_df = data_dictionary["train_features"]
        test_df = data_dictionary["test_features"]

        self.train_env = self.MyRLEnv(df=train_df,
                                      prices=prices_train,
                                      window_size=self.CONV_WIDTH,
                                      reward_kwargs=self.reward_params,
                                      config=self.config,
                                      dp=self.data_provider)
        self.eval_env = Monitor(self.MyRLEnv(df=test_df,
                                             prices=prices_test,
                                             window_size=self.CONV_WIDTH,
                                             reward_kwargs=self.reward_params,
                                             config=self.config,
                                             dp=self.data_provider))
        env_info = self.pack_env_dict(dk.pair)

        self.train_env = self.MyRLEnv(df=train_df, prices=prices_train, **env_info)
        self.eval_env = Monitor(self.MyRLEnv(df=test_df, prices=prices_test, **env_info))
        self.eval_callback = EvalCallback(self.eval_env, deterministic=True,
                                          render=False, eval_freq=len(train_df),
                                          best_model_save_path=str(dk.data_path))
@@ -162,6 +158,23 @@ class BaseReinforcementLearningModel(IFreqaiModel):
        actions = self.train_env.get_actions()
        self.tensorboard_callback = TensorboardCallback(verbose=1, actions=actions)

    def pack_env_dict(self, pair: str) -> Dict[str, Any]:
        """
        Create dictionary of environment arguments
        """
        env_info = {"window_size": self.CONV_WIDTH,
                    "reward_kwargs": self.reward_params,
                    "config": self.config,
                    "live": self.live,
                    "can_short": self.can_short,
                    "pair": pair,
                    "df_raw": self.df_raw}
        if self.data_provider:
            env_info["fee"] = self.data_provider._exchange \
                .get_fee(symbol=self.data_provider.current_whitelist()[0])  # type: ignore

        return env_info

    @abstractmethod
    def fit(self, data_dictionary: Dict[str, Any], dk: FreqaiDataKitchen, **kwargs):
        """
@@ -222,6 +235,9 @@ class BaseReinforcementLearningModel(IFreqaiModel):
        filtered_dataframe, _ = dk.filter_features(
            unfiltered_df, dk.training_features_list, training_filter=False
        )

        filtered_dataframe = self.drop_ohlc_from_df(filtered_dataframe, dk)

        filtered_dataframe = dk.normalize_data_from_metadata(filtered_dataframe)
        dk.data_dictionary["prediction_features"] = filtered_dataframe

@@ -271,29 +287,54 @@ class BaseReinforcementLearningModel(IFreqaiModel):

        # price data for model training and evaluation
        tf = self.config['timeframe']
        ohlc_list = [f'%-{pair}raw_open_{tf}', f'%-{pair}raw_low_{tf}',
                     f'%-{pair}raw_high_{tf}', f'%-{pair}raw_close_{tf}']
        rename_dict = {f'%-{pair}raw_open_{tf}': 'open', f'%-{pair}raw_low_{tf}': 'low',
                       f'%-{pair}raw_high_{tf}': 'high', f'%-{pair}raw_close_{tf}': 'close'}
        rename_dict = {'%-raw_open': 'open', '%-raw_low': 'low',
                       '%-raw_high': 'high', '%-raw_close': 'close'}
        rename_dict_old = {f'%-{pair}raw_open_{tf}': 'open', f'%-{pair}raw_low_{tf}': 'low',
                           f'%-{pair}raw_high_{tf}': 'high', f'%-{pair}raw_close_{tf}': 'close'}

        prices_train = train_df.filter(rename_dict.keys(), axis=1)
        prices_train_old = train_df.filter(rename_dict_old.keys(), axis=1)
        if prices_train.empty or not prices_train_old.empty:
            if not prices_train_old.empty:
                prices_train = prices_train_old
                rename_dict = rename_dict_old
                logger.warning("Reinforcement learning module didn't find the correct raw prices "
                               'assigned in feature_engineering_standard(). '
                               'Please assign them with:\n'
                               'dataframe["%-raw_close"] = dataframe["close"]\n'
                               'dataframe["%-raw_open"] = dataframe["open"]\n'
                               'dataframe["%-raw_high"] = dataframe["high"]\n'
                               'dataframe["%-raw_low"] = dataframe["low"]\n'
                               'inside `feature_engineering_standard()`')
            elif prices_train.empty:
                raise OperationalException("No prices found, please follow log warning "
                                           "instructions to correct the strategy.")

        prices_train = train_df.filter(ohlc_list, axis=1)
        if prices_train.empty:
            raise OperationalException("Reinforcement learning module didn't find the raw prices "
                                       'assigned in populate_any_indicators. Please assign them '
                                       'with:\n'
                                       'informative[f"%-{pair}raw_close"] = informative["close"]\n'
                                       'informative[f"%-{pair}raw_open"] = informative["open"]\n'
                                       'informative[f"%-{pair}raw_high"] = informative["high"]\n'
                                       'informative[f"%-{pair}raw_low"] = informative["low"]\n')
        prices_train.rename(columns=rename_dict, inplace=True)
        prices_train.reset_index(drop=True)

        prices_test = test_df.filter(ohlc_list, axis=1)
        prices_test = test_df.filter(rename_dict.keys(), axis=1)
        prices_test.rename(columns=rename_dict, inplace=True)
        prices_test.reset_index(drop=True)

        train_df = self.drop_ohlc_from_df(train_df, dk)
        test_df = self.drop_ohlc_from_df(test_df, dk)

        return prices_train, prices_test
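For reference, a minimal strategy-side sketch of the raw-price assignment the warning above asks for (method body illustrative; the column names are taken verbatim from the log message):

    def feature_engineering_standard(self, dataframe, metadata, **kwargs):
        # unnormalized prices the RL environment uses to build its OHLC view
        dataframe["%-raw_close"] = dataframe["close"]
        dataframe["%-raw_open"] = dataframe["open"]
        dataframe["%-raw_high"] = dataframe["high"]
        dataframe["%-raw_low"] = dataframe["low"]
        return dataframe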

    def drop_ohlc_from_df(self, df: DataFrame, dk: FreqaiDataKitchen):
        """
        Given a dataframe, drop the ohlc data
        """
        drop_list = ['%-raw_open', '%-raw_low', '%-raw_high', '%-raw_close']

        if self.rl_config["drop_ohlc_from_features"]:
            df.drop(drop_list, axis=1, inplace=True)
            feature_list = dk.training_features_list
            dk.training_features_list = [e for e in feature_list if e not in drop_list]

        return df
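The flag read above lives under `rl_config`; a minimal sketch of the relevant config, expressed as a Python dict fragment (in config.json the value would be a lowercase `true`):

    config = {
        "freqai": {
            "rl_config": {
                "drop_ohlc_from_features": True,
            }
        }
    }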

    def load_model_from_disk(self, dk: FreqaiDataKitchen) -> Any:
        """
        Can be used by user if they are trying to limit_ram_usage *and*
@@ -326,7 +367,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
        sets a custom reward based on profit and trade duration.
        """

        def calculate_reward(self, action: int) -> float:
        def calculate_reward(self, action: int) -> float:  # noqa: C901
            """
            An example reward function. This is the one function that users will likely
            wish to inject their own creativity into.
@@ -342,10 +383,19 @@ class BaseReinforcementLearningModel(IFreqaiModel):
            pnl = self.get_unrealized_profit()
            factor = 100.

            # you can use feature values from dataframe
            rsi_now = self.raw_features[f"%-rsi-period-10_shift-1_{self.pair}_"
                                        f"{self.config['timeframe']}"].iloc[self._current_tick]

            # reward agent for entering trades
            if (action in (Actions.Long_enter.value, Actions.Short_enter.value)
                    and self._position == Positions.Neutral):
                return 25
                if rsi_now < 40:
                    factor = 40 / rsi_now
                else:
                    factor = 1
                return 25 * factor

            # discourage agent from not entering trades
            if action == Actions.Neutral.value and self._position == Positions.Neutral:
                return -1
@@ -383,8 +433,8 @@ class BaseReinforcementLearningModel(IFreqaiModel):

def make_env(MyRLEnv: Type[gym.Env], env_id: str, rank: int,
             seed: int, train_df: DataFrame, price: DataFrame,
             reward_params: Dict[str, int], window_size: int, monitor: bool = False,
             config: Dict[str, Any] = {}) -> Callable:
             monitor: bool = False,
             env_info: Dict[str, Any] = {}) -> Callable:
    """
    Utility function for multiprocessed env.

@@ -392,13 +442,14 @@ def make_env(MyRLEnv: Type[gym.Env], env_id: str, rank: int,
    :param num_env: (int) the number of environments you wish to have in subprocesses
    :param seed: (int) the initial seed for RNG
    :param rank: (int) index of the subprocess
    :param env_info: (dict) all required arguments to instantiate the environment.
    :return: (Callable)
    """

    def _init() -> gym.Env:

        env = MyRLEnv(df=train_df, prices=price, window_size=window_size,
                      reward_kwargs=reward_params, id=env_id, seed=seed + rank, config=config)
        env = MyRLEnv(df=train_df, prices=price, id=env_id, seed=seed + rank,
                      **env_info)
        if monitor:
            env = Monitor(env)
        return env
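`make_env` returns the `_init` closure so each worker process can construct its own environment; a usage sketch mirroring `ReinforcementLearner_multiproc` further below (names as used there):

    env_info = self.pack_env_dict(dk.pair)
    self.train_env = SubprocVecEnv([make_env(self.MyRLEnv, "train_env", i, 1,
                                             train_df, prices_train,
                                             monitor=True, env_info=env_info)
                                    for i in range(self.max_threads)])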

@@ -42,19 +42,16 @@ class TensorboardCallback(BaseCallback):
        )

    def _on_step(self) -> bool:
        custom_info = self.training_env.get_attr("custom_info")[0]
        self.logger.record("_state/position", self.locals["infos"][0]["position"])
        self.logger.record("_state/trade_duration", self.locals["infos"][0]["trade_duration"])
        self.logger.record("_state/current_profit_pct", self.locals["infos"]
                           [0]["current_profit_pct"])
        self.logger.record("_reward/total_profit", self.locals["infos"][0]["total_profit"])
        self.logger.record("_reward/total_reward", self.locals["infos"][0]["total_reward"])
        self.logger.record_mean("_reward/mean_trade_duration", self.locals["infos"]
                                [0]["trade_duration"])
        self.logger.record("_actions/action", self.locals["infos"][0]["action"])
        self.logger.record("_actions/_Invalid", custom_info["Invalid"])
        self.logger.record("_actions/_Unknown", custom_info["Unknown"])
        self.logger.record("_actions/Hold", custom_info["Hold"])
        for action in self.actions:
            self.logger.record(f"_actions/{action.name}", custom_info[action.name])

        local_info = self.locals["infos"][0]
        tensorboard_metrics = self.training_env.get_attr("tensorboard_metrics")[0]

        for metric in local_info:
            if metric not in ["episode", "terminal_observation"]:
                self.logger.record(f"info/{metric}", local_info[metric])

        for category in tensorboard_metrics:
            for metric in tensorboard_metrics[category]:
                self.logger.record(f"{category}/{metric}", tensorboard_metrics[category][metric])

        return True

@@ -95,9 +95,14 @@ class BaseClassifierModel(IFreqaiModel):
        self.data_cleaning_predict(dk)

        predictions = self.model.predict(dk.data_dictionary["prediction_features"])
        if self.CONV_WIDTH == 1:
            predictions = np.reshape(predictions, (-1, len(dk.label_list)))

        pred_df = DataFrame(predictions, columns=dk.label_list)

        predictions_prob = self.model.predict_proba(dk.data_dictionary["prediction_features"])
        if self.CONV_WIDTH == 1:
            predictions_prob = np.reshape(predictions_prob, (-1, len(self.model.classes_)))
        pred_df_prob = DataFrame(predictions_prob, columns=self.model.classes_)

        pred_df = pd.concat([pred_df, pred_df_prob], axis=1)

@@ -95,6 +95,9 @@ class BaseRegressionModel(IFreqaiModel):
        self.data_cleaning_predict(dk)

        predictions = self.model.predict(dk.data_dictionary["prediction_features"])
        if self.CONV_WIDTH == 1:
            predictions = np.reshape(predictions, (-1, len(dk.label_list)))

        pred_df = DataFrame(predictions, columns=dk.label_list)

        pred_df = dk.denormalize_labels_from_metadata(pred_df)

@@ -59,7 +59,7 @@ class FreqaiDataDrawer:
    Juha Nykänen @suikula, Wagner Costa @wagnercosta, Johan Vlugt @Jooopieeert
    """

    def __init__(self, full_path: Path, config: Config, follow_mode: bool = False):
    def __init__(self, full_path: Path, config: Config):

        self.config = config
        self.freqai_info = config.get("freqai", {})
@@ -72,21 +72,13 @@ class FreqaiDataDrawer:
        self.model_return_values: Dict[str, DataFrame] = {}
        self.historic_data: Dict[str, Dict[str, DataFrame]] = {}
        self.historic_predictions: Dict[str, DataFrame] = {}
        self.follower_dict: Dict[str, pair_info] = {}
        self.full_path = full_path
        self.follower_name: str = self.config.get("bot_name", "follower1")
        self.follower_dict_path = Path(
            self.full_path / f"follower_dictionary-{self.follower_name}.json"
        )
        self.historic_predictions_path = Path(self.full_path / "historic_predictions.pkl")
        self.historic_predictions_bkp_path = Path(
            self.full_path / "historic_predictions.backup.pkl")
        self.pair_dictionary_path = Path(self.full_path / "pair_dictionary.json")
        self.global_metadata_path = Path(self.full_path / "global_metadata.json")
        self.metric_tracker_path = Path(self.full_path / "metric_tracker.json")
        self.follow_mode = follow_mode
        if follow_mode:
            self.create_follower_dict()
        self.load_drawer_from_disk()
        self.load_historic_predictions_from_disk()
        self.metric_tracker: Dict[str, Dict[str, Dict[str, list]]] = {}
@@ -134,7 +126,7 @@ class FreqaiDataDrawer:
        """
        exists = self.global_metadata_path.is_file()
        if exists:
            with open(self.global_metadata_path, "r") as fp:
            with self.global_metadata_path.open("r") as fp:
                metadata_dict = rapidjson.load(fp, number_mode=rapidjson.NM_NATIVE)
                return metadata_dict
        return {}
@@ -147,15 +139,10 @@ class FreqaiDataDrawer:
        """
        exists = self.pair_dictionary_path.is_file()
        if exists:
            with open(self.pair_dictionary_path, "r") as fp:
            with self.pair_dictionary_path.open("r") as fp:
                self.pair_dict = rapidjson.load(fp, number_mode=rapidjson.NM_NATIVE)
        elif not self.follow_mode:
            logger.info("Could not find existing datadrawer, starting from scratch")
        else:
            logger.warning(
                f"Follower could not find pair_dictionary at {self.full_path} "
                "sending null values back to strategy"
            )
            logger.info("Could not find existing datadrawer, starting from scratch")

    def load_metric_tracker_from_disk(self):
        """
@@ -165,7 +152,7 @@ class FreqaiDataDrawer:
        if self.freqai_info.get('write_metrics_to_disk', False):
            exists = self.metric_tracker_path.is_file()
            if exists:
                with open(self.metric_tracker_path, "r") as fp:
                with self.metric_tracker_path.open("r") as fp:
                    self.metric_tracker = rapidjson.load(fp, number_mode=rapidjson.NM_NATIVE)
                logger.info("Loading existing metric tracker from disk.")
            else:
@@ -179,7 +166,7 @@ class FreqaiDataDrawer:
        exists = self.historic_predictions_path.is_file()
        if exists:
            try:
                with open(self.historic_predictions_path, "rb") as fp:
                with self.historic_predictions_path.open("rb") as fp:
                    self.historic_predictions = cloudpickle.load(fp)
                logger.info(
                    f"Found existing historic predictions at {self.full_path}, but beware "
@@ -189,17 +176,12 @@ class FreqaiDataDrawer:
            except EOFError:
                logger.warning(
                    'Historical prediction file was corrupted. Trying to load backup file.')
                with open(self.historic_predictions_bkp_path, "rb") as fp:
                with self.historic_predictions_bkp_path.open("rb") as fp:
                    self.historic_predictions = cloudpickle.load(fp)
                logger.warning('FreqAI successfully loaded the backup historical predictions file.')

        elif not self.follow_mode:
            logger.info("Could not find existing historic_predictions, starting from scratch")
        else:
            logger.warning(
                f"Follower could not find historic predictions at {self.full_path} "
                "sending null values back to strategy"
            )
            logger.info("Could not find existing historic_predictions, starting from scratch")

        return exists

@@ -207,7 +189,7 @@ class FreqaiDataDrawer:
        """
        Save historic predictions pickle to disk
        """
        with open(self.historic_predictions_path, "wb") as fp:
        with self.historic_predictions_path.open("wb") as fp:
            cloudpickle.dump(self.historic_predictions, fp, protocol=cloudpickle.DEFAULT_PROTOCOL)

        # create a backup
@@ -218,58 +200,33 @@ class FreqaiDataDrawer:
        Save metric tracker of all pair metrics collected.
        """
        with self.save_lock:
            with open(self.metric_tracker_path, 'w') as fp:
            with self.metric_tracker_path.open('w') as fp:
                rapidjson.dump(self.metric_tracker, fp, default=self.np_encoder,
                               number_mode=rapidjson.NM_NATIVE)

    def save_drawer_to_disk(self):
    def save_drawer_to_disk(self) -> None:
        """
        Save data drawer full of all pair model metadata in present model folder.
        """
        with self.save_lock:
            with open(self.pair_dictionary_path, 'w') as fp:
            with self.pair_dictionary_path.open('w') as fp:
                rapidjson.dump(self.pair_dict, fp, default=self.np_encoder,
                               number_mode=rapidjson.NM_NATIVE)

    def save_follower_dict_to_disk(self):
        """
        Save follower dictionary to disk (used by strategy for persistent prediction targets)
        """
        with open(self.follower_dict_path, "w") as fp:
            rapidjson.dump(self.follower_dict, fp, default=self.np_encoder,
                           number_mode=rapidjson.NM_NATIVE)

    def save_global_metadata_to_disk(self, metadata: Dict[str, Any]):
        """
        Save global metadata json to disk
        """
        with self.save_lock:
            with open(self.global_metadata_path, 'w') as fp:
            with self.global_metadata_path.open('w') as fp:
                rapidjson.dump(metadata, fp, default=self.np_encoder,
                               number_mode=rapidjson.NM_NATIVE)

    def create_follower_dict(self):
        """
        Create a dictionary for each follower to maintain unique persistent prediction targets
        """

        whitelist_pairs = self.config.get("exchange", {}).get("pair_whitelist")

        exists = self.follower_dict_path.is_file()

        if exists:
            logger.info("Found an existing follower dictionary")

        for pair in whitelist_pairs:
            self.follower_dict[pair] = {}

        self.save_follower_dict_to_disk()

    def np_encoder(self, object):
        if isinstance(object, np.generic):
            return object.item()

    def get_pair_dict_info(self, pair: str) -> Tuple[str, int, bool]:
    def get_pair_dict_info(self, pair: str) -> Tuple[str, int]:
        """
        Locate and load existing model metadata from persistent storage. If not located,
        create a new one and append the current pair to it and prepare it for its first
@@ -278,32 +235,19 @@ class FreqaiDataDrawer:
        :return:
            model_filename: str = unique filename used for loading persistent objects from disk
            trained_timestamp: int = the last time the coin was trained
            return_null_array: bool = Follower could not find pair metadata
        """

        pair_dict = self.pair_dict.get(pair)
        data_path_set = self.pair_dict.get(pair, self.empty_pair_dict).get("data_path", "")
        return_null_array = False

        if pair_dict:
            model_filename = pair_dict["model_filename"]
            trained_timestamp = pair_dict["trained_timestamp"]
        elif not self.follow_mode:
        else:
            self.pair_dict[pair] = self.empty_pair_dict.copy()
            model_filename = ""
            trained_timestamp = 0

        if not data_path_set and self.follow_mode:
            logger.warning(
                f"Follower could not find current pair {pair} in "
                f"pair_dictionary at path {self.full_path}, sending null values "
                "back to strategy."
            )
            trained_timestamp = 0
            model_filename = ''
            return_null_array = True

        return model_filename, trained_timestamp, return_null_array
        return model_filename, trained_timestamp

    def set_pair_dict_info(self, metadata: dict) -> None:
        pair_in_dict = self.pair_dict.get(metadata["pair"])
@@ -311,7 +255,6 @@ class FreqaiDataDrawer:
            return
        else:
            self.pair_dict[metadata["pair"]] = self.empty_pair_dict.copy()

            return

    def set_initial_return_values(self, pair: str, pred_df: DataFrame) -> None:
@@ -423,6 +366,12 @@ class FreqaiDataDrawer:

    def purge_old_models(self) -> None:

        num_keep = self.freqai_info["purge_old_models"]
        if not num_keep:
            return
        elif type(num_keep) == bool:
            num_keep = 2

        model_folders = [x for x in self.full_path.iterdir() if x.is_dir()]

        pattern = re.compile(r"sub-train-(\w+)_(\d{10})")
@@ -445,11 +394,11 @@ class FreqaiDataDrawer:
                delete_dict[coin]["timestamps"][int(timestamp)] = dir

        for coin in delete_dict:
            if delete_dict[coin]["num_folders"] > 2:
            if delete_dict[coin]["num_folders"] > num_keep:
                sorted_dict = collections.OrderedDict(
                    sorted(delete_dict[coin]["timestamps"].items())
                )
                num_delete = len(sorted_dict) - 2
                num_delete = len(sorted_dict) - num_keep
                deleted = 0
                for k, v in sorted_dict.items():
                    if deleted >= num_delete:
@@ -458,12 +407,6 @@ class FreqaiDataDrawer:
                        shutil.rmtree(v)
                        deleted += 1
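As the normalization above shows, `purge_old_models` now accepts either a boolean (kept for backwards compatibility and normalized to keeping 2 model folders per pair) or an explicit integer count; a config sketch as a Python dict fragment (value illustrative):

    config = {
        "freqai": {
            "purge_old_models": 2,  # keep the 2 newest sub-train folders per pair
        }
    }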

    def update_follower_metadata(self):
        # follower needs to load from disk to get any changes made by leader to pair_dict
        self.load_drawer_from_disk()
        if self.config.get("freqai", {}).get("purge_old_models", False):
            self.purge_old_models()

    def save_metadata(self, dk: FreqaiDataKitchen) -> None:
        """
        Saves only metadata for backtesting studies if user prefers
@@ -481,7 +424,7 @@ class FreqaiDataDrawer:
        dk.data["training_features_list"] = list(dk.data_dictionary["train_features"].columns)
        dk.data["label_list"] = dk.label_list

        with open(save_path / f"{dk.model_filename}_metadata.json", "w") as fp:
        with (save_path / f"{dk.model_filename}_metadata.json").open("w") as fp:
            rapidjson.dump(dk.data, fp, default=self.np_encoder, number_mode=rapidjson.NM_NATIVE)

        return
@@ -514,7 +457,7 @@ class FreqaiDataDrawer:
        dk.data["training_features_list"] = dk.training_features_list
        dk.data["label_list"] = dk.label_list
        # store the metadata
        with open(save_path / f"{dk.model_filename}_metadata.json", "w") as fp:
        with (save_path / f"{dk.model_filename}_metadata.json").open("w") as fp:
            rapidjson.dump(dk.data, fp, default=self.np_encoder, number_mode=rapidjson.NM_NATIVE)

        # save the train data to file so we can check preds for area of applicability later
@@ -528,7 +471,7 @@ class FreqaiDataDrawer:

        if self.freqai_info["feature_parameters"].get("principal_component_analysis"):
            cloudpickle.dump(
                dk.pca, open(dk.data_path / f"{dk.model_filename}_pca_object.pkl", "wb")
                dk.pca, (dk.data_path / f"{dk.model_filename}_pca_object.pkl").open("wb")
            )

        self.model_dictionary[coin] = model
@@ -548,7 +491,7 @@ class FreqaiDataDrawer:
        Load only metadata into datakitchen to increase performance during
        presaved backtesting (prediction file loading).
        """
        with open(dk.data_path / f"{dk.model_filename}_metadata.json", "r") as fp:
        with (dk.data_path / f"{dk.model_filename}_metadata.json").open("r") as fp:
            dk.data = rapidjson.load(fp, number_mode=rapidjson.NM_NATIVE)
            dk.training_features_list = dk.data["training_features_list"]
            dk.label_list = dk.data["label_list"]
@@ -571,7 +514,7 @@ class FreqaiDataDrawer:
            dk.data = self.meta_data_dictionary[coin]["meta_data"]
            dk.data_dictionary["train_features"] = self.meta_data_dictionary[coin]["train_df"]
        else:
            with open(dk.data_path / f"{dk.model_filename}_metadata.json", "r") as fp:
            with (dk.data_path / f"{dk.model_filename}_metadata.json").open("r") as fp:
                dk.data = rapidjson.load(fp, number_mode=rapidjson.NM_NATIVE)

            dk.data_dictionary["train_features"] = pd.read_pickle(
@@ -609,7 +552,7 @@ class FreqaiDataDrawer:

        if self.config["freqai"]["feature_parameters"]["principal_component_analysis"]:
            dk.pca = cloudpickle.load(
                open(dk.data_path / f"{dk.model_filename}_pca_object.pkl", "rb")
                (dk.data_path / f"{dk.model_filename}_pca_object.pkl").open("rb")
            )

        return model
@@ -627,12 +570,12 @@ class FreqaiDataDrawer:

        for pair in dk.all_pairs:
            for tf in feat_params.get("include_timeframes"):

                hist_df = history_data[pair][tf]
                # check if newest candle is already appended
                df_dp = strategy.dp.get_pair_dataframe(pair, tf)
                if len(df_dp.index) == 0:
                    continue
                if str(history_data[pair][tf].iloc[-1]["date"]) == str(
                if str(hist_df.iloc[-1]["date"]) == str(
                        df_dp.iloc[-1:]["date"].iloc[-1]
                ):
                    continue
@@ -640,21 +583,30 @@ class FreqaiDataDrawer:
                try:
                    index = (
                        df_dp.loc[
                            df_dp["date"] == history_data[pair][tf].iloc[-1]["date"]
                            df_dp["date"] == hist_df.iloc[-1]["date"]
                        ].index[0]
                        + 1
                    )
                except IndexError:
                    logger.warning(
                        f"Unable to update pair history for {pair}. "
                        "If this does not resolve itself after 1 additional candle, "
                        "please report the error to #freqai discord channel"
                    )
                    return
                    if hist_df.iloc[-1]['date'] < df_dp['date'].iloc[0]:
                        raise OperationalException("In memory historical data is older than "
                                                   f"oldest DataProvider candle for {pair} on "
                                                   f"timeframe {tf}")
                    else:
                        index = -1
                        logger.warning(
                            f"No common dates in historical data and dataprovider for {pair}. "
                            f"Appending latest dataprovider candle to historical data "
                            "but please be aware that there is likely a gap in the historical "
                            "data. \n"
                            f"Historical data ends at {hist_df.iloc[-1]['date']} "
                            f"while dataprovider starts at {df_dp['date'].iloc[0]} and "
                            f"ends at {df_dp['date'].iloc[-1]}."
                        )

                history_data[pair][tf] = pd.concat(
                    [
                        history_data[pair][tf],
                        hist_df,
                        df_dp.iloc[index:],
                    ],
                    ignore_index=True,

@@ -1,10 +1,12 @@
import copy
import inspect
import logging
import random
import shutil
from datetime import datetime, timezone
from math import cos, sin
from pathlib import Path
from typing import Any, Dict, List, Tuple
from typing import Any, Dict, List, Optional, Tuple

import numpy as np
import numpy.typing as npt
@@ -23,6 +25,7 @@ from freqtrade.constants import Config
from freqtrade.data.converter import reduce_dataframe_footprint
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import timeframe_to_seconds
from freqtrade.strategy import merge_informative_pair
from freqtrade.strategy.interface import IStrategy


@@ -110,7 +113,7 @@ class FreqaiDataKitchen:
    def set_paths(
        self,
        pair: str,
        trained_timestamp: int = None,
        trained_timestamp: Optional[int] = None,
    ) -> None:
        """
        Set the paths to the data for the present coin/botloop
@@ -168,6 +171,19 @@ class FreqaiDataKitchen:
            train_labels = labels
            train_weights = weights

        if feat_dict["shuffle_after_split"]:
            rint1 = random.randint(0, 100)
            rint2 = random.randint(0, 100)
            train_features = train_features.sample(
                frac=1, random_state=rint1).reset_index(drop=True)
            train_labels = train_labels.sample(frac=1, random_state=rint1).reset_index(drop=True)
            train_weights = pd.DataFrame(train_weights).sample(
                frac=1, random_state=rint1).reset_index(drop=True).to_numpy()[:, 0]
            test_features = test_features.sample(frac=1, random_state=rint2).reset_index(drop=True)
            test_labels = test_labels.sample(frac=1, random_state=rint2).reset_index(drop=True)
            test_weights = pd.DataFrame(test_weights).sample(
                frac=1, random_state=rint2).reset_index(drop=True).to_numpy()[:, 0]
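`shuffle_after_split` is read from the feature parameters; a config sketch as a Python dict fragment (value illustrative):

    config = {
        "freqai": {
            "feature_parameters": {
                "shuffle_after_split": True,
            }
        }
    }

Note the design choice above: train and test sets are shuffled with two independent seeds (`rint1`, `rint2`), so rows never cross the split boundary; only the ordering within each set changes.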

        # Simplest way to reverse the order of training and test data:
        if self.freqai_config['feature_parameters'].get('reverse_train_test_order', False):
            return self.build_data_dictionary(
@@ -235,7 +251,7 @@ class FreqaiDataKitchen:
            (drop_index == 0) & (drop_index_labels == 0)
        ]
        logger.info(
            f"dropped {len(unfiltered_df) - len(filtered_df)} training points"
            f"{self.pair}: dropped {len(unfiltered_df) - len(filtered_df)} training points"
            f" due to NaNs in populated dataset {len(unfiltered_df)}."
        )
        if (1 - len(filtered_df) / len(unfiltered_df)) > 0.1 and self.live:
@@ -659,7 +675,7 @@ class FreqaiDataKitchen:
        ]

        logger.info(
            f"SVM tossed {len(y_pred) - kept_points.sum()}"
            f"{self.pair}: SVM tossed {len(y_pred) - kept_points.sum()}"
            f" test points from {len(y_pred)} total points."
        )

@@ -933,7 +949,7 @@ class FreqaiDataKitchen:

        if (len(do_predict) - do_predict.sum()) > 0:
            logger.info(
                f"DI tossed {len(do_predict) - do_predict.sum()} predictions for "
                f"{self.pair}: DI tossed {len(do_predict) - do_predict.sum()} predictions for "
                "being too far from training data."
            )

@@ -1145,9 +1161,9 @@ class FreqaiDataKitchen:

        for pair in pairs:
            pair = pair.replace(':', '')  # lightgbm doesn't like colons
            valid_strs = [f"%-{pair}", f"%{pair}", f"%_{pair}"]
            pair_cols = [col for col in dataframe.columns if
                         any(substr in col for substr in valid_strs)]
            pair_cols = [col for col in dataframe.columns if col.startswith("%")
                         and f"{pair}_" in col]

            if pair_cols:
                pair_cols.insert(0, 'date')
                corr_dataframes[pair] = dataframe.filter(pair_cols, axis=1)
@@ -1176,6 +1192,105 @@ class FreqaiDataKitchen:

        return dataframe

    def get_pair_data_for_features(self,
                                   pair: str,
                                   tf: str,
                                   strategy: IStrategy,
                                   corr_dataframes: dict = {},
                                   base_dataframes: dict = {},
                                   is_corr_pairs: bool = False) -> DataFrame:
        """
        Get the data for the pair. If it's not in the dictionary, get it from the data provider
        :param pair: str = pair to get data for
        :param tf: str = timeframe to get data for
        :param strategy: IStrategy = user defined strategy object
        :param corr_dataframes: dict = dict containing the df pair dataframes
                                (for user defined timeframes)
        :param base_dataframes: dict = dict containing the current pair dataframes
                                (for user defined timeframes)
        :param is_corr_pairs: bool = whether the pair is a corr pair or not
        :return: dataframe = dataframe containing the pair data
        """
        if is_corr_pairs:
            dataframe = corr_dataframes[pair][tf]
            if not dataframe.empty:
                return dataframe
            else:
                dataframe = strategy.dp.get_pair_dataframe(pair=pair, timeframe=tf)
                return dataframe
        else:
            dataframe = base_dataframes[tf]
            if not dataframe.empty:
                return dataframe
            else:
                dataframe = strategy.dp.get_pair_dataframe(pair=pair, timeframe=tf)
                return dataframe

    def merge_features(self, df_main: DataFrame, df_to_merge: DataFrame,
                       tf: str, timeframe_inf: str, suffix: str) -> DataFrame:
        """
        Merge the features of the dataframe and remove HLCV and date added columns
        :param df_main: DataFrame = main dataframe
        :param df_to_merge: DataFrame = dataframe to merge
        :param tf: str = timeframe of the main dataframe
        :param timeframe_inf: str = timeframe of the dataframe to merge
        :param suffix: str = suffix to add to the columns of the dataframe to merge
        :return: dataframe = merged dataframe
        """
        dataframe = merge_informative_pair(df_main, df_to_merge, tf, timeframe_inf=timeframe_inf,
                                           append_timeframe=False, suffix=suffix, ffill=True)
        skip_columns = [
            (f"{s}_{suffix}") for s in ["date", "open", "high", "low", "close", "volume"]
        ]
        dataframe = dataframe.drop(columns=skip_columns)
        return dataframe

    def populate_features(self, dataframe: DataFrame, pair: str, strategy: IStrategy,
                          corr_dataframes: dict, base_dataframes: dict,
                          is_corr_pairs: bool = False) -> DataFrame:
        """
        Use the user defined strategy functions for populating features
        :param dataframe: DataFrame = dataframe to populate
        :param pair: str = pair to populate
        :param strategy: IStrategy = user defined strategy object
        :param corr_dataframes: dict = dict containing the df pair dataframes
        :param base_dataframes: dict = dict containing the current pair dataframes
        :param is_corr_pairs: bool = whether the pair is a corr pair or not
        :return: dataframe = populated dataframe
        """
        tfs: List[str] = self.freqai_config["feature_parameters"].get("include_timeframes")

        for tf in tfs:
            metadata = {"pair": pair, "tf": tf}
            informative_df = self.get_pair_data_for_features(
                pair, tf, strategy, corr_dataframes, base_dataframes, is_corr_pairs)
            informative_copy = informative_df.copy()

            for t in self.freqai_config["feature_parameters"]["indicator_periods_candles"]:
                df_features = strategy.feature_engineering_expand_all(
                    informative_copy.copy(), t, metadata=metadata)
                suffix = f"{t}"
                informative_df = self.merge_features(informative_df, df_features, tf, tf, suffix)

            generic_df = strategy.feature_engineering_expand_basic(
                informative_copy.copy(), metadata=metadata)
            suffix = "gen"

            informative_df = self.merge_features(informative_df, generic_df, tf, tf, suffix)

            indicators = [col for col in informative_df if col.startswith("%")]
            for n in range(self.freqai_config["feature_parameters"]["include_shifted_candles"] + 1):
                if n == 0:
                    continue
                df_shift = informative_df[indicators].shift(n)
                df_shift = df_shift.add_suffix("_shift-" + str(n))
                informative_df = pd.concat((informative_df, df_shift), axis=1)

            dataframe = self.merge_features(dataframe.copy(), informative_df,
                                            self.config["timeframe"], tf, f'{pair}_{tf}')

        return dataframe
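`populate_features` drives the strategy expansion hooks named above; a minimal strategy-side sketch of the two callbacks it invokes (indicator choice illustrative; `ta` assumed to be `talib.abstract`):

    def feature_engineering_expand_all(self, dataframe, period, metadata, **kwargs):
        # expanded across include_timeframes, include_shifted_candles
        # and indicator_periods_candles by populate_features()
        dataframe["%-rsi-period"] = ta.RSI(dataframe, timeperiod=period)
        return dataframe

    def feature_engineering_expand_basic(self, dataframe, metadata, **kwargs):
        # expanded across timeframes and shifts only (the "gen" suffix above)
        dataframe["%-raw_volume"] = dataframe["volume"]
        return dataframe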

    def use_strategy_to_populate_indicators(
        self,
        strategy: IStrategy,
@@ -1188,55 +1303,66 @@ class FreqaiDataKitchen:
        """
        Use the user defined strategy for populating indicators during retrain
        :param strategy: IStrategy = user defined strategy object
        :param corr_dataframes: dict = dict containing the informative pair dataframes
        :param corr_dataframes: dict = dict containing the df pair dataframes
                                (for user defined timeframes)
        :param base_dataframes: dict = dict containing the current pair dataframes
                                (for user defined timeframes)
        :param metadata: dict = strategy furnished pair metadata
        :param pair: str = pair to populate
        :param prediction_dataframe: DataFrame = dataframe containing the pair data
        used for prediction
        :param do_corr_pairs: bool = whether to populate corr pairs or not
        :return:
        dataframe: DataFrame = dataframe containing populated indicators
        """

        # for prediction dataframe creation, we let dataprovider handle everything in the strategy
        # so we create empty dictionaries, which allows us to pass None to
        # `populate_any_indicators()`. Signaling we want the dp to give us the live dataframe.
        # check if the user is using the deprecated populate_any_indicators function
        new_version = inspect.getsource(strategy.populate_any_indicators) == (
            inspect.getsource(IStrategy.populate_any_indicators))

        if not new_version:
            raise OperationalException(
                "You are using the `populate_any_indicators()` function"
                " which was deprecated on March 1, 2023. Please refer "
                "to the strategy migration guide to use the new "
                "feature_engineering_* methods: \n"
                "https://www.freqtrade.io/en/stable/strategy_migration/#freqai-strategy \n"
                "And the feature_engineering_* documentation: \n"
                "https://www.freqtrade.io/en/latest/freqai-feature-engineering/"
            )

        tfs: List[str] = self.freqai_config["feature_parameters"].get("include_timeframes")
        pairs: List[str] = self.freqai_config["feature_parameters"].get("include_corr_pairlist", [])
        pairs: List[str] = self.freqai_config["feature_parameters"].get(
            "include_corr_pairlist", [])

        for tf in tfs:
            if tf not in base_dataframes:
                base_dataframes[tf] = pd.DataFrame()
            for p in pairs:
                if p not in corr_dataframes:
                    corr_dataframes[p] = {}
                if tf not in corr_dataframes[p]:
                    corr_dataframes[p][tf] = pd.DataFrame()

        if not prediction_dataframe.empty:
            dataframe = prediction_dataframe.copy()
            for tf in tfs:
                base_dataframes[tf] = None
                for p in pairs:
                    if p not in corr_dataframes:
                        corr_dataframes[p] = {}
                    corr_dataframes[p][tf] = None
        else:
            dataframe = base_dataframes[self.config["timeframe"]].copy()

        sgi = False
        for tf in tfs:
            if tf == tfs[-1]:
                sgi = True  # doing this last allows user to use all tf raw prices in labels
            dataframe = strategy.populate_any_indicators(
                pair,
                dataframe.copy(),
                tf,
                informative=base_dataframes[tf],
                set_generalized_indicators=sgi
            )

        corr_pairs: List[str] = self.freqai_config["feature_parameters"].get(
            "include_corr_pairlist", [])
        dataframe = self.populate_features(dataframe.copy(), pair, strategy,
                                           corr_dataframes, base_dataframes)
        metadata = {"pair": pair}
        dataframe = strategy.feature_engineering_standard(dataframe.copy(), metadata=metadata)
        # ensure corr pairs are always last
        for corr_pair in pairs:
        for corr_pair in corr_pairs:
            if pair == corr_pair:
                continue  # don't repeat anything from whitelist
            for tf in tfs:
                if pairs and do_corr_pairs:
                    dataframe = strategy.populate_any_indicators(
                        corr_pair,
                        dataframe.copy(),
                        tf,
                        informative=corr_dataframes[corr_pair][tf]
                    )
            if corr_pairs and do_corr_pairs:
                dataframe = self.populate_features(dataframe.copy(), corr_pair, strategy,
                                                   corr_dataframes, base_dataframes, True)

        dataframe = strategy.set_freqai_targets(dataframe.copy(), metadata=metadata)

        self.get_unique_classes_from_labels(dataframe)

@@ -1367,3 +1493,25 @@ class FreqaiDataKitchen:
            dataframe.columns = dataframe.columns.str.replace(c, "")

        return dataframe

    def buffer_timerange(self, timerange: TimeRange):
        """
        Buffer the start and end of the timerange. This is used *after* the indicators
        are populated.

        The main example use is when predicting maxima and minima: the argrelextrema
        function cannot know the maxima/minima at the edges of the timerange. To improve
        model accuracy, it is best to compute argrelextrema on the full timerange
        and then use this function to cut off the edges (buffer) by the kernel.

        In another case, if the targets are set to a shifted price movement, this
        buffer is unnecessary because the shifted candles at the end of the timerange
        will be NaN and FreqAI will automatically cut those off of the training
        dataset.
        """
        buffer = self.freqai_config["feature_parameters"]["buffer_train_data_candles"]
        if buffer:
            timerange.stopts -= buffer * timeframe_to_seconds(self.config["timeframe"])
            timerange.startts += buffer * timeframe_to_seconds(self.config["timeframe"])

        return timerange
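A quick worked example of the arithmetic above (values illustrative): with `buffer_train_data_candles: 100` on a 5m timeframe, `timeframe_to_seconds` returns 300, so each edge of the timerange is trimmed by 100 * 300 = 30000 seconds, i.e. 500 minutes of candles.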
||||
|
@@ -65,12 +65,11 @@ class IFreqaiModel(ABC):
|
||||
self.retrain = False
|
||||
self.first = True
|
||||
self.set_full_path()
|
||||
self.follow_mode: bool = self.freqai_info.get("follow_mode", False)
|
||||
self.save_backtest_models: bool = self.freqai_info.get("save_backtest_models", True)
|
||||
if self.save_backtest_models:
|
||||
logger.info('Backtesting module configured to save all models.')
|
||||
|
||||
self.dd = FreqaiDataDrawer(Path(self.full_path), self.config, self.follow_mode)
|
||||
self.dd = FreqaiDataDrawer(Path(self.full_path), self.config)
|
||||
# set current candle to arbitrary historical date
|
||||
self.current_candle: datetime = datetime.fromtimestamp(637887600, tz=timezone.utc)
|
||||
self.dd.current_candle = self.current_candle
|
||||
@@ -104,6 +103,8 @@ class IFreqaiModel(ABC):
|
||||
self.metadata: Dict[str, Any] = self.dd.load_global_metadata_from_disk()
|
||||
self.data_provider: Optional[DataProvider] = None
|
||||
self.max_system_threads = max(int(psutil.cpu_count() * 2 - 2), 1)
|
||||
self.can_short = True # overridden in start() with strategy.can_short
|
||||
self.model: Any = None
|
||||
|
||||
record_params(config, self.full_path)
|
||||
|
||||
@@ -133,6 +134,7 @@ class IFreqaiModel(ABC):
|
||||
self.live = strategy.dp.runmode in (RunMode.DRY_RUN, RunMode.LIVE)
|
||||
self.dd.set_pair_dict_info(metadata)
|
||||
self.data_provider = strategy.dp
|
||||
self.can_short = strategy.can_short
|
||||
|
||||
if self.live:
|
||||
self.inference_timer('start')
|
||||
@@ -145,14 +147,11 @@ class IFreqaiModel(ABC):
|
||||
# (backtest window, i.e. window immediately following the training window).
|
||||
# FreqAI slides the window and sequentially builds the backtesting results before returning
|
||||
# the concatenated results for the full backtesting period back to the strategy.
|
||||
elif not self.follow_mode:
|
||||
else:
|
||||
self.dk = FreqaiDataKitchen(self.config, self.live, metadata["pair"])
|
||||
dataframe = self.dk.use_strategy_to_populate_indicators(
|
||||
strategy, prediction_dataframe=dataframe, pair=metadata["pair"]
|
||||
)
|
||||
if not self.config.get("freqai_backtest_live_models", False):
|
||||
logger.info(f"Training {len(self.dk.training_timeranges)} timeranges")
|
||||
dk = self.start_backtesting(dataframe, metadata, self.dk)
|
||||
dk = self.start_backtesting(dataframe, metadata, self.dk, strategy)
|
||||
dataframe = dk.remove_features_from_df(dk.return_dataframe)
|
||||
else:
|
||||
logger.info(
|
||||
@@ -223,7 +222,7 @@ class IFreqaiModel(ABC):
|
||||
logger.warning(f'{pair} not in current whitelist, removing from train queue.')
|
||||
continue
|
||||
|
||||
(_, trained_timestamp, _) = self.dd.get_pair_dict_info(pair)
|
||||
(_, trained_timestamp) = self.dd.get_pair_dict_info(pair)
|
||||
|
||||
dk = FreqaiDataKitchen(self.config, self.live, pair)
|
||||
(
|
||||
@@ -253,7 +252,7 @@ class IFreqaiModel(ABC):
|
||||
self.dd.save_metric_tracker_to_disk()
|
||||
|
||||
def start_backtesting(
|
||||
self, dataframe: DataFrame, metadata: dict, dk: FreqaiDataKitchen
|
||||
self, dataframe: DataFrame, metadata: dict, dk: FreqaiDataKitchen, strategy: IStrategy
|
||||
) -> FreqaiDataKitchen:
|
||||
"""
|
||||
The main broad execution for backtesting. For backtesting, each pair enters and then gets
|
||||
@@ -265,20 +264,23 @@ class IFreqaiModel(ABC):
|
||||
:param dataframe: DataFrame = strategy passed dataframe
|
||||
:param metadata: Dict = pair metadata
|
||||
:param dk: FreqaiDataKitchen = Data management/analysis tool associated to present pair only
|
||||
:param strategy: Strategy to train on
|
||||
:return:
|
||||
FreqaiDataKitchen = Data management/analysis tool associated to present pair only
|
||||
"""
|
||||
|
||||
self.pair_it += 1
|
||||
train_it = 0
|
||||
pair = metadata["pair"]
|
||||
populate_indicators = True
|
||||
check_features = True
|
||||
# Loop enforcing the sliding window training/backtesting paradigm
|
||||
# tr_train is the training time range e.g. 1 historical month
|
||||
# tr_backtest is the backtesting time range e.g. the week directly
|
||||
# following tr_train. Both of these windows slide through the
|
||||
# entire backtest
|
||||
for tr_train, tr_backtest in zip(dk.training_timeranges, dk.backtesting_timeranges):
|
||||
pair = metadata["pair"]
|
||||
(_, _, _) = self.dd.get_pair_dict_info(pair)
|
||||
(_, _) = self.dd.get_pair_dict_info(pair)
|
||||
train_it += 1
|
||||
total_trains = len(dk.backtesting_timeranges)
|
||||
self.training_timerange = tr_train
|
||||
@@ -299,23 +301,52 @@ class IFreqaiModel(ABC):
|
||||
dk.set_new_model_names(pair, timestamp_model_id)
|
||||
|
||||
if dk.check_if_backtest_prediction_is_valid(len_backtest_df):
|
||||
self.dd.load_metadata(dk)
|
||||
dk.find_features(dataframe)
|
||||
self.check_if_feature_list_matches_strategy(dk)
|
||||
if check_features:
|
||||
self.dd.load_metadata(dk)
|
||||
dataframe_dummy_features = self.dk.use_strategy_to_populate_indicators(
|
||||
strategy, prediction_dataframe=dataframe.tail(1), pair=metadata["pair"]
|
||||
)
|
||||
dk.find_features(dataframe_dummy_features)
|
||||
self.check_if_feature_list_matches_strategy(dk)
|
||||
check_features = False
|
||||
append_df = dk.get_backtesting_prediction()
|
||||
dk.append_predictions(append_df)
|
||||
else:
|
||||
dataframe_train = dk.slice_dataframe(tr_train, dataframe)
|
||||
dataframe_backtest = dk.slice_dataframe(tr_backtest, dataframe)
|
||||
if populate_indicators:
|
||||
dataframe = self.dk.use_strategy_to_populate_indicators(
|
||||
strategy, prediction_dataframe=dataframe, pair=metadata["pair"]
|
||||
)
|
||||
populate_indicators = False
|
||||
|
||||
dataframe_base_train = dataframe.loc[dataframe["date"] < tr_train.stopdt, :]
|
||||
dataframe_base_train = strategy.set_freqai_targets(
|
||||
dataframe_base_train, metadata=metadata)
|
||||
dataframe_base_backtest = dataframe.loc[dataframe["date"] < tr_backtest.stopdt, :]
|
||||
dataframe_base_backtest = strategy.set_freqai_targets(
|
||||
dataframe_base_backtest, metadata=metadata)
|
||||
|
||||
tr_train = dk.buffer_timerange(tr_train)
|
||||
|
||||
dataframe_train = dk.slice_dataframe(tr_train, dataframe_base_train)
|
||||
dataframe_backtest = dk.slice_dataframe(tr_backtest, dataframe_base_backtest)
|
||||
|
||||
if not self.model_exists(dk):
|
||||
dk.find_features(dataframe_train)
|
||||
dk.find_labels(dataframe_train)
|
||||
self.model = self.train(dataframe_train, pair, dk)
|
||||
|
||||
try:
|
||||
self.model = self.train(dataframe_train, pair, dk)
|
||||
except Exception as msg:
|
||||
logger.warning(
|
||||
f"Training {pair} raised exception {msg.__class__.__name__}. "
|
||||
f"Message: {msg}, skipping.", exc_info=True)
|
||||
self.model = None
|
||||
|
||||
self.dd.pair_dict[pair]["trained_timestamp"] = int(
|
||||
tr_train.stopts)
|
||||
if self.plot_features:
|
||||
if self.plot_features and self.model is not None:
|
||||
plot_feature_importance(self.model, pair, dk, self.plot_features)
|
||||
if self.save_backtest_models:
|
||||
if self.save_backtest_models and self.model is not None:
|
||||
logger.info('Saving backtest model to disk.')
|
||||
self.dd.save_data(self.model, pair, dk)
|
||||
else:
|
||||
@@ -348,18 +379,8 @@ class IFreqaiModel(ABC):
|
||||
dk: FreqaiDataKitchen = Data management/analysis tool associated to present pair only
|
||||
"""
|
||||
|
||||
# update follower
|
||||
if self.follow_mode:
|
||||
self.dd.update_follower_metadata()
|
||||
|
||||
# get the model metadata associated with the current pair
|
||||
(_, trained_timestamp, return_null_array) = self.dd.get_pair_dict_info(metadata["pair"])
|
||||
|
||||
# if the metadata doesn't exist, the follower returns null arrays to strategy
|
||||
if self.follow_mode and return_null_array:
|
||||
logger.info("Returning null array from follower to strategy")
|
||||
self.dd.return_null_values_to_strategy(dataframe, dk)
|
||||
return dk
|
||||
(_, trained_timestamp) = self.dd.get_pair_dict_info(metadata["pair"])
|
||||
|
||||
# append the historic data once per round
|
||||
if self.dd.historic_data:
|
||||
@@ -367,27 +388,18 @@ class IFreqaiModel(ABC):
|
||||
logger.debug(f'Updating historic data on pair {metadata["pair"]}')
|
||||
self.track_current_candle()
|
||||
|
||||
if not self.follow_mode:
|
||||
(_, new_trained_timerange, data_load_timerange) = dk.check_if_new_training_required(
|
||||
trained_timestamp
|
||||
)
|
||||
dk.set_paths(metadata["pair"], new_trained_timerange.stopts)
|
||||
|
||||
(_, new_trained_timerange, data_load_timerange) = dk.check_if_new_training_required(
|
||||
trained_timestamp
|
||||
)
|
||||
dk.set_paths(metadata["pair"], new_trained_timerange.stopts)
|
||||
# load candle history into memory if it is not yet.
|
||||
if not self.dd.historic_data:
|
||||
self.dd.load_all_pair_histories(data_load_timerange, dk)
|
||||
|
||||
# load candle history into memory if it is not yet.
|
||||
if not self.dd.historic_data:
|
||||
self.dd.load_all_pair_histories(data_load_timerange, dk)
|
||||
|
||||
if not self.scanning:
|
||||
self.scanning = True
|
||||
self.start_scanning(strategy)
|
||||
|
||||
elif self.follow_mode:
|
||||
dk.set_paths(metadata["pair"], trained_timestamp)
|
||||
logger.info(
|
||||
"FreqAI instance set to follow_mode, finding existing pair "
|
||||
f"using { self.identifier }"
|
||||
)
|
||||
if not self.scanning:
|
||||
self.scanning = True
|
||||
self.start_scanning(strategy)
|
||||
|
||||
# load the model and associated data into the data kitchen
|
||||
self.model = self.dd.load_data(metadata["pair"], dk)
|
||||
@@ -475,7 +487,7 @@ class IFreqaiModel(ABC):
|
||||
"strategy is furnishing the same features as the pretrained"
|
||||
"model. In case of --strategy-list, please be aware that FreqAI "
|
||||
"requires all strategies to maintain identical "
|
||||
"populate_any_indicator() functions"
|
||||
"feature_engineering_* functions"
|
||||
)
|
||||
|
||||
def data_cleaning_train(self, dk: FreqaiDataKitchen) -> None:
|
||||
@@ -549,7 +561,13 @@ class IFreqaiModel(ABC):
|
||||
:return:
|
||||
:boolean: whether the model file exists or not.
|
||||
"""
|
||||
path_to_modelfile = Path(dk.data_path / f"{dk.model_filename}_model.joblib")
|
||||
if self.dd.model_type == 'joblib':
|
||||
file_type = ".joblib"
|
||||
elif self.dd.model_type == 'keras':
|
||||
file_type = ".h5"
|
||||
elif 'stable_baselines' in self.dd.model_type or 'sb3_contrib' == self.dd.model_type:
|
||||
file_type = ".zip"
|
||||
path_to_modelfile = Path(dk.data_path / f"{dk.model_filename}_model{file_type}")
|
||||
file_exists = path_to_modelfile.is_file()
|
||||
if file_exists:
|
||||
logger.info("Found model at %s", dk.data_path / dk.model_filename)
|
||||
@@ -581,7 +599,7 @@ class IFreqaiModel(ABC):
|
||||
:param strategy: IStrategy = user defined strategy object
|
||||
:param dk: FreqaiDataKitchen = non-persistent data container for current coin/loop
|
||||
:param data_load_timerange: TimeRange = the amount of data to be loaded
|
||||
for populate_any_indicators
|
||||
for populating indicators
|
||||
(larger than new_trained_timerange so that
|
||||
new_trained_timerange does not contain any NaNs)
|
||||
"""
|
||||
@@ -594,6 +612,8 @@ class IFreqaiModel(ABC):
|
||||
strategy, corr_dataframes, base_dataframes, pair
|
||||
)
|
||||
|
||||
new_trained_timerange = dk.buffer_timerange(new_trained_timerange)
|
||||
|
||||
unfiltered_dataframe = dk.slice_dataframe(new_trained_timerange, unfiltered_dataframe)
|
||||
|
||||
# find the features indicated by strategy and store in datakitchen
|
||||
@@ -609,8 +629,7 @@ class IFreqaiModel(ABC):
|
||||
if self.plot_features:
|
||||
plot_feature_importance(model, pair, dk, self.plot_features)
|
||||
|
||||
if self.freqai_info.get("purge_old_models", False):
|
||||
self.dd.purge_old_models()
|
||||
self.dd.purge_old_models()
|
||||
|
||||
def set_initial_historic_predictions(
|
||||
self, pred_df: DataFrame, dk: FreqaiDataKitchen, pair: str, strat_df: DataFrame
|
||||
@@ -786,7 +805,7 @@ class IFreqaiModel(ABC):
|
||||
logger.warning("Couldn't cache corr_pair dataframes for improved performance. "
|
||||
"Consider ensuring that the full coin/stake, e.g. XYZ/USD, "
|
||||
"is included in the column names when you are creating features "
|
||||
"in `populate_any_indicators()`.")
|
||||
"in `feature_engineering_*` functions.")
|
||||
self.get_corr_dataframes = not bool(self.corr_dataframes)
|
||||
elif self.corr_dataframes:
|
||||
dataframe = dk.attach_corr_pair_columns(
|
||||
@@ -911,7 +930,6 @@ class IFreqaiModel(ABC):
|
||||
dk.return_dataframe = dk.return_dataframe.drop(columns=list(columns_to_drop))
|
||||
dk.return_dataframe = pd.merge(
|
||||
dk.return_dataframe, saved_dataframe, how='left', left_on='date', right_on="date_pred")
|
||||
# dk.return_dataframe = dk.return_dataframe[saved_dataframe.columns].fillna(0)
|
||||
return dk
|
||||
|
||||
# Following methods which are overridden by user made prediction models.
|
||||
|
@@ -61,7 +61,7 @@ class ReinforcementLearner(BaseReinforcementLearningModel):
model = self.MODELCLASS(self.policy_type, self.train_env, policy_kwargs=policy_kwargs,
tensorboard_log=Path(
dk.full_path / "tensorboard" / dk.pair.split('/')[0]),
**self.freqai_info['model_training_parameters']
**self.freqai_info.get('model_training_parameters', {})
)
else:
logger.info('Continual training activated - starting training from previously '
@@ -100,7 +100,7 @@ class ReinforcementLearner(BaseReinforcementLearningModel):
"""
# first, penalize if the action is not valid
if not self._is_valid(action):
self.custom_info["Invalid"] += 1
self.tensorboard_log("invalid", category="actions")
return -2

pnl = self.get_unrealized_profit()
@@ -109,15 +109,12 @@ class ReinforcementLearner(BaseReinforcementLearningModel):
# reward agent for entering trades
if (action == Actions.Long_enter.value
and self._position == Positions.Neutral):
self.custom_info[f"{Actions.Long_enter.name}"] += 1
return 25
if (action == Actions.Short_enter.value
and self._position == Positions.Neutral):
self.custom_info[f"{Actions.Short_enter.name}"] += 1
return 25
# discourage agent from not entering trades
if action == Actions.Neutral.value and self._position == Positions.Neutral:
self.custom_info[f"{Actions.Neutral.name}"] += 1
return -1

max_trade_duration = self.rl_config.get('max_trade_duration_candles', 300)
@@ -131,22 +128,18 @@ class ReinforcementLearner(BaseReinforcementLearningModel):
# discourage sitting in position
if (self._position in (Positions.Short, Positions.Long) and
action == Actions.Neutral.value):
self.custom_info["Hold"] += 1
return -1 * trade_duration / max_trade_duration

# close long
if action == Actions.Long_exit.value and self._position == Positions.Long:
if pnl > self.profit_aim * self.rr:
factor *= self.rl_config['model_reward_parameters'].get('win_reward_factor', 2)
self.custom_info[f"{Actions.Long_exit.name}"] += 1
return float(pnl * factor)

# close short
if action == Actions.Short_exit.value and self._position == Positions.Short:
if pnl > self.profit_aim * self.rr:
factor *= self.rl_config['model_reward_parameters'].get('win_reward_factor', 2)
self.custom_info[f"{Actions.Short_exit.name}"] += 1
return float(pnl * factor)

self.custom_info["Unknown"] += 1
return 0.
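
The exit branches above scale the raw pnl by `factor`, doubling it via `win_reward_factor` once the trade beats `profit_aim * rr`. A worked example with assumed config values (none of these numbers come from this diff except the default `win_reward_factor` of 2):

```python
# Assumed rl_config values, for illustration only.
profit_aim = 0.025
rr = 1.0
factor = 100.0
win_reward_factor = 2.0

pnl = 0.03  # unrealized profit when Actions.Long_exit fires
if pnl > profit_aim * rr:        # 0.03 > 0.025 -> winning exit
    factor *= win_reward_factor  # factor becomes 200.0
reward = float(pnl * factor)     # 6.0
```
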
@@ -34,17 +34,25 @@ class ReinforcementLearner_multiproc(ReinforcementLearner):
train_df = data_dictionary["train_features"]
test_df = data_dictionary["test_features"]

if self.train_env:
self.train_env.close()
if self.eval_env:
self.eval_env.close()

env_info = self.pack_env_dict(dk.pair)

env_id = "train_env"
self.train_env = SubprocVecEnv([make_env(self.MyRLEnv, env_id, i, 1, train_df, prices_train,
self.reward_params, self.CONV_WIDTH, monitor=True,
config=self.config) for i
self.train_env = SubprocVecEnv([make_env(self.MyRLEnv, env_id, i, 1,
train_df, prices_train,
monitor=True,
env_info=env_info) for i
in range(self.max_threads)])

eval_env_id = 'eval_env'
self.eval_env = SubprocVecEnv([make_env(self.MyRLEnv, eval_env_id, i, 1,
test_df, prices_test,
self.reward_params, self.CONV_WIDTH, monitor=True,
config=self.config) for i
monitor=True,
env_info=env_info) for i
in range(self.max_threads)])
self.eval_callback = EvalCallback(self.eval_env, deterministic=True,
render=False, eval_freq=len(train_df),
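
The refactor above funnels per-environment settings through a single `env_info` dict instead of passing `reward_params`, `CONV_WIDTH` and `config` positionally. A hedged sketch of what `pack_env_dict()` plausibly bundles (the authoritative version lives in `BaseReinforcementLearningModel`; the keys shown are only the ones implied by the replaced positional arguments):

```python
from typing import Any, Dict


def pack_env_dict(self, pair: str) -> Dict[str, Any]:
    # Everything the train/eval environments previously took as separate args.
    return {
        "window_size": self.CONV_WIDTH,
        "reward_kwargs": self.reward_params,
        "config": self.config,
        "pair": pair,
    }
```
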
@@ -211,7 +211,7 @@ def record_params(config: Dict[str, Any], full_path: Path) -> None:
"pairs": config.get('exchange', {}).get('pair_whitelist')
}

with open(params_record_path, "w") as handle:
with params_record_path.open("w") as handle:
rapidjson.dump(
run_params,
handle,
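
This is one of several hunks in the commit that swap the `open()` builtin for `Path.open()`; behavior is identical, it simply leans on the pathlib object already in hand. A minimal sketch (the path is illustrative):

```python
from pathlib import Path

params_record_path = Path("user_data/models/run_params.json")  # illustrative

# Equivalent ways to get a writable handle; the method form avoids mixing
# the builtin with an existing Path object.
with open(params_record_path, "w") as handle:
    handle.write("{}")
with params_record_path.open("w") as handle:
    handle.write("{}")
```
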
@@ -33,6 +33,7 @@ from freqtrade.rpc.external_message_consumer import ExternalMessageConsumer
from freqtrade.strategy.interface import IStrategy
from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper
from freqtrade.util import FtPrecise
from freqtrade.util.binance_mig import migrate_binance_futures_names
from freqtrade.wallets import Wallets

@@ -126,19 +127,19 @@ class FreqtradeBot(LoggingMixin):
for minutes in [0, 15, 30, 45]:
t = str(time(time_slot, minutes, 2))
self._schedule.every().day.at(t).do(update)
self.last_process = datetime(1970, 1, 1, tzinfo=timezone.utc)
self.last_process: Optional[datetime] = None

self.strategy.ft_bot_start()
# Initialize protections AFTER bot start - otherwise parameters are not loaded.
self.protections = ProtectionManager(self.config, self.strategy.protections)

def notify_status(self, msg: str) -> None:
def notify_status(self, msg: str, msg_type=RPCMessageType.STATUS) -> None:
"""
Public method for users of this class (worker, etc.) to send notifications
via RPC about changes in the bot status.
"""
self.rpc.send_msg({
'type': RPCMessageType.STATUS,
'type': msg_type,
'status': msg
})
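
With the new `msg_type` parameter, callers can tag status notifications while the default stays `RPCMessageType.STATUS`. A usage sketch (these particular call sites are illustrative; `WARNING` is an existing `RPCMessageType` member):

```python
from freqtrade.enums import RPCMessageType

freqtrade.notify_status("Bot started")  # unchanged default: STATUS
freqtrade.notify_status("Downgrade detected", msg_type=RPCMessageType.WARNING)
```
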
@@ -155,6 +156,8 @@ class FreqtradeBot(LoggingMixin):
self.cancel_all_open_orders()

self.check_for_open_trades()
except Exception as e:
logger.warning(f'Exception during cleanup: {e.__class__.__name__} {e}')

finally:
self.strategy.ft_bot_cleanup()
@@ -162,14 +165,21 @@ class FreqtradeBot(LoggingMixin):
self.rpc.cleanup()
if self.emc:
self.emc.shutdown()
Trade.commit()
self.exchange.close()
try:
Trade.commit()
except Exception:
# Exceptions here can happen if the db disappeared.
# At which point we can no longer commit anyway.
pass

def startup(self) -> None:
"""
Called on startup and after reloading the bot - triggers notifications and
performs startup tasks
"""
migrate_binance_futures_names(self.config)

self.rpc.startup_messages(self.config, self.pairlists, self.protections)
# Update older trades with precision and precision mode
self.startup_backpopulate_precision()
@@ -334,7 +344,15 @@ class FreqtradeBot(LoggingMixin):
try:
fo = self.exchange.fetch_order_or_stoploss_order(order.order_id, order.ft_pair,
order.ft_order_side == 'stoploss')

if not order.trade:
# This should not happen, but it does if trades were deleted manually.
# This can only occur on sqlite, which doesn't enforce foreign constraints.
logger.warning(
f"Order {order.order_id} has no trade attached. "
"This may suggest a database corruption. "
f"The expected trade ID is {order.ft_trade_id}. Ignoring this order."
)
continue
self.update_trade_state(order.trade, order.order_id, fo,
stoploss_order=(order.ft_order_side == 'stoploss'))

@@ -345,7 +363,7 @@ class FreqtradeBot(LoggingMixin):
"Order is older than 5 days. Assuming order was fully cancelled.")
fo = order.to_ccxt_object()
fo['status'] = 'canceled'
self.handle_timedout_order(fo, order.trade)
self.handle_cancel_order(fo, order.trade, constants.CANCEL_REASON['TIMEOUT'])

except ExchangeError as e:

@@ -367,7 +385,7 @@ class FreqtradeBot(LoggingMixin):
for trade in trades:
if not trade.is_open and not trade.fee_updated(trade.exit_side):
# Get sell fee
order = trade.select_order(trade.exit_side, False)
order = trade.select_order(trade.exit_side, False, only_filled=True)
if not order:
order = trade.select_order('stoploss', False)
if order:
@@ -383,7 +401,7 @@ class FreqtradeBot(LoggingMixin):
for trade in trades:
with self._exit_lock:
if trade.is_open and not trade.fee_updated(trade.entry_side):
order = trade.select_order(trade.entry_side, False)
order = trade.select_order(trade.entry_side, False, only_filled=True)
open_order = trade.select_order(trade.entry_side, True)
if order and open_order is None:
logger.info(
@@ -568,7 +586,7 @@ class FreqtradeBot(LoggingMixin):

min_entry_stake = self.exchange.get_min_pair_stake_amount(trade.pair,
current_entry_rate,
self.strategy.stoploss)
0.0)
min_exit_stake = self.exchange.get_min_pair_stake_amount(trade.pair,
current_exit_rate,
self.strategy.stoploss)
@@ -615,7 +633,7 @@ class FreqtradeBot(LoggingMixin):
return

remaining = (trade.amount - amount) * current_exit_rate
if remaining < min_exit_stake:
if min_exit_stake and remaining < min_exit_stake:
logger.info(f"Remaining amount of {remaining} would be smaller "
f"than the minimum of {min_exit_stake}.")
return
@@ -682,7 +700,8 @@ class FreqtradeBot(LoggingMixin):
pos_adjust = trade is not None

enter_limit_requested, stake_amount, leverage = self.get_valid_enter_price_and_stake(
pair, price, stake_amount, trade_side, enter_tag, trade, order_adjust, leverage_)
pair, price, stake_amount, trade_side, enter_tag, trade, order_adjust, leverage_,
pos_adjust)

if not stake_amount:
return False
@@ -713,7 +732,7 @@ class FreqtradeBot(LoggingMixin):
time_in_force=time_in_force,
leverage=leverage
)
order_obj = Order.parse_from_ccxt_object(order, pair, side)
order_obj = Order.parse_from_ccxt_object(order, pair, side, amount, enter_limit_requested)
order_id = order['id']
order_status = order.get('status')
logger.info(f"Order #{order_id} was created for {pair} and status is {order_status}.")
@@ -740,13 +759,15 @@ class FreqtradeBot(LoggingMixin):
self.exchange.name, order['filled'], order['amount'],
order['remaining']
)
amount = safe_value_fallback(order, 'filled', 'amount')
enter_limit_filled_price = safe_value_fallback(order, 'average', 'price')
amount = safe_value_fallback(order, 'filled', 'amount', amount)
enter_limit_filled_price = safe_value_fallback(
order, 'average', 'price', enter_limit_filled_price)

# in case of FOK the order may be filled immediately and fully
elif order_status == 'closed':
amount = safe_value_fallback(order, 'filled', 'amount')
enter_limit_filled_price = safe_value_fallback(order, 'average', 'price')
amount = safe_value_fallback(order, 'filled', 'amount', amount)
enter_limit_filled_price = safe_value_fallback(
order, 'average', 'price', enter_limit_requested)

# Fee is applied twice because we make a LIMIT_BUY and LIMIT_SELL
fee = self.exchange.get_fee(symbol=pair, taker_or_maker='maker')
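
The added fourth argument to `safe_value_fallback` is a default returned when both keys are unset, so an amount computed earlier survives a sparse order dict. A sketch of the semantics as used above (assuming the helper returns `obj[key1]` if set, else `obj[key2]` if set, else the default):

```python
order = {"filled": None, "amount": 0.05}
amount = safe_value_fallback(order, "filled", "amount", 0.04)    # -> 0.05

order = {"filled": None, "amount": None}
amount = safe_value_fallback(order, "filled", "amount", amount)  # -> 0.05 (default kept)
```
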
@@ -798,7 +819,7 @@ class FreqtradeBot(LoggingMixin):

trade.orders.append(order_obj)
trade.recalc_trade_from_orders()
Trade.query.session.add(trade)
Trade.session.add(trade)
Trade.commit()

# Updating wallets
@@ -821,7 +842,7 @@ class FreqtradeBot(LoggingMixin):

def cancel_stoploss_on_exchange(self, trade: Trade) -> Trade:
# First cancelling stoploss on exchange ...
if self.strategy.order_types.get('stoploss_on_exchange') and trade.stoploss_order_id:
if trade.stoploss_order_id:
try:
logger.info(f"Canceling stoploss on exchange for {trade}")
co = self.exchange.cancel_stoploss_order_with_result(
@@ -840,7 +861,12 @@ class FreqtradeBot(LoggingMixin):
trade: Optional[Trade],
order_adjust: bool,
leverage_: Optional[float],
pos_adjust: bool,
) -> Tuple[float, float, float]:
"""
Validate and eventually adjust (within limits) limit, amount and leverage
:return: Tuple with (price, amount, leverage)
"""

if price:
enter_limit_requested = price
@@ -886,7 +912,9 @@ class FreqtradeBot(LoggingMixin):
# We do however also need min-stake to determine leverage, therefore this is ignored as
# edge-case for now.
min_stake_amount = self.exchange.get_min_pair_stake_amount(
pair, enter_limit_requested, self.strategy.stoploss, leverage)
pair, enter_limit_requested,
self.strategy.stoploss if not pos_adjust else 0.0,
leverage)
max_stake_amount = self.exchange.get_max_pair_stake_amount(
pair, enter_limit_requested, leverage)

@@ -905,6 +933,7 @@ class FreqtradeBot(LoggingMixin):
stake_amount=stake_amount,
min_stake_amount=min_stake_amount,
max_stake_amount=max_stake_amount,
trade_amount=trade.stake_amount if trade else None,
)

return enter_limit_requested, stake_amount, leverage
@@ -1057,7 +1086,7 @@ class FreqtradeBot(LoggingMixin):
datetime.now(timezone.utc),
enter=enter,
exit_=exit_,
force_stoploss=self.edge.stoploss(trade.pair) if self.edge else 0
force_stoploss=self.edge.get_stoploss(trade.pair) if self.edge else 0
)
for should_exit in exits:
if should_exit.exit_flag:
@@ -1077,7 +1106,7 @@ class FreqtradeBot(LoggingMixin):
:return: True if the order succeeded, and False in case of problems.
"""
try:
stoploss_order = self.exchange.stoploss(
stoploss_order = self.exchange.create_stoploss(
pair=trade.pair,
amount=trade.amount,
stop_price=stop_price,
@@ -1086,7 +1115,8 @@ class FreqtradeBot(LoggingMixin):
leverage=trade.leverage
)

order_obj = Order.parse_from_ccxt_object(stoploss_order, trade.pair, 'stoploss')
order_obj = Order.parse_from_ccxt_object(stoploss_order, trade.pair, 'stoploss',
trade.amount, stop_price)
trade.orders.append(order_obj)
trade.stoploss_order_id = str(stoploss_order['id'])
trade.stoploss_last_update = datetime.now(timezone.utc)
@@ -1100,8 +1130,7 @@ class FreqtradeBot(LoggingMixin):
trade.stoploss_order_id = None
logger.error(f'Unable to place a stoploss order on exchange. {e}')
logger.warning('Exiting the trade forcefully')
self.execute_trade_exit(trade, stop_price, exit_check=ExitCheckTuple(
exit_type=ExitType.EMERGENCY_EXIT))
self.emergency_exit(trade, stop_price)

except ExchangeError:
trade.stoploss_order_id = None
@@ -1148,15 +1177,13 @@ class FreqtradeBot(LoggingMixin):

# If enter order is fulfilled but there is no stoploss, we add a stoploss on exchange
if not stoploss_order:
stoploss = (
self.edge.stoploss(pair=trade.pair)
if self.edge else
trade.stop_loss_pct / trade.leverage
)
if trade.is_short:
stop_price = trade.open_rate * (1 - stoploss)
else:
stop_price = trade.open_rate * (1 + stoploss)
stop_price = trade.stoploss_or_liquidation
if self.edge:
stoploss = self.edge.get_stoploss(pair=trade.pair)
stop_price = (
trade.open_rate * (1 - stoploss) if trade.is_short
else trade.open_rate * (1 + stoploss)
)

if self.create_stoploss_order(trade=trade, stop_price=stop_price):
# The above will return False if the placement failed and the trade was force-sold.
@@ -1241,11 +1268,11 @@ class FreqtradeBot(LoggingMixin):
if not_closed:
if fully_cancelled or (order_obj and self.strategy.ft_check_timed_out(
trade, order_obj, datetime.now(timezone.utc))):
self.handle_timedout_order(order, trade)
self.handle_cancel_order(order, trade, constants.CANCEL_REASON['TIMEOUT'])
else:
self.replace_order(order, order_obj, trade)

def handle_timedout_order(self, order: Dict, trade: Trade) -> None:
def handle_cancel_order(self, order: Dict, trade: Trade, reason: str) -> None:
"""
Check if current analyzed order timed out and cancel if necessary.
:param order: Order dict grabbed with exchange.fetch_order()
@@ -1253,22 +1280,24 @@ class FreqtradeBot(LoggingMixin):
:return: None
"""
if order['side'] == trade.entry_side:
self.handle_cancel_enter(trade, order, constants.CANCEL_REASON['TIMEOUT'])
self.handle_cancel_enter(trade, order, reason)
else:
canceled = self.handle_cancel_exit(
trade, order, constants.CANCEL_REASON['TIMEOUT'])
canceled = self.handle_cancel_exit(trade, order, reason)
canceled_count = trade.get_exit_order_count()
max_timeouts = self.config.get('unfilledtimeout', {}).get('exit_timeout_count', 0)
if canceled and max_timeouts > 0 and canceled_count >= max_timeouts:
logger.warning(f'Emergency exiting trade {trade}, as the exit order '
f'timed out {max_timeouts} times.')
try:
self.execute_trade_exit(
trade, order['price'],
exit_check=ExitCheckTuple(exit_type=ExitType.EMERGENCY_EXIT))
except DependencyException as exception:
logger.warning(
f'Unable to emergency sell trade {trade.pair}: {exception}')
self.emergency_exit(trade, order['price'])

def emergency_exit(self, trade: Trade, price: float) -> None:
try:
self.execute_trade_exit(
trade, price,
exit_check=ExitCheckTuple(exit_type=ExitType.EMERGENCY_EXIT))
except DependencyException as exception:
logger.warning(
f'Unable to emergency exit trade {trade.pair}: {exception}')

def replace_order(self, order: Dict, order_obj: Optional[Order], trade: Trade) -> None:
"""
@@ -1295,7 +1324,7 @@ class FreqtradeBot(LoggingMixin):
default_retval=order_obj.price)(
trade=trade, order=order_obj, pair=trade.pair,
current_time=datetime.now(timezone.utc), proposed_rate=proposed_rate,
current_order_rate=order_obj.price, entry_tag=trade.enter_tag,
current_order_rate=order_obj.safe_price, entry_tag=trade.enter_tag,
side=trade.entry_side)

replacing = True
@@ -1311,7 +1340,8 @@ class FreqtradeBot(LoggingMixin):
# place new order only if new price is supplied
self.execute_entry(
pair=trade.pair,
stake_amount=(order_obj.remaining * order_obj.price / trade.leverage),
stake_amount=(
order_obj.safe_remaining * order_obj.safe_price / trade.leverage),
price=adjusted_entry_price,
trade=trade,
is_short=trade.is_short,
@@ -1325,6 +1355,8 @@ class FreqtradeBot(LoggingMixin):
"""

for trade in Trade.get_open_order_trades():
if not trade.open_order_id:
continue
try:
order = self.exchange.fetch_order(trade.open_order_id, trade.pair)
except (ExchangeError):
@@ -1349,6 +1381,9 @@ class FreqtradeBot(LoggingMixin):
"""
was_trade_fully_canceled = False
side = trade.entry_side.capitalize()
if not trade.open_order_id:
logger.warning(f"No open order for {trade}.")
return False

# Cancelled orders may have the status of 'canceled' or 'closed'
if order['status'] not in constants.NON_OPEN_EXCHANGE_STATES:
@@ -1435,34 +1470,32 @@ class FreqtradeBot(LoggingMixin):
return False

try:
co = self.exchange.cancel_order_with_result(trade.open_order_id, trade.pair,
trade.amount)
order = self.exchange.cancel_order_with_result(order['id'], trade.pair,
trade.amount)
except InvalidOrderException:
logger.exception(
f"Could not cancel {trade.exit_side} order {trade.open_order_id}")
return False
trade.close_rate = None
trade.close_rate_requested = None
trade.close_profit = None
trade.close_profit_abs = None

# Set exit_reason for fill message
exit_reason_prev = trade.exit_reason
trade.exit_reason = trade.exit_reason + f", {reason}" if trade.exit_reason else reason
self.update_trade_state(trade, trade.open_order_id, co)
# Order might be filled above in odd timing issues.
if co.get('status') in ('canceled', 'cancelled'):
if order.get('status') in ('canceled', 'cancelled'):
trade.exit_reason = None
trade.open_order_id = None
else:
trade.exit_reason = exit_reason_prev

logger.info(f'{trade.exit_side.capitalize()} order {reason} for {trade}.')
cancelled = True
else:
reason = constants.CANCEL_REASON['CANCELLED_ON_EXCHANGE']
logger.info(f'{trade.exit_side.capitalize()} order {reason} for {trade}.')
self.update_trade_state(trade, trade.open_order_id, order)
trade.open_order_id = None
trade.exit_reason = None

self.update_trade_state(trade, trade.open_order_id, order)

logger.info(f'{trade.exit_side.capitalize()} order {reason} for {trade}.')
trade.open_order_id = None
trade.close_rate = None
trade.close_rate_requested = None

self._notify_exit_cancel(
trade,
@@ -1510,7 +1543,7 @@ class FreqtradeBot(LoggingMixin):
*,
exit_tag: Optional[str] = None,
ordertype: Optional[str] = None,
sub_trade_amt: float = None,
sub_trade_amt: Optional[float] = None,
) -> bool:
"""
Executes a trade exit for the given trade and limit
@@ -1587,7 +1620,7 @@ class FreqtradeBot(LoggingMixin):
self.handle_insufficient_funds(trade)
return False

order_obj = Order.parse_from_ccxt_object(order, trade.pair, trade.exit_side)
order_obj = Order.parse_from_ccxt_object(order, trade.pair, trade.exit_side, amount, limit)
trade.orders.append(order_obj)

trade.open_order_id = order['id']
@@ -1604,7 +1637,7 @@ class FreqtradeBot(LoggingMixin):
return True

def _notify_exit(self, trade: Trade, order_type: str, fill: bool = False,
sub_trade: bool = False, order: Order = None) -> None:
sub_trade: bool = False, order: Optional[Order] = None) -> None:
"""
Sends rpc notification when a sell occurred.
"""
@@ -1614,13 +1647,13 @@ class FreqtradeBot(LoggingMixin):

# second condition is for mypy only; order will always be passed during sub trade
if sub_trade and order is not None:
amount = order.safe_filled if fill else order.amount
amount = order.safe_filled if fill else order.safe_amount
order_rate: float = order.safe_price

profit = trade.calc_profit(rate=order_rate, amount=amount, open_rate=trade.open_rate)
profit_ratio = trade.calc_profit_ratio(order_rate, amount, trade.open_rate)
else:
order_rate = trade.close_rate if trade.close_rate else trade.close_rate_requested
order_rate = trade.safe_close_rate
profit = trade.calc_profit(rate=order_rate) + (0.0 if fill else trade.realized_profit)
profit_ratio = trade.calc_profit_ratio(order_rate)
amount = trade.amount
@@ -1675,7 +1708,7 @@ class FreqtradeBot(LoggingMixin):
raise DependencyException(
f"Order_obj not found for {order_id}. This should not have happened.")

profit_rate = trade.close_rate if trade.close_rate else trade.close_rate_requested
profit_rate: float = trade.safe_close_rate
profit_trade = trade.calc_profit(rate=profit_rate)
current_rate = self.exchange.get_rate(
trade.pair, side='exit', is_short=trade.is_short, refresh=False)
@@ -1717,8 +1750,10 @@ class FreqtradeBot(LoggingMixin):
# Common update trade state methods
#

def update_trade_state(self, trade: Trade, order_id: str, action_order: Dict[str, Any] = None,
stoploss_order: bool = False, send_msg: bool = True) -> bool:
def update_trade_state(
self, trade: Trade, order_id: Optional[str],
action_order: Optional[Dict[str, Any]] = None,
stoploss_order: bool = False, send_msg: bool = True) -> bool:
"""
Checks trades with open orders and updates the amount if necessary
Handles closing both buy and sell orders.
@@ -1776,6 +1811,7 @@ class FreqtradeBot(LoggingMixin):
is_short=trade.is_short,
amount=trade.amount,
stake_amount=trade.stake_amount,
leverage=trade.leverage,
wallet_balance=trade.stake_amount,
))

@@ -1,2 +1 @@
# flake8: noqa: F401
from freqtrade.leverage.interest import interest
from freqtrade.leverage.interest import interest  # noqa: F401

@@ -103,9 +103,9 @@ def setup_logging(config: Config) -> None:
logging.root.addHandler(handler_sl)
elif s[0] == 'journald':  # pragma: no cover
try:
from systemd.journal import JournaldLogHandler
from cysystemd.journal import JournaldLogHandler
except ImportError:
raise OperationalException("You need the systemd python package to be installed in "
raise OperationalException("You need the cysystemd python package to be installed in "
"order to use logging to journald.")
handler_jd = get_existing_handlers(JournaldLogHandler)
if handler_jd:
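
The import swap means journald logging now requires the `cysystemd` package rather than `systemd-python`; handler selection itself is unchanged. A condensed sketch of the resulting wiring (assumes `cysystemd` is installed and that the log target was configured as `journald`, e.g. via `--logfile journald`):

```python
import logging

from cysystemd.journal import JournaldLogHandler  # replaces systemd.journal

logging.root.addHandler(JournaldLogHandler())
```
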
@@ -5,7 +5,7 @@ Read the documentation to know what cli arguments you need.
"""
import logging
import sys
from typing import Any, List
from typing import Any, List, Optional

from freqtrade.util.gc_setup import gc_set_threshold

@@ -23,7 +23,7 @@ from freqtrade.loggers import setup_logging_pre
logger = logging.getLogger('freqtrade')

def main(sysargv: List[str] = None) -> None:
def main(sysargv: Optional[List[str]] = None) -> None:
"""
This function will initiate the bot and start the trading loop.
:return: None

@@ -6,8 +6,7 @@ import logging
import re
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, Iterator, List, Mapping, Union
from typing.io import IO
from typing import Any, Dict, Iterator, List, Mapping, Optional, TextIO, Union
from urllib.parse import urlparse

import orjson
@@ -81,7 +80,7 @@ def file_dump_json(filename: Path, data: Any, is_zip: bool = False, log: bool =
else:
if log:
logger.info(f'dumping json to "{filename}"')
with open(filename, 'w') as fp:
with filename.open('w') as fp:
rapidjson.dump(data, fp, default=str, number_mode=rapidjson.NM_NATIVE)

logger.debug(f'done json to "{filename}"')
@@ -98,12 +97,12 @@ def file_dump_joblib(filename: Path, data: Any, log: bool = True) -> None:

if log:
logger.info(f'dumping joblib to "{filename}"')
with open(filename, 'wb') as fp:
with filename.open('wb') as fp:
joblib.dump(data, fp)
logger.debug(f'done joblib dump to "{filename}"')

def json_load(datafile: IO) -> Any:
def json_load(datafile: Union[gzip.GzipFile, TextIO]) -> Any:
"""
load data with rapidjson
Use this to have a consistent experience,
@@ -112,7 +111,7 @@ def json_load(datafile: IO) -> Any:
return rapidjson.load(datafile, number_mode=rapidjson.NM_NATIVE)

def file_load_json(file):
def file_load_json(file: Path):

if file.suffix != ".gz":
gzipfile = file.with_suffix(file.suffix + '.gz')
@@ -125,7 +124,7 @@ def file_load_json(file):
pairdata = json_load(datafile)
elif file.is_file():
logger.debug(f"Loading historical data from file {file}")
with open(file) as datafile:
with file.open() as datafile:
pairdata = json_load(datafile)
else:
return None
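
The tightened annotation documents the two handle types `json_load` actually receives, both visible in `file_load_json` above. A condensed sketch of the two branches (the path is illustrative; `json_load` lives in `freqtrade.misc`):

```python
import gzip
from pathlib import Path

from freqtrade.misc import json_load

file = Path("user_data/data/binance/BTC_USDT-5m.json")  # illustrative
gzipfile = file.with_suffix(file.suffix + ".gz")
if gzipfile.is_file():
    with gzip.open(gzipfile) as datafile:  # gzip.GzipFile branch
        pairdata = json_load(datafile)
elif file.is_file():
    with file.open() as datafile:          # TextIO branch
        pairdata = json_load(datafile)
```
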
@@ -205,7 +204,7 @@ def safe_value_fallback2(dict1: dictMap, dict2: dictMap, key1: str, key2: str, d
return default_value

def plural(num: float, singular: str, plural: str = None) -> str:
def plural(num: float, singular: str, plural: Optional[str] = None) -> str:
return singular if (num == 1 or num == -1) else plural or singular + 's'
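
For reference, `plural()` behaves as follows (doctest-style, derived directly from the one-liner above):

```python
assert plural(1, "day") == "day"
assert plural(3, "day") == "days"
assert plural(2, "candle", "candles") == "candles"
assert plural(-1, "entry", "entries") == "entry"
```
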
@@ -269,6 +268,8 @@ def dataframe_to_json(dataframe: pd.DataFrame) -> str:
def default(z):
if isinstance(z, pd.Timestamp):
return z.timestamp() * 1e3
if z is pd.NaT:
return 'NaT'
raise TypeError

return str(orjson.dumps(dataframe.to_dict(orient='split'), default=default), 'utf-8')
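
orjson cannot serialize pandas' `Timestamp` or `NaT` natively, so the `default` hook converts them; the `NaT` branch is the new addition here. A quick check of what the hook produces (values are illustrative):

```python
import pandas as pd

ts = pd.Timestamp("2023-03-01 00:00:00")
print(ts.timestamp() * 1e3)  # 1677628800000.0 -> milliseconds since epoch
print(repr(pd.NaT))          # NaT; serialized as the string 'NaT' by the hook
```
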
@@ -301,3 +302,21 @@ def remove_entry_exit_signals(dataframe: pd.DataFrame):
dataframe[SignalTagType.EXIT_TAG.value] = None

return dataframe

def append_candles_to_dataframe(left: pd.DataFrame, right: pd.DataFrame) -> pd.DataFrame:
"""
Append the `right` dataframe to the `left` dataframe

:param left: The full dataframe you want appended to
:param right: The new dataframe containing the data you want appended
:returns: The dataframe with the right data in it
"""
if left.iloc[-1]['date'] != right.iloc[-1]['date']:
left = pd.concat([left, right])

# Only keep the last 1500 candles in memory
left = left[-1500:] if len(left) > 1500 else left
left.reset_index(drop=True, inplace=True)

return left
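
A minimal usage sketch of the new helper (assumes `append_candles_to_dataframe` is imported from `freqtrade.misc`; the data is illustrative):

```python
import pandas as pd

from freqtrade.misc import append_candles_to_dataframe

left = pd.DataFrame({"date": pd.date_range("2023-01-01", periods=3, freq="5min"),
                     "close": [1.0, 1.1, 1.2]})
right = pd.DataFrame({"date": pd.date_range("2023-01-01 00:15", periods=1, freq="5min"),
                      "close": [1.3]})

combined = append_candles_to_dataframe(left, right)
assert len(combined) == 4  # appended, since the last candle dates differ
```
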
@@ -1,2 +1 @@
# flake8: noqa: F401
from freqtrade.mixins.logging_mixin import LoggingMixin
from freqtrade.mixins.logging_mixin import LoggingMixin  # noqa: F401

@@ -29,7 +29,7 @@ def get_strategy_run_id(strategy) -> str:
# Include _ft_params_from_file - so changing parameter files causes cache eviction
digest.update(rapidjson.dumps(
strategy._ft_params_from_file, default=str, number_mode=rapidjson.NM_NAN).encode('utf-8'))
with open(strategy.__file__, 'rb') as fp:
with Path(strategy.__file__).open('rb') as fp:
digest.update(fp.read())
return digest.hexdigest().lower()

@@ -15,7 +15,7 @@ from pandas import DataFrame

from freqtrade import constants
from freqtrade.configuration import TimeRange, validate_config_consistency
from freqtrade.constants import DATETIME_PRINT_FORMAT, Config, LongShort
from freqtrade.constants import DATETIME_PRINT_FORMAT, Config, IntOrInf, LongShort
from freqtrade.data import history
from freqtrade.data.btanalysis import find_existing_backtest_stats, trade_list_to_dataframe
from freqtrade.data.converter import trim_dataframe, trim_dataframes
@@ -37,6 +37,7 @@ from freqtrade.plugins.protectionmanager import ProtectionManager
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
from freqtrade.strategy.interface import IStrategy
from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper
from freqtrade.util.binance_mig import migrate_binance_futures_data
from freqtrade.wallets import Wallets

@@ -94,7 +95,7 @@ class Backtesting:
if self.config.get('strategy_list'):
if self.config.get('freqai', {}).get('enabled', False):
logger.warning("Using --strategy-list with FreqAI REQUIRES all strategies "
"to have identical populate_any_indicators.")
"to have identical feature_engineering_* functions.")
for strat in list(self.config['strategy_list']):
stratconf = deepcopy(self.config)
stratconf['strategy'] = strat
@@ -159,6 +160,7 @@ class Backtesting:
self._can_short = self.trading_mode != TradingMode.SPOT
self._position_stacking: bool = self.config.get('position_stacking', False)
self.enable_protections: bool = self.config.get('enable_protections', False)
migrate_binance_futures_data(config)

self.init_backtest()

@@ -440,7 +442,8 @@ class Backtesting:
side_1 * abs(self.strategy.trailing_stop_positive / leverage)))
else:
# Worst case: price ticks tiny bit above open and dives down.
stop_rate = row[OPEN_IDX] * (1 - side_1 * abs(trade.stop_loss_pct / leverage))
stop_rate = row[OPEN_IDX] * (1 - side_1 * abs(
(trade.stop_loss_pct or 0.0) / leverage))
if is_short:
assert stop_rate > row[LOW_IDX]
else:
@@ -472,7 +475,7 @@ class Backtesting:
# - (Expected abs profit - open_rate - open_fee) / (fee_close -1)
roi_rate = trade.open_rate * roi / leverage
open_fee_rate = side_1 * trade.open_rate * (1 + side_1 * trade.fee_open)
close_rate = -(roi_rate + open_fee_rate) / (trade.fee_close - side_1 * 1)
close_rate = -(roi_rate + open_fee_rate) / ((trade.fee_close or 0.0) - side_1 * 1)
if is_short:
is_new_roi = row[OPEN_IDX] < close_rate
else:
@@ -563,7 +566,7 @@ class Backtesting:
pos_trade = self._get_exit_for_signal(trade, row, exit_, amount)
if pos_trade is not None:
order = pos_trade.orders[-1]
if self._get_order_filled(order.price, row):
if self._get_order_filled(order.ft_price, row):
order.close_bt_order(current_date, trade)
trade.recalc_trade_from_orders()
self.wallets.update()
@@ -575,26 +578,6 @@ class Backtesting:
""" Rate is within candle, therefore filled"""
return row[LOW_IDX] <= rate <= row[HIGH_IDX]

def _get_exit_trade_entry_for_candle(self, trade: LocalTrade,
row: Tuple) -> Optional[LocalTrade]:

# Check if we need to adjust our current positions
if self.strategy.position_adjustment_enable:
trade = self._get_adjust_trade_entry_for_candle(trade, row)

enter = row[SHORT_IDX] if trade.is_short else row[LONG_IDX]
exit_sig = row[ESHORT_IDX] if trade.is_short else row[ELONG_IDX]
exits = self.strategy.should_exit(
trade, row[OPEN_IDX], row[DATE_IDX].to_pydatetime(),  # type: ignore
enter=enter, exit_=exit_sig,
low=row[LOW_IDX], high=row[HIGH_IDX]
)
for exit_ in exits:
t = self._get_exit_for_signal(trade, row, exit_)
if t:
return t
return None

def _get_exit_for_signal(
self, trade: LocalTrade, row: Tuple, exit_: ExitCheckTuple,
amount: Optional[float] = None) -> Optional[LocalTrade]:
@@ -664,7 +647,7 @@ class Backtesting:
return None

def _exit_trade(self, trade: LocalTrade, sell_row: Tuple,
close_rate: float, amount: float = None) -> Optional[LocalTrade]:
close_rate: float, amount: Optional[float] = None) -> Optional[LocalTrade]:
self.order_id_counter += 1
exit_candle_time = sell_row[DATE_IDX].to_pydatetime()
order_type = self.strategy.order_types['exit']
@@ -684,6 +667,7 @@ class Backtesting:
side=trade.exit_side,
order_type=order_type,
status="open",
ft_price=close_rate,
price=close_rate,
average=close_rate,
amount=amount,
@@ -694,11 +678,10 @@ class Backtesting:
trade.orders.append(order)
return trade

def _get_exit_trade_entry(
self, trade: LocalTrade, row: Tuple, is_first: bool) -> Optional[LocalTrade]:
def _check_trade_exit(self, trade: LocalTrade, row: Tuple) -> Optional[LocalTrade]:
exit_candle_time: datetime = row[DATE_IDX].to_pydatetime()

if is_first and self.trading_mode == TradingMode.FUTURES:
if self.trading_mode == TradingMode.FUTURES:
trade.funding_fees = self.exchange.calculate_funding_fees(
self.futures_data[trade.pair],
amount=trade.amount,
@@ -707,7 +690,22 @@ class Backtesting:
close_date=exit_candle_time,
)

return self._get_exit_trade_entry_for_candle(trade, row)
# Check if we need to adjust our current positions
if self.strategy.position_adjustment_enable:
trade = self._get_adjust_trade_entry_for_candle(trade, row)

enter = row[SHORT_IDX] if trade.is_short else row[LONG_IDX]
exit_sig = row[ESHORT_IDX] if trade.is_short else row[ELONG_IDX]
exits = self.strategy.should_exit(
trade, row[OPEN_IDX], row[DATE_IDX].to_pydatetime(),  # type: ignore
enter=enter, exit_=exit_sig,
low=row[LOW_IDX], high=row[HIGH_IDX]
)
for exit_ in exits:
t = self._get_exit_for_signal(trade, row, exit_)
if t:
return t
return None

def get_valid_price_and_stake(
self, pair: str, row: Tuple, propose_rate: float, stake_amount: float,
@@ -753,7 +751,7 @@ class Backtesting:
leverage = min(max(leverage, 1.0), max_leverage)

min_stake_amount = self.exchange.get_min_pair_stake_amount(
pair, propose_rate, -0.05, leverage=leverage) or 0
pair, propose_rate, -0.05 if not pos_adjust else 0.0, leverage=leverage) or 0
max_stake_amount = self.exchange.get_max_pair_stake_amount(
pair, propose_rate, leverage=leverage)
stake_available = self.wallets.get_available_stake_amount()
@@ -771,6 +769,7 @@ class Backtesting:
stake_amount=stake_amount,
min_stake_amount=min_stake_amount,
max_stake_amount=max_stake_amount,
trade_amount=trade.stake_amount if trade else None
)

return propose_rate, stake_amount_val, leverage, min_stake_amount
@@ -780,6 +779,11 @@ class Backtesting:
trade: Optional[LocalTrade] = None,
requested_rate: Optional[float] = None,
requested_stake: Optional[float] = None) -> Optional[LocalTrade]:
"""
:param trade: Trade to adjust - initial entry if None
:param requested_rate: Adjusted entry rate
:param requested_stake: Stake amount for adjusted orders (`adjust_entry_price`).
"""

current_time = row[DATE_IDX].to_pydatetime()
entry_tag = row[ENTER_TAG_IDX] if len(row) >= ENTER_TAG_IDX + 1 else None
@@ -805,7 +809,7 @@ class Backtesting:
return trade
time_in_force = self.strategy.order_time_in_force['entry']

if stake_amount and (not min_stake_amount or stake_amount > min_stake_amount):
if stake_amount and (not min_stake_amount or stake_amount >= min_stake_amount):
self.order_id_counter += 1
base_currency = self.exchange.get_pair_base_currency(pair)
amount_p = (stake_amount / propose_rate) * leverage
@@ -868,6 +872,7 @@ class Backtesting:
open_rate=propose_rate,
amount=amount,
stake_amount=trade.stake_amount,
leverage=trade.leverage,
wallet_balance=trade.stake_amount,
is_short=is_short,
))
@@ -886,6 +891,7 @@ class Backtesting:
order_date=current_time,
order_filled_date=current_time,
order_update_date=current_time,
ft_price=propose_rate,
price=propose_rate,
average=propose_rate,
amount=amount,
@@ -894,7 +900,7 @@ class Backtesting:
cost=stake_amount + trade.fee_open,
)
trade.orders.append(order)
if pos_adjust and self._get_order_filled(order.price, row):
if pos_adjust and self._get_order_filled(order.ft_price, row):
order.close_bt_order(current_time, trade)
else:
trade.open_order_id = str(self.order_id_counter)
@@ -921,8 +927,9 @@ class Backtesting:
trade.close(exit_row[OPEN_IDX], show_msg=False)
LocalTrade.close_bt_trade(trade)

def trade_slot_available(self, max_open_trades: int, open_trade_count: int) -> bool:
def trade_slot_available(self, open_trade_count: int) -> bool:
# Always allow trades when max_open_trades is enabled.
max_open_trades: IntOrInf = self.config['max_open_trades']
if max_open_trades <= 0 or open_trade_count < max_open_trades:
return True
# Rejected trade
@@ -1006,15 +1013,15 @@ class Backtesting:
# only check on new candles for open entry orders
if order.side == trade.entry_side and current_time > order.order_date_utc:
requested_rate = strategy_safe_wrapper(self.strategy.adjust_entry_price,
default_retval=order.price)(
default_retval=order.ft_price)(
trade=trade,  # type: ignore[arg-type]
order=order, pair=trade.pair, current_time=current_time,
proposed_rate=row[OPEN_IDX], current_order_rate=order.price,
proposed_rate=row[OPEN_IDX], current_order_rate=order.ft_price,
entry_tag=trade.enter_tag, side=trade.trade_direction
)  # default value is current order price

# cancel existing order whenever a new rate is requested (or None)
if requested_rate == order.price:
if requested_rate == order.ft_price:
# assumption: there can't be multiple open entry orders at any given time
return False
else:
@@ -1026,7 +1033,8 @@ class Backtesting:
if requested_rate:
self._enter_trade(pair=trade.pair, row=row, trade=trade,
requested_rate=requested_rate,
requested_stake=(order.remaining * order.price / trade.leverage),
requested_stake=(
order.safe_remaining * order.ft_price / trade.leverage),
direction='short' if trade.is_short else 'long')
self.replaced_entry_orders += 1
else:
@@ -1064,7 +1072,8 @@ class Backtesting:

def backtest_loop(
self, row: Tuple, pair: str, current_time: datetime, end_date: datetime,
max_open_trades: int, open_trade_count_start: int, is_first: bool = True) -> int:
open_trade_count_start: int, trade_dir: Optional[LongShort],
is_first: bool = True) -> int:
"""
NOTE: This method is used by Hyperopt at each iteration. Please keep it optimized.

@@ -1083,7 +1092,6 @@ class Backtesting:
# max_open_trades must be respected
# don't open on the last row
# We only open trades on the main candle, not on detail candles
trade_dir = self.check_for_trade_entry(row)
if (
(self._position_stacking or len(LocalTrade.bt_trades_open_pp[pair]) == 0)
and is_first
@@ -1091,7 +1099,7 @@ class Backtesting:
and trade_dir is not None
and not PairLocks.is_pair_locked(pair, row[DATE_IDX], trade_dir)
):
if (self.trade_slot_available(max_open_trades, open_trade_count_start)):
if (self.trade_slot_available(open_trade_count_start)):
trade = self._enter_trade(pair, row, trade_dir)
if trade:
# TODO: hacky workaround to avoid opening > max_open_trades
@@ -1107,18 +1115,18 @@ class Backtesting:
for trade in list(LocalTrade.bt_trades_open_pp[pair]):
# 3. Process entry orders.
order = trade.select_order(trade.entry_side, is_open=True)
if order and self._get_order_filled(order.price, row):
if order and self._get_order_filled(order.ft_price, row):
order.close_bt_order(current_time, trade)
trade.open_order_id = None
self.wallets.update()

# 4. Create exit orders (if any)
if not trade.open_order_id:
self._get_exit_trade_entry(trade, row, is_first)  # Place exit order if necessary
self._check_trade_exit(trade, row)  # Place exit order if necessary

# 5. Process exit orders.
order = trade.select_order(trade.exit_side, is_open=True)
if order and self._get_order_filled(order.price, row):
if order and self._get_order_filled(order.ft_price, row):
order.close_bt_order(current_time, trade)
trade.open_order_id = None
sub_trade = order.safe_amount_after_fee != trade.amount
@@ -1127,7 +1135,7 @@ class Backtesting:
trade.recalc_trade_from_orders()
else:
trade.close_date = current_time
trade.close(order.price, show_msg=False)
trade.close(order.ft_price, show_msg=False)

# logger.debug(f"{pair} - Backtesting exit {trade}")
LocalTrade.close_bt_trade(trade)
@@ -1136,8 +1144,7 @@ class Backtesting:
return open_trade_count_start

def backtest(self, processed: Dict,
start_date: datetime, end_date: datetime,
max_open_trades: int = 0) -> Dict[str, Any]:
start_date: datetime, end_date: datetime) -> Dict[str, Any]:
"""
Implement backtesting functionality

@@ -1149,7 +1156,6 @@ class Backtesting:
optimize memory usage!
:param start_date: backtesting timerange start datetime
:param end_date: backtesting timerange end datetime
:param max_open_trades: maximum number of concurrent trades, <= 0 means unlimited
:return: DataFrame with trades (results of backtesting)
"""
self.prepare_backtest(self.enable_protections)
@@ -1179,7 +1185,15 @@ class Backtesting:
indexes[pair] = row_index
self.dataprovider._set_dataframe_max_index(row_index)
current_detail_time: datetime = row[DATE_IDX].to_pydatetime()
if self.timeframe_detail and pair in self.detail_data:
trade_dir: Optional[LongShort] = self.check_for_trade_entry(row)

if (
(trade_dir is not None or len(LocalTrade.bt_trades_open_pp[pair]) > 0)
and self.timeframe_detail and pair in self.detail_data
):
# Spread out into detail timeframe.
# Should only happen when we are either in a trade for this pair
# or when we got the signal for a new trade.
exit_candle_end = current_detail_time + timedelta(minutes=self.timeframe_min)

detail_data = self.detail_data[pair]
@@ -1190,8 +1204,9 @@ class Backtesting:
if len(detail_data) == 0:
# Fall back to "regular" data if no detail data was found for this candle
open_trade_count_start = self.backtest_loop(
row, pair, current_time, end_date, max_open_trades,
open_trade_count_start)
row, pair, current_time, end_date,
open_trade_count_start, trade_dir)
continue
detail_data.loc[:, 'enter_long'] = row[LONG_IDX]
detail_data.loc[:, 'exit_long'] = row[ELONG_IDX]
detail_data.loc[:, 'enter_short'] = row[SHORT_IDX]
@@ -1202,13 +1217,14 @@ class Backtesting:
current_time_det = current_time
for det_row in detail_data[HEADERS].values.tolist():
open_trade_count_start = self.backtest_loop(
det_row, pair, current_time_det, end_date, max_open_trades,
open_trade_count_start, is_first)
det_row, pair, current_time_det, end_date,
open_trade_count_start, trade_dir, is_first)
current_time_det += timedelta(minutes=self.timeframe_detail_min)
is_first = False
else:
open_trade_count_start = self.backtest_loop(
row, pair, current_time, end_date, max_open_trades, open_trade_count_start)
row, pair, current_time, end_date,
open_trade_count_start, trade_dir)

# Move time one configured time_interval ahead.
self.progress.increment()
@@ -1240,13 +1256,11 @@ class Backtesting:
self._set_strategy(strat)

# Use max_open_trades in backtesting, except --disable-max-market-positions is set
if self.config.get('use_max_market_positions', True):
# Must come from strategy config, as the strategy may modify this setting.
max_open_trades = self.strategy.config['max_open_trades']
else:
if not self.config.get('use_max_market_positions', True):
logger.info(
'Ignoring max_open_trades (--disable-max-market-positions was used) ...')
max_open_trades = 0
self.strategy.max_open_trades = float('inf')
self.config.update({'max_open_trades': self.strategy.max_open_trades})

# need to reprocess data every time to populate signals
preprocessed = self.strategy.advise_all_indicators(data)
@@ -1269,7 +1283,6 @@ class Backtesting:
processed=preprocessed,
start_date=min_date,
end_date=max_date,
max_open_trades=max_open_trades,
)
backtest_end_time = datetime.now(timezone.utc)
results.update({

@@ -74,6 +74,7 @@ class Hyperopt:
self.roi_space: List[Dimension] = []
self.stoploss_space: List[Dimension] = []
self.trailing_space: List[Dimension] = []
self.max_open_trades_space: List[Dimension] = []
self.dimensions: List[Dimension] = []

self.config = config
@@ -117,11 +118,10 @@ class Hyperopt:
self.current_best_epoch: Optional[Dict[str, Any]] = None

# Use max_open_trades for hyperopt as well, except --disable-max-market-positions is set
if self.config.get('use_max_market_positions', True):
self.max_open_trades = self.config['max_open_trades']
else:
if not self.config.get('use_max_market_positions', True):
logger.debug('Ignoring max_open_trades (--disable-max-market-positions was used) ...')
self.max_open_trades = 0
self.backtesting.strategy.max_open_trades = float('inf')
config.update({'max_open_trades': self.backtesting.strategy.max_open_trades})

if HyperoptTools.has_space(self.config, 'sell'):
# Make sure use_exit_signal is enabled
@@ -209,6 +209,10 @@ class Hyperopt:
result['stoploss'] = {p.name: params.get(p.name) for p in self.stoploss_space}
if HyperoptTools.has_space(self.config, 'trailing'):
result['trailing'] = self.custom_hyperopt.generate_trailing_params(params)
if HyperoptTools.has_space(self.config, 'trades'):
result['max_open_trades'] = {
'max_open_trades': self.backtesting.strategy.max_open_trades
if self.backtesting.strategy.max_open_trades != float('inf') else -1}

return result

@@ -229,6 +233,8 @@ class Hyperopt:
'trailing_stop_positive_offset': strategy.trailing_stop_positive_offset,
'trailing_only_offset_is_reached': strategy.trailing_only_offset_is_reached,
}
if not HyperoptTools.has_space(self.config, 'trades'):
result['max_open_trades'] = {'max_open_trades': strategy.max_open_trades}
return result

def print_results(self, results) -> None:
@@ -280,8 +286,13 @@ class Hyperopt:
logger.debug("Hyperopt has 'trailing' space")
self.trailing_space = self.custom_hyperopt.trailing_space()

if HyperoptTools.has_space(self.config, 'trades'):
logger.debug("Hyperopt has 'trades' space")
self.max_open_trades_space = self.custom_hyperopt.max_open_trades_space()

self.dimensions = (self.buy_space + self.sell_space + self.protection_space
+ self.roi_space + self.stoploss_space + self.trailing_space)
+ self.roi_space + self.stoploss_space + self.trailing_space
+ self.max_open_trades_space)

def assign_params(self, params_dict: Dict, category: str) -> None:
"""
@@ -328,6 +339,20 @@ class Hyperopt:
self.backtesting.strategy.trailing_only_offset_is_reached = \
d['trailing_only_offset_is_reached']

if HyperoptTools.has_space(self.config, 'trades'):
if self.config["stake_amount"] == "unlimited" and \
(params_dict['max_open_trades'] == -1 or params_dict['max_open_trades'] == 0):
# Ignore unlimited max open trades if stake amount is unlimited
params_dict.update({'max_open_trades': self.config['max_open_trades']})

updated_max_open_trades = int(params_dict['max_open_trades']) \
if (params_dict['max_open_trades'] != -1
and params_dict['max_open_trades'] != 0) else float('inf')

self.config.update({'max_open_trades': updated_max_open_trades})

self.backtesting.strategy.max_open_trades = updated_max_open_trades

with self.data_pickle_file.open('rb') as f:
processed = load(f, mmap_mode='r')
if self.analyze_per_epoch:
@@ -337,8 +362,7 @@ class Hyperopt:
bt_results = self.backtesting.backtest(
processed=processed,
start_date=self.min_date,
end_date=self.max_date,
max_open_trades=self.max_open_trades,
end_date=self.max_date
)
backtest_end_time = datetime.now(timezone.utc)
bt_results.update({

@@ -91,5 +91,8 @@ class HyperOptAuto(IHyperOpt):
def trailing_space(self) -> List['Dimension']:
return self._get_func('trailing_space')()

def max_open_trades_space(self) -> List['Dimension']:
return self._get_func('max_open_trades_space')()

def generate_estimator(self, dimensions: List['Dimension'], **kwargs) -> EstimatorType:
return self._get_func('generate_estimator')(dimensions=dimensions, **kwargs)

@@ -191,6 +191,16 @@ class IHyperOpt(ABC):
Categorical([True, False], name='trailing_only_offset_is_reached'),
]

def max_open_trades_space(self) -> List[Dimension]:
"""
Create a max open trades space.

You may override it in your custom Hyperopt class.
"""
return [
Integer(-1, 10, name='max_open_trades'),
]

# This is needed for proper unpickling the class attribute timeframe
# which is set to the actual value by the resolver.
# Why do I still need such shamanic mantras in modern python?
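
Strategies can override the new space just like the existing ones; `-1` maps to "unlimited", as seen in the Hyperopt changes above. A hedged sketch (the bounds are illustrative; note that `trades` is not part of the `default` spaces, per the `has_space` change further down):

```python
from typing import List

from freqtrade.optimize.space import Dimension, Integer


class MyHyperOptMixin:  # picked up via the usual hyperopt hooks
    def max_open_trades_space(self) -> List[Dimension]:
        return [Integer(-1, 5, name='max_open_trades')]
```
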
@@ -5,13 +5,11 @@ This module defines the alternative HyperOptLoss class which can be used for
Hyperoptimization.
"""
from datetime import datetime
from math import sqrt as msqrt
from typing import Any, Dict

from pandas import DataFrame

from freqtrade.constants import Config
from freqtrade.data.metrics import calculate_max_drawdown
from freqtrade.data.metrics import calculate_calmar
from freqtrade.optimize.hyperopt import IHyperOptLoss

@@ -23,42 +21,15 @@ class CalmarHyperOptLoss(IHyperOptLoss):
"""

@staticmethod
def hyperopt_loss_function(
results: DataFrame,
trade_count: int,
min_date: datetime,
max_date: datetime,
config: Config,
processed: Dict[str, DataFrame],
backtest_stats: Dict[str, Any],
*args,
**kwargs
) -> float:
def hyperopt_loss_function(results: DataFrame, trade_count: int,
min_date: datetime, max_date: datetime,
config: Config, *args, **kwargs) -> float:
"""
Objective function, returns smaller number for more optimal results.

Uses Calmar Ratio calculation.
"""
total_profit = backtest_stats["profit_total"]
days_period = (max_date - min_date).days

# adding slippage of 0.1% per trade
total_profit = total_profit - 0.0005
expected_returns_mean = total_profit.sum() / days_period * 100

# calculate max drawdown
try:
_, _, _, _, _, max_drawdown = calculate_max_drawdown(
results, value_col="profit_abs"
)
except ValueError:
max_drawdown = 0

if max_drawdown != 0:
calmar_ratio = expected_returns_mean / max_drawdown * msqrt(365)
else:
# Define high (negative) calmar ratio to be clear that this is NOT optimal.
calmar_ratio = -20.0

starting_balance = config['dry_run_wallet']
calmar_ratio = calculate_calmar(results, min_date, max_date, starting_balance)
# print(expected_returns_mean, max_drawdown, calmar_ratio)
return -calmar_ratio
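
The loss now delegates to the shared metric helper instead of recomputing drawdown inline. A custom loss following the same pattern (this mirrors the signature and call used above):

```python
from datetime import datetime

from pandas import DataFrame

from freqtrade.constants import Config
from freqtrade.data.metrics import calculate_calmar
from freqtrade.optimize.hyperopt import IHyperOptLoss


class MyCalmarLoss(IHyperOptLoss):
    @staticmethod
    def hyperopt_loss_function(results: DataFrame, trade_count: int,
                               min_date: datetime, max_date: datetime,
                               config: Config, *args, **kwargs) -> float:
        starting_balance = config['dry_run_wallet']
        # Negative because hyperopt minimizes; a higher Calmar is better.
        return -calculate_calmar(results, min_date, max_date, starting_balance)
```
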
@@ -6,9 +6,10 @@ Hyperoptimization.
"""
from datetime import datetime

import numpy as np
from pandas import DataFrame

from freqtrade.constants import Config
from freqtrade.data.metrics import calculate_sharpe
from freqtrade.optimize.hyperopt import IHyperOptLoss

@@ -22,25 +23,13 @@ class SharpeHyperOptLoss(IHyperOptLoss):
@staticmethod
def hyperopt_loss_function(results: DataFrame, trade_count: int,
min_date: datetime, max_date: datetime,
*args, **kwargs) -> float:
config: Config, *args, **kwargs) -> float:
"""
Objective function, returns smaller number for more optimal results.

Uses Sharpe Ratio calculation.
"""
total_profit = results["profit_ratio"]
days_period = (max_date - min_date).days

# adding slippage of 0.1% per trade
total_profit = total_profit - 0.0005
expected_returns_mean = total_profit.sum() / days_period
up_stdev = np.std(total_profit)

if up_stdev != 0:
sharp_ratio = expected_returns_mean / up_stdev * np.sqrt(365)
else:
# Define high (negative) sharpe ratio to be clear that this is NOT optimal.
sharp_ratio = -20.

starting_balance = config['dry_run_wallet']
sharp_ratio = calculate_sharpe(results, min_date, max_date, starting_balance)
# print(expected_returns_mean, up_stdev, sharp_ratio)
return -sharp_ratio

@@ -44,7 +44,7 @@ class SharpeHyperOptLossDaily(IHyperOptLoss):

sum_daily = (
results.resample(resample_freq, on='close_date').agg(
{"profit_ratio_after_slippage": sum}).reindex(t_index).fillna(0)
{"profit_ratio_after_slippage": 'sum'}).reindex(t_index).fillna(0)
)

total_profit = sum_daily["profit_ratio_after_slippage"] - risk_free_rate
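
Passing the builtin `sum` to `DataFrame.agg` is deprecated in recent pandas (it warns and coerces to the string alias), hence the quoted `'sum'`. A quick check of the resampling behavior (data illustrative):

```python
import pandas as pd

df = pd.DataFrame({
    "close_date": pd.date_range("2023-01-01", periods=4, freq="12h"),
    "profit_ratio_after_slippage": [0.01, -0.02, 0.03, 0.00],
})
daily = df.resample("1D", on="close_date").agg({"profit_ratio_after_slippage": "sum"})
# 2023-01-01 -> -0.01, 2023-01-02 -> 0.03
```
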
@@ -6,9 +6,10 @@ Hyperoptimization.
"""
from datetime import datetime

import numpy as np
from pandas import DataFrame

from freqtrade.constants import Config
from freqtrade.data.metrics import calculate_sortino
from freqtrade.optimize.hyperopt import IHyperOptLoss

@@ -22,28 +23,13 @@ class SortinoHyperOptLoss(IHyperOptLoss):
@staticmethod
def hyperopt_loss_function(results: DataFrame, trade_count: int,
min_date: datetime, max_date: datetime,
*args, **kwargs) -> float:
config: Config, *args, **kwargs) -> float:
"""
Objective function, returns smaller number for more optimal results.

Uses Sortino Ratio calculation.
"""
total_profit = results["profit_ratio"]
days_period = (max_date - min_date).days

# adding slippage of 0.1% per trade
total_profit = total_profit - 0.0005
expected_returns_mean = total_profit.sum() / days_period

results['downside_returns'] = 0
results.loc[total_profit < 0, 'downside_returns'] = results['profit_ratio']
down_stdev = np.std(results['downside_returns'])

if down_stdev != 0:
sortino_ratio = expected_returns_mean / down_stdev * np.sqrt(365)
else:
# Define high (negative) sortino ratio to be clear that this is NOT optimal.
sortino_ratio = -20.

starting_balance = config['dry_run_wallet']
sortino_ratio = calculate_sortino(results, min_date, max_date, starting_balance)
# print(expected_returns_mean, down_stdev, sortino_ratio)
return -sortino_ratio

@@ -46,7 +46,7 @@ class SortinoHyperOptLossDaily(IHyperOptLoss):

sum_daily = (
results.resample(resample_freq, on='close_date').agg(
{"profit_ratio_after_slippage": sum}).reindex(t_index).fillna(0)
{"profit_ratio_after_slippage": 'sum'}).reindex(t_index).fillna(0)
)

total_profit = sum_daily["profit_ratio_after_slippage"] - minimum_acceptable_return
15
freqtrade/optimize/hyperopt_tools.py
Executable file → Normal file
15
freqtrade/optimize/hyperopt_tools.py
Executable file → Normal file
@@ -96,7 +96,7 @@ class HyperoptTools():
|
||||
Tell if the space value is contained in the configuration
|
||||
"""
|
||||
# 'trailing' and 'protection spaces are not included in the 'default' set of spaces
|
||||
if space in ('trailing', 'protection'):
|
||||
if space in ('trailing', 'protection', 'trades'):
|
||||
return any(s in config['spaces'] for s in [space, 'all'])
|
||||
else:
|
||||
return any(s in config['spaces'] for s in [space, 'all', 'default'])
|
||||
@@ -170,7 +170,7 @@ class HyperoptTools():
|
||||
|
||||
@staticmethod
|
||||
def show_epoch_details(results, total_epochs: int, print_json: bool,
|
||||
no_header: bool = False, header_str: str = None) -> None:
|
||||
no_header: bool = False, header_str: Optional[str] = None) -> None:
|
||||
"""
|
||||
Display details of the hyperopt result
|
||||
"""
|
||||
@@ -187,7 +187,8 @@ class HyperoptTools():
|
||||
|
||||
if print_json:
|
||||
result_dict: Dict = {}
|
||||
for s in ['buy', 'sell', 'protection', 'roi', 'stoploss', 'trailing']:
|
||||
for s in ['buy', 'sell', 'protection',
|
||||
'roi', 'stoploss', 'trailing', 'max_open_trades']:
|
||||
HyperoptTools._params_update_for_json(result_dict, params, non_optimized, s)
|
||||
print(rapidjson.dumps(result_dict, default=str, number_mode=rapidjson.NM_NATIVE))
|
||||
|
||||
@@ -201,6 +202,8 @@ class HyperoptTools():
|
||||
HyperoptTools._params_pretty_print(params, 'roi', "ROI table:", non_optimized)
|
||||
HyperoptTools._params_pretty_print(params, 'stoploss', "Stoploss:", non_optimized)
|
||||
HyperoptTools._params_pretty_print(params, 'trailing', "Trailing stop:", non_optimized)
|
||||
HyperoptTools._params_pretty_print(
|
||||
params, 'max_open_trades', "Max Open Trades:", non_optimized)
|
||||
|
||||
@staticmethod
|
||||
def _params_update_for_json(result_dict, params, non_optimized, space: str) -> None:
|
||||
@@ -239,7 +242,9 @@ class HyperoptTools():
|
||||
if space == "stoploss":
|
||||
stoploss = safe_value_fallback2(space_params, no_params, space, space)
|
||||
result += (f"stoploss = {stoploss}{appendix}")
|
||||
|
||||
elif space == "max_open_trades":
|
||||
max_open_trades = safe_value_fallback2(space_params, no_params, space, space)
|
||||
result += (f"max_open_trades = {max_open_trades}{appendix}")
|
||||
elif space == "roi":
|
||||
result = result[:-1] + f'{appendix}\n'
|
||||
minimal_roi_result = rapidjson.dumps({
|
||||
@@ -259,7 +264,7 @@ class HyperoptTools():
|
||||
print(result)
|
||||
|
||||
@staticmethod
|
||||
def _space_params(params, space: str, r: int = None) -> Dict:
|
||||
def _space_params(params, space: str, r: Optional[int] = None) -> Dict:
|
||||
d = params.get(space)
|
||||
if d:
|
||||
# Round floats to `r` digits after the decimal point if requested
|
||||
|
@@ -8,9 +8,10 @@ from pandas import DataFrame, to_datetime
|
||||
from tabulate import tabulate
|
||||
|
||||
from freqtrade.constants import (DATETIME_PRINT_FORMAT, LAST_BT_RESULT_FN, UNLIMITED_STAKE_AMOUNT,
|
||||
Config)
|
||||
from freqtrade.data.metrics import (calculate_cagr, calculate_csum, calculate_market_change,
|
||||
calculate_max_drawdown)
|
||||
Config, IntOrInf)
|
||||
from freqtrade.data.metrics import (calculate_cagr, calculate_calmar, calculate_csum,
|
||||
calculate_expectancy, calculate_market_change,
|
||||
calculate_max_drawdown, calculate_sharpe, calculate_sortino)
|
||||
from freqtrade.misc import decimals_per_coin, file_dump_joblib, file_dump_json, round_coin_value
|
||||
from freqtrade.optimize.backtest_caching import get_backtest_metadata_filename
|
||||
|
||||
@@ -199,7 +200,7 @@ def generate_tag_metrics(tag_type: str,
|
||||
return []
|
||||
|
||||
|
||||
def generate_exit_reason_stats(max_open_trades: int, results: DataFrame) -> List[Dict]:
|
||||
def generate_exit_reason_stats(max_open_trades: IntOrInf, results: DataFrame) -> List[Dict]:
|
||||
"""
|
||||
Generate small table outlining Backtest results
|
||||
:param max_open_trades: Max_open_trades parameter
|
||||
@@ -457,6 +458,10 @@ def generate_strategy_stats(pairlist: List[str],
|
||||
'profit_total_long_abs': results.loc[~results['is_short'], 'profit_abs'].sum(),
|
||||
'profit_total_short_abs': results.loc[results['is_short'], 'profit_abs'].sum(),
|
||||
'cagr': calculate_cagr(backtest_days, start_balance, content['final_balance']),
|
||||
'expectancy': calculate_expectancy(results),
|
||||
'sortino': calculate_sortino(results, min_date, max_date, start_balance),
|
||||
'sharpe': calculate_sharpe(results, min_date, max_date, start_balance),
|
||||
'calmar': calculate_calmar(results, min_date, max_date, start_balance),
|
||||
'profit_factor': profit_factor,
|
||||
'backtest_start': min_date.strftime(DATETIME_PRINT_FORMAT),
|
||||
'backtest_start_ts': int(min_date.timestamp() * 1000),
|
||||
@@ -794,8 +799,13 @@ def text_table_add_metrics(strat_results: Dict) -> str:
|
||||
strat_results['stake_currency'])),
|
||||
('Total profit %', f"{strat_results['profit_total']:.2%}"),
|
||||
('CAGR %', f"{strat_results['cagr']:.2%}" if 'cagr' in strat_results else 'N/A'),
|
||||
('Sortino', f"{strat_results['sortino']:.2f}" if 'sortino' in strat_results else 'N/A'),
|
||||
('Sharpe', f"{strat_results['sharpe']:.2f}" if 'sharpe' in strat_results else 'N/A'),
|
||||
('Calmar', f"{strat_results['calmar']:.2f}" if 'calmar' in strat_results else 'N/A'),
|
||||
('Profit factor', f'{strat_results["profit_factor"]:.2f}' if 'profit_factor'
|
||||
in strat_results else 'N/A'),
|
||||
('Expectancy', f"{strat_results['expectancy']:.2f}" if 'expectancy'
|
||||
in strat_results else 'N/A'),
|
||||
('Trades per day', strat_results['trades_per_day']),
|
||||
('Avg. daily profit %',
|
||||
f"{(strat_results['profit_total'] / strat_results['backtest_days']):.2%}"),
|
||||
|
@@ -1,4 +1,3 @@
|
||||
# flake8: noqa: F401
|
||||
from skopt.space import Categorical, Dimension, Integer, Real
|
||||
from skopt.space import Categorical, Dimension, Integer, Real # noqa: F401
|
||||
|
||||
from .decimalspace import SKDecimal
|
||||
from .decimalspace import SKDecimal # noqa: F401
|
||||
|
@@ -1,7 +1,9 @@
|
||||
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy.orm import declarative_base
|
||||
from sqlalchemy.orm import DeclarativeBase, Session, scoped_session
|
||||
|
||||
|
||||
_DECL_BASE: Any = declarative_base()
|
||||
SessionType = scoped_session[Session]
|
||||
|
||||
|
||||
class ModelBase(DeclarativeBase):
|
||||
pass
|
||||
|
@@ -109,11 +109,10 @@ def migrate_trades_and_orders_table(
|
||||
else:
|
||||
is_short = get_column_def(cols, 'is_short', '0')
|
||||
|
||||
# Margin Properties
|
||||
# Futures Properties
|
||||
interest_rate = get_column_def(cols, 'interest_rate', '0.0')
|
||||
|
||||
# Futures properties
|
||||
funding_fees = get_column_def(cols, 'funding_fees', '0.0')
|
||||
max_stake_amount = get_column_def(cols, 'max_stake_amount', 'stake_amount')
|
||||
|
||||
# If ticker-interval existed use that, else null.
|
||||
if has_column(cols, 'ticker_interval'):
|
||||
@@ -162,7 +161,8 @@ def migrate_trades_and_orders_table(
|
||||
timeframe, open_trade_value, close_profit_abs,
|
||||
trading_mode, leverage, liquidation_price, is_short,
|
||||
interest_rate, funding_fees, realized_profit,
|
||||
amount_precision, price_precision, precision_mode, contract_size
|
||||
amount_precision, price_precision, precision_mode, contract_size,
|
||||
max_stake_amount
|
||||
)
|
||||
select id, lower(exchange), pair, {base_currency} base_currency,
|
||||
{stake_currency} stake_currency,
|
||||
@@ -190,7 +190,8 @@ def migrate_trades_and_orders_table(
|
||||
{is_short} is_short, {interest_rate} interest_rate,
|
||||
{funding_fees} funding_fees, {realized_profit} realized_profit,
|
||||
{amount_precision} amount_precision, {price_precision} price_precision,
|
||||
{precision_mode} precision_mode, {contract_size} contract_size
|
||||
{precision_mode} precision_mode, {contract_size} contract_size,
|
||||
{max_stake_amount} max_stake_amount
|
||||
from {trade_back_name}
|
||||
"""))
|
||||
|
||||
@@ -213,17 +214,22 @@ def migrate_orders_table(engine, table_back_name: str, cols_order: List):
|
||||
average = get_column_def(cols_order, 'average', 'null')
|
||||
stop_price = get_column_def(cols_order, 'stop_price', 'null')
|
||||
funding_fee = get_column_def(cols_order, 'funding_fee', '0.0')
|
||||
ft_amount = get_column_def(cols_order, 'ft_amount', 'coalesce(amount, 0.0)')
|
||||
ft_price = get_column_def(cols_order, 'ft_price', 'coalesce(price, 0.0)')
|
||||
|
||||
# sqlite does not support literals for booleans
|
||||
with engine.begin() as connection:
|
||||
connection.execute(text(f"""
|
||||
insert into orders (id, ft_trade_id, ft_order_side, ft_pair, ft_is_open, order_id,
|
||||
status, symbol, order_type, side, price, amount, filled, average, remaining, cost,
|
||||
stop_price, order_date, order_filled_date, order_update_date, ft_fee_base, funding_fee)
|
||||
stop_price, order_date, order_filled_date, order_update_date, ft_fee_base, funding_fee,
|
||||
ft_amount, ft_price
|
||||
)
|
||||
select id, ft_trade_id, ft_order_side, ft_pair, ft_is_open, order_id,
|
||||
status, symbol, order_type, side, price, amount, filled, {average} average, remaining,
|
||||
cost, {stop_price} stop_price, order_date, order_filled_date,
|
||||
order_update_date, {ft_fee_base} ft_fee_base, {funding_fee} funding_fee
|
||||
order_update_date, {ft_fee_base} ft_fee_base, {funding_fee} funding_fee,
|
||||
{ft_amount} ft_amount, {ft_price} ft_price
|
||||
from {table_back_name}
|
||||
"""))
|
||||
|
||||
@@ -310,8 +316,8 @@ def check_migrate(engine, decl_base, previous_tables) -> None:
|
||||
# if ('orders' not in previous_tables
|
||||
# or not has_column(cols_orders, 'funding_fee')):
|
||||
migrating = False
|
||||
# if not has_column(cols_trades, 'contract_size'):
|
||||
if not has_column(cols_orders, 'funding_fee'):
|
||||
# if not has_column(cols_trades, 'max_stake_amount'):
|
||||
if not has_column(cols_orders, 'ft_price'):
|
||||
migrating = True
|
||||
logger.info(f"Running database migration for trades - "
|
||||
f"backup: {table_back_name}, {order_table_bak_name}")
|
||||
|
@@ -2,6 +2,7 @@
|
||||
This module contains the class to persist trades into SQLite
|
||||
"""
|
||||
import logging
|
||||
from typing import Any, Dict
|
||||
|
||||
from sqlalchemy import create_engine, inspect
|
||||
from sqlalchemy.exc import NoSuchModuleError
|
||||
@@ -9,7 +10,7 @@ from sqlalchemy.orm import scoped_session, sessionmaker
|
||||
from sqlalchemy.pool import StaticPool
|
||||
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.persistence.base import _DECL_BASE
|
||||
from freqtrade.persistence.base import ModelBase
|
||||
from freqtrade.persistence.migrations import check_migrate
|
||||
from freqtrade.persistence.pairlock import PairLock
|
||||
from freqtrade.persistence.trade_model import Order, Trade
|
||||
@@ -29,7 +30,7 @@ def init_db(db_url: str) -> None:
|
||||
:param db_url: Database to use
|
||||
:return: None
|
||||
"""
|
||||
kwargs = {}
|
||||
kwargs: Dict[str, Any] = {}
|
||||
|
||||
if db_url == 'sqlite:///':
|
||||
raise OperationalException(
|
||||
@@ -53,11 +54,10 @@ def init_db(db_url: str) -> None:
|
||||
# https://docs.sqlalchemy.org/en/13/orm/contextual.html#thread-local-scope
|
||||
# Scoped sessions proxy requests to the appropriate thread-local session.
|
||||
# We should use the scoped_session object - not a seperately initialized version
|
||||
Trade._session = scoped_session(sessionmaker(bind=engine, autoflush=False))
|
||||
Trade.query = Trade._session.query_property()
|
||||
Order.query = Trade._session.query_property()
|
||||
PairLock.query = Trade._session.query_property()
|
||||
Trade.session = scoped_session(sessionmaker(bind=engine, autoflush=False))
|
||||
Order.session = Trade.session
|
||||
PairLock.session = Trade.session
|
||||
|
||||
previous_tables = inspect(engine).get_table_names()
|
||||
_DECL_BASE.metadata.create_all(engine)
|
||||
check_migrate(engine, decl_base=_DECL_BASE, previous_tables=previous_tables)
|
||||
ModelBase.metadata.create_all(engine)
|
||||
check_migrate(engine, decl_base=ModelBase, previous_tables=previous_tables)
|
||||
|
@@ -1,33 +1,34 @@
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, Dict, Optional
|
||||
from typing import Any, ClassVar, Dict, Optional
|
||||
|
||||
from sqlalchemy import Boolean, Column, DateTime, Integer, String, or_
|
||||
from sqlalchemy.orm import Query
|
||||
from sqlalchemy import ScalarResult, String, or_, select
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from freqtrade.constants import DATETIME_PRINT_FORMAT
|
||||
from freqtrade.persistence.base import _DECL_BASE
|
||||
from freqtrade.persistence.base import ModelBase, SessionType
|
||||
|
||||
|
||||
class PairLock(_DECL_BASE):
|
||||
class PairLock(ModelBase):
|
||||
"""
|
||||
Pair Locks database model.
|
||||
"""
|
||||
__tablename__ = 'pairlocks'
|
||||
session: ClassVar[SessionType]
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
id: Mapped[int] = mapped_column(primary_key=True)
|
||||
|
||||
pair = Column(String(25), nullable=False, index=True)
|
||||
pair: Mapped[str] = mapped_column(String(25), nullable=False, index=True)
|
||||
# lock direction - long, short or * (for both)
|
||||
side = Column(String(25), nullable=False, default="*")
|
||||
reason = Column(String(255), nullable=True)
|
||||
side: Mapped[str] = mapped_column(String(25), nullable=False, default="*")
|
||||
reason: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
|
||||
# Time the pair was locked (start time)
|
||||
lock_time = Column(DateTime, nullable=False)
|
||||
lock_time: Mapped[datetime] = mapped_column(nullable=False)
|
||||
# Time until the pair is locked (end time)
|
||||
lock_end_time = Column(DateTime, nullable=False, index=True)
|
||||
lock_end_time: Mapped[datetime] = mapped_column(nullable=False, index=True)
|
||||
|
||||
active = Column(Boolean, nullable=False, default=True, index=True)
|
||||
active: Mapped[bool] = mapped_column(nullable=False, default=True, index=True)
|
||||
|
||||
def __repr__(self):
|
||||
def __repr__(self) -> str:
|
||||
lock_time = self.lock_time.strftime(DATETIME_PRINT_FORMAT)
|
||||
lock_end_time = self.lock_end_time.strftime(DATETIME_PRINT_FORMAT)
|
||||
return (
|
||||
@@ -35,7 +36,8 @@ class PairLock(_DECL_BASE):
|
||||
f'lock_end_time={lock_end_time}, reason={self.reason}, active={self.active})')
|
||||
|
||||
@staticmethod
|
||||
def query_pair_locks(pair: Optional[str], now: datetime, side: str = '*') -> Query:
|
||||
def query_pair_locks(
|
||||
pair: Optional[str], now: datetime, side: str = '*') -> ScalarResult['PairLock']:
|
||||
"""
|
||||
Get all currently active locks for this pair
|
||||
:param pair: Pair to check for. Returns all current locks if pair is empty
|
||||
@@ -51,9 +53,11 @@ class PairLock(_DECL_BASE):
|
||||
else:
|
||||
filters.append(PairLock.side == '*')
|
||||
|
||||
return PairLock.query.filter(
|
||||
*filters
|
||||
)
|
||||
return PairLock.session.scalars(select(PairLock).filter(*filters))
|
||||
|
||||
@staticmethod
|
||||
def get_all_locks() -> ScalarResult['PairLock']:
|
||||
return PairLock.session.scalars(select(PairLock))
|
||||
|
||||
def to_json(self) -> Dict[str, Any]:
|
||||
return {
|
||||
|
@@ -1,6 +1,8 @@
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
from typing import List, Optional
|
||||
from typing import List, Optional, Sequence
|
||||
|
||||
from sqlalchemy import select
|
||||
|
||||
from freqtrade.exchange import timeframe_to_next_date
|
||||
from freqtrade.persistence.models import PairLock
|
||||
@@ -30,8 +32,8 @@ class PairLocks():
|
||||
PairLocks.locks = []
|
||||
|
||||
@staticmethod
|
||||
def lock_pair(pair: str, until: datetime, reason: str = None, *,
|
||||
now: datetime = None, side: str = '*') -> PairLock:
|
||||
def lock_pair(pair: str, until: datetime, reason: Optional[str] = None, *,
|
||||
now: Optional[datetime] = None, side: str = '*') -> PairLock:
|
||||
"""
|
||||
Create PairLock from now to "until".
|
||||
Uses database by default, unless PairLocks.use_db is set to False,
|
||||
@@ -51,15 +53,15 @@ class PairLocks():
|
||||
active=True
|
||||
)
|
||||
if PairLocks.use_db:
|
||||
PairLock.query.session.add(lock)
|
||||
PairLock.query.session.commit()
|
||||
PairLock.session.add(lock)
|
||||
PairLock.session.commit()
|
||||
else:
|
||||
PairLocks.locks.append(lock)
|
||||
return lock
|
||||
|
||||
@staticmethod
|
||||
def get_pair_locks(
|
||||
pair: Optional[str], now: Optional[datetime] = None, side: str = '*') -> List[PairLock]:
|
||||
def get_pair_locks(pair: Optional[str], now: Optional[datetime] = None,
|
||||
side: str = '*') -> Sequence[PairLock]:
|
||||
"""
|
||||
Get all currently active locks for this pair
|
||||
:param pair: Pair to check for. Returns all current locks if pair is empty
|
||||
@@ -106,7 +108,7 @@ class PairLocks():
|
||||
for lock in locks:
|
||||
lock.active = False
|
||||
if PairLocks.use_db:
|
||||
PairLock.query.session.commit()
|
||||
PairLock.session.commit()
|
||||
|
||||
@staticmethod
|
||||
def unlock_reason(reason: str, now: Optional[datetime] = None) -> None:
|
||||
@@ -126,15 +128,15 @@ class PairLocks():
|
||||
PairLock.active.is_(True),
|
||||
PairLock.reason == reason
|
||||
]
|
||||
locks = PairLock.query.filter(*filters)
|
||||
locks = PairLock.session.scalars(select(PairLock).filter(*filters)).all()
|
||||
for lock in locks:
|
||||
logger.info(f"Releasing lock for {lock.pair} with reason '{reason}'.")
|
||||
lock.active = False
|
||||
PairLock.query.session.commit()
|
||||
PairLock.session.commit()
|
||||
else:
|
||||
# used in backtesting mode; don't show log messages for speed
|
||||
locks = PairLocks.get_pair_locks(None)
|
||||
for lock in locks:
|
||||
locksb = PairLocks.get_pair_locks(None)
|
||||
for lock in locksb:
|
||||
if lock.reason == reason:
|
||||
lock.active = False
|
||||
|
||||
@@ -165,11 +167,11 @@ class PairLocks():
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_all_locks() -> List[PairLock]:
|
||||
def get_all_locks() -> Sequence[PairLock]:
|
||||
"""
|
||||
Return all locks, also locks with expired end date
|
||||
"""
|
||||
if PairLocks.use_db:
|
||||
return PairLock.query.all()
|
||||
return PairLock.get_all_locks().all()
|
||||
else:
|
||||
return PairLocks.locks
|
||||
|
@@ -5,11 +5,11 @@ import logging
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from math import isclose
|
||||
from typing import Any, Dict, List, Optional
|
||||
from typing import Any, ClassVar, Dict, List, Optional, Sequence, cast
|
||||
|
||||
from sqlalchemy import (Boolean, Column, DateTime, Enum, Float, ForeignKey, Integer, String,
|
||||
UniqueConstraint, desc, func)
|
||||
from sqlalchemy.orm import Query, lazyload, relationship
|
||||
from sqlalchemy import (Enum, Float, ForeignKey, Integer, ScalarResult, Select, String,
|
||||
UniqueConstraint, desc, func, select)
|
||||
from sqlalchemy.orm import Mapped, lazyload, mapped_column, relationship
|
||||
|
||||
from freqtrade.constants import (DATETIME_PRINT_FORMAT, MATH_CLOSE_PREC, NON_OPEN_EXCHANGE_STATES,
|
||||
BuySell, LongShort)
|
||||
@@ -17,14 +17,14 @@ from freqtrade.enums import ExitType, TradingMode
|
||||
from freqtrade.exceptions import DependencyException, OperationalException
|
||||
from freqtrade.exchange import amount_to_contract_precision, price_to_precision
|
||||
from freqtrade.leverage import interest
|
||||
from freqtrade.persistence.base import _DECL_BASE
|
||||
from freqtrade.persistence.base import ModelBase, SessionType
|
||||
from freqtrade.util import FtPrecise
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Order(_DECL_BASE):
|
||||
class Order(ModelBase):
|
||||
"""
|
||||
Order database model
|
||||
Keeps a record of all orders placed on the exchange
|
||||
@@ -36,39 +36,43 @@ class Order(_DECL_BASE):
|
||||
Mirrors CCXT Order structure
|
||||
"""
|
||||
__tablename__ = 'orders'
|
||||
session: ClassVar[SessionType]
|
||||
|
||||
# Uniqueness should be ensured over pair, order_id
|
||||
# its likely that order_id is unique per Pair on some exchanges.
|
||||
__table_args__ = (UniqueConstraint('ft_pair', 'order_id', name="_order_pair_order_id"),)
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
ft_trade_id = Column(Integer, ForeignKey('trades.id'), index=True)
|
||||
id: Mapped[int] = mapped_column(Integer, primary_key=True)
|
||||
ft_trade_id: Mapped[int] = mapped_column(Integer, ForeignKey('trades.id'), index=True)
|
||||
|
||||
trade = relationship("Trade", back_populates="orders")
|
||||
trade: Mapped[List["Trade"]] = relationship("Trade", back_populates="orders")
|
||||
|
||||
# order_side can only be 'buy', 'sell' or 'stoploss'
|
||||
ft_order_side: str = Column(String(25), nullable=False)
|
||||
ft_pair: str = Column(String(25), nullable=False)
|
||||
ft_is_open = Column(Boolean, nullable=False, default=True, index=True)
|
||||
ft_order_side: Mapped[str] = mapped_column(String(25), nullable=False)
|
||||
ft_pair: Mapped[str] = mapped_column(String(25), nullable=False)
|
||||
ft_is_open: Mapped[bool] = mapped_column(nullable=False, default=True, index=True)
|
||||
ft_amount: Mapped[float] = mapped_column(Float(), nullable=False)
|
||||
ft_price: Mapped[float] = mapped_column(Float(), nullable=False)
|
||||
|
||||
order_id: str = Column(String(255), nullable=False, index=True)
|
||||
status = Column(String(255), nullable=True)
|
||||
symbol = Column(String(25), nullable=True)
|
||||
order_type: str = Column(String(50), nullable=True)
|
||||
side = Column(String(25), nullable=True)
|
||||
price = Column(Float, nullable=True)
|
||||
average = Column(Float, nullable=True)
|
||||
amount = Column(Float, nullable=True)
|
||||
filled = Column(Float, nullable=True)
|
||||
remaining = Column(Float, nullable=True)
|
||||
cost = Column(Float, nullable=True)
|
||||
stop_price = Column(Float, nullable=True)
|
||||
order_date = Column(DateTime, nullable=True, default=datetime.utcnow)
|
||||
order_filled_date = Column(DateTime, nullable=True)
|
||||
order_update_date = Column(DateTime, nullable=True)
|
||||
order_id: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
|
||||
status: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
|
||||
symbol: Mapped[Optional[str]] = mapped_column(String(25), nullable=True)
|
||||
# TODO: type: order_type type is Optional[str]
|
||||
order_type: Mapped[str] = mapped_column(String(50), nullable=True)
|
||||
side: Mapped[str] = mapped_column(String(25), nullable=True)
|
||||
price: Mapped[Optional[float]] = mapped_column(Float(), nullable=True)
|
||||
average: Mapped[Optional[float]] = mapped_column(Float(), nullable=True)
|
||||
amount: Mapped[Optional[float]] = mapped_column(Float(), nullable=True)
|
||||
filled: Mapped[Optional[float]] = mapped_column(Float(), nullable=True)
|
||||
remaining: Mapped[Optional[float]] = mapped_column(Float(), nullable=True)
|
||||
cost: Mapped[Optional[float]] = mapped_column(Float(), nullable=True)
|
||||
stop_price: Mapped[Optional[float]] = mapped_column(Float(), nullable=True)
|
||||
order_date: Mapped[datetime] = mapped_column(nullable=True, default=datetime.utcnow)
|
||||
order_filled_date: Mapped[Optional[datetime]] = mapped_column(nullable=True)
|
||||
order_update_date: Mapped[Optional[datetime]] = mapped_column(nullable=True)
|
||||
funding_fee: Mapped[Optional[float]] = mapped_column(Float(), nullable=True)
|
||||
|
||||
funding_fee = Column(Float, nullable=True)
|
||||
|
||||
ft_fee_base = Column(Float, nullable=True)
|
||||
ft_fee_base: Mapped[Optional[float]] = mapped_column(Float(), nullable=True)
|
||||
|
||||
@property
|
||||
def order_date_utc(self) -> datetime:
|
||||
@@ -82,19 +86,27 @@ class Order(_DECL_BASE):
|
||||
self.order_filled_date.replace(tzinfo=timezone.utc) if self.order_filled_date else None
|
||||
)
|
||||
|
||||
@property
|
||||
def safe_amount(self) -> float:
|
||||
return self.amount or self.ft_amount
|
||||
|
||||
@property
|
||||
def safe_price(self) -> float:
|
||||
return self.average or self.price or self.stop_price
|
||||
return self.average or self.price or self.stop_price or self.ft_price
|
||||
|
||||
@property
|
||||
def safe_filled(self) -> float:
|
||||
return self.filled if self.filled is not None else self.amount or 0.0
|
||||
|
||||
@property
|
||||
def safe_cost(self) -> float:
|
||||
return self.cost or 0.0
|
||||
|
||||
@property
|
||||
def safe_remaining(self) -> float:
|
||||
return (
|
||||
self.remaining if self.remaining is not None else
|
||||
self.amount - (self.filled or 0.0)
|
||||
self.safe_amount - (self.filled or 0.0)
|
||||
)
|
||||
|
||||
@property
|
||||
@@ -107,8 +119,9 @@ class Order(_DECL_BASE):
|
||||
|
||||
def __repr__(self):
|
||||
|
||||
return (f'Order(id={self.id}, order_id={self.order_id}, trade_id={self.ft_trade_id}, '
|
||||
f'side={self.side}, order_type={self.order_type}, status={self.status})')
|
||||
return (f"Order(id={self.id}, order_id={self.order_id}, trade_id={self.ft_trade_id}, "
|
||||
f"side={self.side}, filled={self.safe_filled}, price={self.safe_price}, "
|
||||
f"order_type={self.order_type}, status={self.status})")
|
||||
|
||||
def update_from_ccxt_object(self, order):
|
||||
"""
|
||||
@@ -140,12 +153,12 @@ class Order(_DECL_BASE):
|
||||
# Assign funding fee up to this point
|
||||
# (represents the funding fee since the last order)
|
||||
self.funding_fee = self.trade.funding_fees
|
||||
if (order.get('filled', 0.0) or 0.0) > 0:
|
||||
if (order.get('filled', 0.0) or 0.0) > 0 and not self.order_filled_date:
|
||||
self.order_filled_date = datetime.now(timezone.utc)
|
||||
self.order_update_date = datetime.now(timezone.utc)
|
||||
|
||||
def to_ccxt_object(self) -> Dict[str, Any]:
|
||||
return {
|
||||
order: Dict[str, Any] = {
|
||||
'id': self.order_id,
|
||||
'symbol': self.ft_pair,
|
||||
'price': self.price,
|
||||
@@ -163,10 +176,13 @@ class Order(_DECL_BASE):
|
||||
'fee': None,
|
||||
'info': {},
|
||||
}
|
||||
if self.ft_order_side == 'stoploss':
|
||||
order['ft_order_type'] = 'stoploss'
|
||||
return order
|
||||
|
||||
def to_json(self, entry_side: str, minified: bool = False) -> Dict[str, Any]:
|
||||
resp = {
|
||||
'amount': self.amount,
|
||||
'amount': self.safe_amount,
|
||||
'safe_price': self.safe_price,
|
||||
'ft_order_side': self.ft_order_side,
|
||||
'order_filled_timestamp': int(self.order_filled_date.replace(
|
||||
@@ -204,7 +220,7 @@ class Order(_DECL_BASE):
|
||||
# Assumes backtesting will use date_last_filled_utc to calculate future funding fees.
|
||||
self.funding_fee = trade.funding_fees
|
||||
|
||||
if (self.ft_order_side == trade.entry_side):
|
||||
if (self.ft_order_side == trade.entry_side and self.price):
|
||||
trade.open_rate = self.price
|
||||
trade.recalc_trade_from_orders()
|
||||
trade.adjust_stop_loss(trade.open_rate, trade.stop_loss_pct, refresh=True)
|
||||
@@ -227,22 +243,31 @@ class Order(_DECL_BASE):
|
||||
logger.warning(f"Did not find order for {order}.")
|
||||
|
||||
@staticmethod
|
||||
def parse_from_ccxt_object(order: Dict[str, Any], pair: str, side: str) -> 'Order':
|
||||
def parse_from_ccxt_object(
|
||||
order: Dict[str, Any], pair: str, side: str,
|
||||
amount: Optional[float] = None, price: Optional[float] = None) -> 'Order':
|
||||
"""
|
||||
Parse an order from a ccxt object and return a new order Object.
|
||||
Optional support for overriding amount and price is only used for test simplification.
|
||||
"""
|
||||
o = Order(order_id=str(order['id']), ft_order_side=side, ft_pair=pair)
|
||||
o = Order(
|
||||
order_id=str(order['id']),
|
||||
ft_order_side=side,
|
||||
ft_pair=pair,
|
||||
ft_amount=amount if amount else order['amount'],
|
||||
ft_price=price if price else order['price'],
|
||||
)
|
||||
|
||||
o.update_from_ccxt_object(order)
|
||||
return o
|
||||
|
||||
@staticmethod
|
||||
def get_open_orders() -> List['Order']:
|
||||
def get_open_orders() -> Sequence['Order']:
|
||||
"""
|
||||
Retrieve open orders from the database
|
||||
:return: List of open orders
|
||||
"""
|
||||
return Order.query.filter(Order.ft_is_open.is_(True)).all()
|
||||
return Order.session.scalars(select(Order).filter(Order.ft_is_open.is_(True))).all()
|
||||
|
||||
@staticmethod
|
||||
def order_by_id(order_id: str) -> Optional['Order']:
|
||||
@@ -250,7 +275,7 @@ class Order(_DECL_BASE):
|
||||
Retrieve order based on order_id
|
||||
:return: Order or None
|
||||
"""
|
||||
return Order.query.filter(Order.order_id == order_id).first()
|
||||
return Order.session.scalars(select(Order).filter(Order.order_id == order_id)).first()
|
||||
|
||||
|
||||
class LocalTrade():
|
||||
@@ -275,15 +300,15 @@ class LocalTrade():
|
||||
|
||||
exchange: str = ''
|
||||
pair: str = ''
|
||||
base_currency: str = ''
|
||||
stake_currency: str = ''
|
||||
base_currency: Optional[str] = ''
|
||||
stake_currency: Optional[str] = ''
|
||||
is_open: bool = True
|
||||
fee_open: float = 0.0
|
||||
fee_open_cost: Optional[float] = None
|
||||
fee_open_currency: str = ''
|
||||
fee_close: float = 0.0
|
||||
fee_open_currency: Optional[str] = ''
|
||||
fee_close: Optional[float] = 0.0
|
||||
fee_close_cost: Optional[float] = None
|
||||
fee_close_currency: str = ''
|
||||
fee_close_currency: Optional[str] = ''
|
||||
open_rate: float = 0.0
|
||||
open_rate_requested: Optional[float] = None
|
||||
# open_trade_value - calculated via _calc_open_trade_value
|
||||
@@ -293,6 +318,7 @@ class LocalTrade():
|
||||
close_profit: Optional[float] = None
|
||||
close_profit_abs: Optional[float] = None
|
||||
stake_amount: float = 0.0
|
||||
max_stake_amount: Optional[float] = 0.0
|
||||
amount: float = 0.0
|
||||
amount_requested: Optional[float] = None
|
||||
open_date: datetime
|
||||
@@ -301,9 +327,9 @@ class LocalTrade():
|
||||
# absolute value of the stop loss
|
||||
stop_loss: float = 0.0
|
||||
# percentage value of the stop loss
|
||||
stop_loss_pct: float = 0.0
|
||||
stop_loss_pct: Optional[float] = 0.0
|
||||
# absolute value of the initial stop loss
|
||||
initial_stop_loss: float = 0.0
|
||||
initial_stop_loss: Optional[float] = 0.0
|
||||
# percentage value of the initial stop loss
|
||||
initial_stop_loss_pct: Optional[float] = None
|
||||
# stoploss order id which is on exchange
|
||||
@@ -311,12 +337,12 @@ class LocalTrade():
|
||||
# last update time of the stoploss order on exchange
|
||||
stoploss_last_update: Optional[datetime] = None
|
||||
# absolute value of the highest reached price
|
||||
max_rate: float = 0.0
|
||||
max_rate: Optional[float] = None
|
||||
# Lowest price reached
|
||||
min_rate: float = 0.0
|
||||
exit_reason: str = ''
|
||||
exit_order_status: str = ''
|
||||
strategy: str = ''
|
||||
min_rate: Optional[float] = None
|
||||
exit_reason: Optional[str] = ''
|
||||
exit_order_status: Optional[str] = ''
|
||||
strategy: Optional[str] = ''
|
||||
enter_tag: Optional[str] = None
|
||||
timeframe: Optional[int] = None
|
||||
|
||||
@@ -397,12 +423,6 @@ class LocalTrade():
|
||||
def close_date_utc(self):
|
||||
return self.close_date.replace(tzinfo=timezone.utc)
|
||||
|
||||
@property
|
||||
def enter_side(self) -> str:
|
||||
""" DEPRECATED, please use entry_side instead"""
|
||||
# TODO: Please remove me after 2022.5
|
||||
return self.entry_side
|
||||
|
||||
@property
|
||||
def entry_side(self) -> str:
|
||||
if self.is_short:
|
||||
@@ -475,8 +495,8 @@ class LocalTrade():
|
||||
'amount': round(self.amount, 8),
|
||||
'amount_requested': round(self.amount_requested, 8) if self.amount_requested else None,
|
||||
'stake_amount': round(self.stake_amount, 8),
|
||||
'max_stake_amount': round(self.max_stake_amount, 8) if self.max_stake_amount else None,
|
||||
'strategy': self.strategy,
|
||||
'buy_tag': self.enter_tag,
|
||||
'enter_tag': self.enter_tag,
|
||||
'timeframe': self.timeframe,
|
||||
|
||||
@@ -498,6 +518,8 @@ class LocalTrade():
|
||||
'close_timestamp': int(self.close_date.replace(
|
||||
tzinfo=timezone.utc).timestamp() * 1000) if self.close_date else None,
|
||||
'realized_profit': self.realized_profit or 0.0,
|
||||
# Close-profit corresponds to relative realized_profit ratio
|
||||
'realized_profit_ratio': self.close_profit or None,
|
||||
'close_rate': self.close_rate,
|
||||
'close_rate_requested': self.close_rate_requested,
|
||||
'close_profit': self.close_profit, # Deprecated
|
||||
@@ -513,7 +535,6 @@ class LocalTrade():
|
||||
'profit_pct': round(self.close_profit * 100, 2) if self.close_profit else None,
|
||||
'profit_abs': self.close_profit_abs,
|
||||
|
||||
'sell_reason': self.exit_reason, # Deprecated
|
||||
'exit_reason': self.exit_reason,
|
||||
'exit_order_status': self.exit_order_status,
|
||||
'stop_loss_abs': self.stop_loss,
|
||||
@@ -580,7 +601,7 @@ class LocalTrade():
|
||||
|
||||
self.stop_loss_pct = -1 * abs(percent)
|
||||
|
||||
def adjust_stop_loss(self, current_price: float, stoploss: float,
|
||||
def adjust_stop_loss(self, current_price: float, stoploss: Optional[float],
|
||||
initial: bool = False, refresh: bool = False) -> None:
|
||||
"""
|
||||
This adjusts the stop loss to it's most recently observed setting
|
||||
@@ -589,7 +610,7 @@ class LocalTrade():
|
||||
:param initial: Called to initiate stop_loss.
|
||||
Skips everything if self.stop_loss is already set.
|
||||
"""
|
||||
if initial and not (self.stop_loss is None or self.stop_loss == 0):
|
||||
if stoploss is None or (initial and not (self.stop_loss is None or self.stop_loss == 0)):
|
||||
# Don't modify if called with initial and nothing to do
|
||||
return
|
||||
refresh = True if refresh and self.nr_of_successful_entries == 1 else False
|
||||
@@ -628,7 +649,7 @@ class LocalTrade():
|
||||
f"initial_stop_loss={self.initial_stop_loss:.8f}, "
|
||||
f"stop_loss={self.stop_loss:.8f}. "
|
||||
f"Trailing stoploss saved us: "
|
||||
f"{float(self.stop_loss) - float(self.initial_stop_loss):.8f}.")
|
||||
f"{float(self.stop_loss) - float(self.initial_stop_loss or 0.0):.8f}.")
|
||||
|
||||
def update_trade(self, order: Order) -> None:
|
||||
"""
|
||||
@@ -780,17 +801,17 @@ class LocalTrade():
|
||||
|
||||
return interest(exchange_name=self.exchange, borrowed=borrowed, rate=rate, hours=hours)
|
||||
|
||||
def _calc_base_close(self, amount: FtPrecise, rate: float, fee: float) -> FtPrecise:
|
||||
def _calc_base_close(self, amount: FtPrecise, rate: float, fee: Optional[float]) -> FtPrecise:
|
||||
|
||||
close_trade = amount * FtPrecise(rate)
|
||||
fees = close_trade * FtPrecise(fee)
|
||||
fees = close_trade * FtPrecise(fee or 0.0)
|
||||
|
||||
if self.is_short:
|
||||
return close_trade + fees
|
||||
else:
|
||||
return close_trade - fees
|
||||
|
||||
def calc_close_trade_value(self, rate: float, amount: float = None) -> float:
|
||||
def calc_close_trade_value(self, rate: float, amount: Optional[float] = None) -> float:
|
||||
"""
|
||||
Calculate the Trade's close value including fees
|
||||
:param rate: rate to compare with.
|
||||
@@ -828,7 +849,8 @@ class LocalTrade():
|
||||
raise OperationalException(
|
||||
f"{self.trading_mode.value} trading is not yet available using freqtrade")
|
||||
|
||||
def calc_profit(self, rate: float, amount: float = None, open_rate: float = None) -> float:
|
||||
def calc_profit(self, rate: float, amount: Optional[float] = None,
|
||||
open_rate: Optional[float] = None) -> float:
|
||||
"""
|
||||
Calculate the absolute profit in stake currency between Close and Open trade
|
||||
:param rate: close rate to compare with.
|
||||
@@ -849,7 +871,8 @@ class LocalTrade():
|
||||
return float(f"{profit:.8f}")
|
||||
|
||||
def calc_profit_ratio(
|
||||
self, rate: float, amount: float = None, open_rate: float = None) -> float:
|
||||
self, rate: float, amount: Optional[float] = None,
|
||||
open_rate: Optional[float] = None) -> float:
|
||||
"""
|
||||
Calculates the profit as ratio (including fee).
|
||||
:param rate: rate to compare with.
|
||||
@@ -882,6 +905,7 @@ class LocalTrade():
|
||||
ZERO = FtPrecise(0.0)
|
||||
current_amount = FtPrecise(0.0)
|
||||
current_stake = FtPrecise(0.0)
|
||||
max_stake_amount = FtPrecise(0.0)
|
||||
total_stake = 0.0 # Total stake after all buy orders (does not subtract!)
|
||||
avg_price = FtPrecise(0.0)
|
||||
close_profit = 0.0
|
||||
@@ -923,7 +947,9 @@ class LocalTrade():
|
||||
exit_rate, amount=exit_amount, open_rate=avg_price)
|
||||
else:
|
||||
total_stake = total_stake + self._calc_open_trade_value(tmp_amount, price)
|
||||
max_stake_amount += (tmp_amount * price)
|
||||
self.funding_fees = funding_fees
|
||||
self.max_stake_amount = float(max_stake_amount)
|
||||
|
||||
if close_profit:
|
||||
self.close_profit = close_profit
|
||||
@@ -959,11 +985,12 @@ class LocalTrade():
|
||||
return None
|
||||
|
||||
def select_order(self, order_side: Optional[str] = None,
|
||||
is_open: Optional[bool] = None) -> Optional[Order]:
|
||||
is_open: Optional[bool] = None, only_filled: bool = False) -> Optional[Order]:
|
||||
"""
|
||||
Finds latest order for this orderside and status
|
||||
:param order_side: ft_order_side of the order (either 'buy', 'sell' or 'stoploss')
|
||||
:param is_open: Only search for open orders?
|
||||
:param only_filled: Only search for Filled orders (only valid with is_open=False).
|
||||
:return: latest Order object if it exists, else None
|
||||
"""
|
||||
orders = self.orders
|
||||
@@ -971,6 +998,8 @@ class LocalTrade():
|
||||
orders = [o for o in orders if o.ft_order_side == order_side]
|
||||
if is_open is not None:
|
||||
orders = [o for o in orders if o.ft_is_open == is_open]
|
||||
if is_open is False and only_filled:
|
||||
orders = [o for o in orders if o.filled and o.status in NON_OPEN_EXCHANGE_STATES]
|
||||
if len(orders) > 0:
|
||||
return orders[-1]
|
||||
else:
|
||||
@@ -1039,13 +1068,18 @@ class LocalTrade():
|
||||
return len(self.select_filled_orders('sell'))
|
||||
|
||||
@property
|
||||
def sell_reason(self) -> str:
|
||||
def sell_reason(self) -> Optional[str]:
|
||||
""" DEPRECATED! Please use exit_reason instead."""
|
||||
return self.exit_reason
|
||||
|
||||
@property
|
||||
def safe_close_rate(self) -> float:
|
||||
return self.close_rate or self.close_rate_requested or 0.0
|
||||
|
||||
@staticmethod
|
||||
def get_trades_proxy(*, pair: str = None, is_open: bool = None,
|
||||
open_date: datetime = None, close_date: datetime = None,
|
||||
def get_trades_proxy(*, pair: Optional[str] = None, is_open: Optional[bool] = None,
|
||||
open_date: Optional[datetime] = None,
|
||||
close_date: Optional[datetime] = None,
|
||||
) -> List['LocalTrade']:
|
||||
"""
|
||||
Helper function to query Trades.
|
||||
@@ -1053,6 +1087,11 @@ class LocalTrade():
|
||||
In live mode, converts the filter to a database query and returns all rows
|
||||
In Backtest mode, uses filters on Trade.trades to get the result.
|
||||
|
||||
:param pair: Filter by pair
|
||||
:param is_open: Filter by open/closed status
|
||||
:param open_date: Filter by open_date (filters via trade.open_date > input)
|
||||
:param close_date: Filter by close_date (filters via trade.close_date > input)
|
||||
Will implicitly only return closed trades.
|
||||
:return: unsorted List[Trade]
|
||||
"""
|
||||
|
||||
@@ -1103,7 +1142,7 @@ class LocalTrade():
|
||||
@staticmethod
|
||||
def get_open_trades() -> List[Any]:
|
||||
"""
|
||||
Query trades from persistence layer
|
||||
Retrieve open trades
|
||||
"""
|
||||
return Trade.get_trades_proxy(is_open=True)
|
||||
|
||||
@@ -1113,7 +1152,9 @@ class LocalTrade():
|
||||
get open trade count
|
||||
"""
|
||||
if Trade.use_db:
|
||||
return Trade.query.filter(Trade.is_open.is_(True)).count()
|
||||
return Trade.session.execute(
|
||||
select(func.count(Trade.id)).filter(Trade.is_open.is_(True))
|
||||
).scalar_one()
|
||||
else:
|
||||
return LocalTrade.bt_open_open_trade_count
|
||||
|
||||
@@ -1138,7 +1179,7 @@ class LocalTrade():
|
||||
logger.info(f"New stoploss: {trade.stop_loss}.")
|
||||
|
||||
|
||||
class Trade(_DECL_BASE, LocalTrade):
|
||||
class Trade(ModelBase, LocalTrade):
|
||||
"""
|
||||
Trade database model.
|
||||
Also handles updating and querying trades
|
||||
@@ -1146,78 +1187,97 @@ class Trade(_DECL_BASE, LocalTrade):
|
||||
Note: Fields must be aligned with LocalTrade class
|
||||
"""
|
||||
__tablename__ = 'trades'
|
||||
session: ClassVar[SessionType]
|
||||
|
||||
use_db: bool = True
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
id: Mapped[int] = mapped_column(Integer, primary_key=True) # type: ignore
|
||||
|
||||
orders = relationship("Order", order_by="Order.id", cascade="all, delete-orphan",
|
||||
lazy="selectin", innerjoin=True)
|
||||
orders: Mapped[List[Order]] = relationship(
|
||||
"Order", order_by="Order.id", cascade="all, delete-orphan", lazy="selectin",
|
||||
innerjoin=True) # type: ignore
|
||||
|
||||
exchange = Column(String(25), nullable=False)
|
||||
pair = Column(String(25), nullable=False, index=True)
|
||||
base_currency = Column(String(25), nullable=True)
|
||||
stake_currency = Column(String(25), nullable=True)
|
||||
is_open = Column(Boolean, nullable=False, default=True, index=True)
|
||||
fee_open = Column(Float, nullable=False, default=0.0)
|
||||
fee_open_cost = Column(Float, nullable=True)
|
||||
fee_open_currency = Column(String(25), nullable=True)
|
||||
fee_close = Column(Float, nullable=False, default=0.0)
|
||||
fee_close_cost = Column(Float, nullable=True)
|
||||
fee_close_currency = Column(String(25), nullable=True)
|
||||
open_rate: float = Column(Float)
|
||||
open_rate_requested = Column(Float)
|
||||
exchange: Mapped[str] = mapped_column(String(25), nullable=False) # type: ignore
|
||||
pair: Mapped[str] = mapped_column(String(25), nullable=False, index=True) # type: ignore
|
||||
base_currency: Mapped[Optional[str]] = mapped_column(String(25), nullable=True) # type: ignore
|
||||
stake_currency: Mapped[Optional[str]] = mapped_column(String(25), nullable=True) # type: ignore
|
||||
is_open: Mapped[bool] = mapped_column(nullable=False, default=True, index=True) # type: ignore
|
||||
fee_open: Mapped[float] = mapped_column(Float(), nullable=False, default=0.0) # type: ignore
|
||||
fee_open_cost: Mapped[Optional[float]] = mapped_column(Float(), nullable=True) # type: ignore
|
||||
fee_open_currency: Mapped[Optional[str]] = mapped_column(
|
||||
String(25), nullable=True) # type: ignore
|
||||
fee_close: Mapped[Optional[float]] = mapped_column(
|
||||
Float(), nullable=False, default=0.0) # type: ignore
|
||||
fee_close_cost: Mapped[Optional[float]] = mapped_column(Float(), nullable=True) # type: ignore
|
||||
fee_close_currency: Mapped[Optional[str]] = mapped_column(
|
||||
String(25), nullable=True) # type: ignore
|
||||
open_rate: Mapped[float] = mapped_column(Float()) # type: ignore
|
||||
open_rate_requested: Mapped[Optional[float]] = mapped_column(
|
||||
Float(), nullable=True) # type: ignore
|
||||
# open_trade_value - calculated via _calc_open_trade_value
|
||||
open_trade_value = Column(Float)
|
||||
close_rate: Optional[float] = Column(Float)
|
||||
close_rate_requested = Column(Float)
|
||||
realized_profit = Column(Float, default=0.0)
|
||||
close_profit = Column(Float)
|
||||
close_profit_abs = Column(Float)
|
||||
stake_amount = Column(Float, nullable=False)
|
||||
amount = Column(Float)
|
||||
amount_requested = Column(Float)
|
||||
open_date = Column(DateTime, nullable=False, default=datetime.utcnow)
|
||||
close_date = Column(DateTime)
|
||||
open_order_id = Column(String(255))
|
||||
open_trade_value: Mapped[float] = mapped_column(Float(), nullable=True) # type: ignore
|
||||
close_rate: Mapped[Optional[float]] = mapped_column(Float()) # type: ignore
|
||||
close_rate_requested: Mapped[Optional[float]] = mapped_column(Float()) # type: ignore
|
||||
realized_profit: Mapped[float] = mapped_column(
|
||||
Float(), default=0.0, nullable=True) # type: ignore
|
||||
close_profit: Mapped[Optional[float]] = mapped_column(Float()) # type: ignore
|
||||
close_profit_abs: Mapped[Optional[float]] = mapped_column(Float()) # type: ignore
|
||||
stake_amount: Mapped[float] = mapped_column(Float(), nullable=False) # type: ignore
|
||||
max_stake_amount: Mapped[Optional[float]] = mapped_column(Float()) # type: ignore
|
||||
amount: Mapped[float] = mapped_column(Float()) # type: ignore
|
||||
amount_requested: Mapped[Optional[float]] = mapped_column(Float()) # type: ignore
|
||||
open_date: Mapped[datetime] = mapped_column(
|
||||
nullable=False, default=datetime.utcnow) # type: ignore
|
||||
close_date: Mapped[Optional[datetime]] = mapped_column() # type: ignore
|
||||
open_order_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) # type: ignore
|
||||
# absolute value of the stop loss
|
||||
stop_loss = Column(Float, nullable=True, default=0.0)
|
||||
stop_loss: Mapped[float] = mapped_column(Float(), nullable=True, default=0.0) # type: ignore
|
||||
# percentage value of the stop loss
|
||||
stop_loss_pct = Column(Float, nullable=True)
|
||||
stop_loss_pct: Mapped[Optional[float]] = mapped_column(Float(), nullable=True) # type: ignore
|
||||
# absolute value of the initial stop loss
|
||||
initial_stop_loss = Column(Float, nullable=True, default=0.0)
|
||||
initial_stop_loss: Mapped[Optional[float]] = mapped_column(
|
||||
Float(), nullable=True, default=0.0) # type: ignore
|
||||
# percentage value of the initial stop loss
|
||||
initial_stop_loss_pct = Column(Float, nullable=True)
|
||||
initial_stop_loss_pct: Mapped[Optional[float]] = mapped_column(
|
||||
Float(), nullable=True) # type: ignore
|
||||
# stoploss order id which is on exchange
|
||||
stoploss_order_id = Column(String(255), nullable=True, index=True)
|
||||
stoploss_order_id: Mapped[Optional[str]] = mapped_column(
|
||||
String(255), nullable=True, index=True) # type: ignore
|
||||
# last update time of the stoploss order on exchange
|
||||
stoploss_last_update = Column(DateTime, nullable=True)
|
||||
stoploss_last_update: Mapped[Optional[datetime]] = mapped_column(nullable=True) # type: ignore
|
||||
# absolute value of the highest reached price
|
||||
max_rate = Column(Float, nullable=True, default=0.0)
|
||||
max_rate: Mapped[Optional[float]] = mapped_column(
|
||||
Float(), nullable=True, default=0.0) # type: ignore
|
||||
# Lowest price reached
|
||||
min_rate = Column(Float, nullable=True)
|
||||
exit_reason = Column(String(100), nullable=True)
|
||||
exit_order_status = Column(String(100), nullable=True)
|
||||
strategy = Column(String(100), nullable=True)
|
||||
enter_tag = Column(String(100), nullable=True)
|
||||
timeframe = Column(Integer, nullable=True)
|
||||
min_rate: Mapped[Optional[float]] = mapped_column(Float(), nullable=True) # type: ignore
|
||||
exit_reason: Mapped[Optional[str]] = mapped_column(String(100), nullable=True) # type: ignore
|
||||
exit_order_status: Mapped[Optional[str]] = mapped_column(
|
||||
String(100), nullable=True) # type: ignore
|
||||
strategy: Mapped[Optional[str]] = mapped_column(String(100), nullable=True) # type: ignore
|
||||
enter_tag: Mapped[Optional[str]] = mapped_column(String(100), nullable=True) # type: ignore
|
||||
timeframe: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) # type: ignore
|
||||
|
||||
trading_mode = Column(Enum(TradingMode), nullable=True)
|
||||
amount_precision = Column(Float, nullable=True)
|
||||
price_precision = Column(Float, nullable=True)
|
||||
precision_mode = Column(Integer, nullable=True)
|
||||
contract_size = Column(Float, nullable=True)
|
||||
trading_mode: Mapped[TradingMode] = mapped_column(
|
||||
Enum(TradingMode), nullable=True) # type: ignore
|
||||
amount_precision: Mapped[Optional[float]] = mapped_column(
|
||||
Float(), nullable=True) # type: ignore
|
||||
price_precision: Mapped[Optional[float]] = mapped_column(Float(), nullable=True) # type: ignore
|
||||
precision_mode: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) # type: ignore
|
||||
contract_size: Mapped[Optional[float]] = mapped_column(Float(), nullable=True) # type: ignore
|
||||
|
||||
# Leverage trading properties
|
||||
leverage = Column(Float, nullable=True, default=1.0)
|
||||
is_short = Column(Boolean, nullable=False, default=False)
|
||||
liquidation_price = Column(Float, nullable=True)
|
||||
leverage: Mapped[float] = mapped_column(Float(), nullable=True, default=1.0) # type: ignore
|
||||
is_short: Mapped[bool] = mapped_column(nullable=False, default=False) # type: ignore
|
||||
liquidation_price: Mapped[Optional[float]] = mapped_column(
|
||||
Float(), nullable=True) # type: ignore
|
||||
|
||||
# Margin Trading Properties
|
||||
interest_rate = Column(Float, nullable=False, default=0.0)
|
||||
interest_rate: Mapped[float] = mapped_column(
|
||||
Float(), nullable=False, default=0.0) # type: ignore
|
||||
|
||||
# Futures properties
|
||||
funding_fees = Column(Float, nullable=True, default=None)
|
||||
funding_fees: Mapped[Optional[float]] = mapped_column(
|
||||
Float(), nullable=True, default=None) # type: ignore
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
@@ -1227,22 +1287,23 @@ class Trade(_DECL_BASE, LocalTrade):
|
||||
def delete(self) -> None:
|
||||
|
||||
for order in self.orders:
|
||||
Order.query.session.delete(order)
|
||||
Order.session.delete(order)
|
||||
|
||||
Trade.query.session.delete(self)
|
||||
Trade.session.delete(self)
|
||||
Trade.commit()
|
||||
|
||||
@staticmethod
|
||||
def commit():
|
||||
Trade.query.session.commit()
|
||||
Trade.session.commit()
|
||||
|
||||
@staticmethod
|
||||
def rollback():
|
||||
Trade.query.session.rollback()
|
||||
Trade.session.rollback()
|
||||
|
||||
@staticmethod
|
||||
def get_trades_proxy(*, pair: str = None, is_open: bool = None,
|
||||
open_date: datetime = None, close_date: datetime = None,
|
||||
def get_trades_proxy(*, pair: Optional[str] = None, is_open: Optional[bool] = None,
|
||||
open_date: Optional[datetime] = None,
|
||||
close_date: Optional[datetime] = None,
|
||||
) -> List['LocalTrade']:
|
||||
"""
|
||||
Helper function to query Trades.j
|
||||
@@ -1262,7 +1323,7 @@ class Trade(_DECL_BASE, LocalTrade):
|
||||
trade_filter.append(Trade.close_date > close_date)
|
||||
if is_open is not None:
|
||||
trade_filter.append(Trade.is_open.is_(is_open))
|
||||
return Trade.get_trades(trade_filter).all()
|
||||
return cast(List[LocalTrade], Trade.get_trades(trade_filter).all())
|
||||
else:
|
||||
return LocalTrade.get_trades_proxy(
|
||||
pair=pair, is_open=is_open,
|
||||
@@ -1271,7 +1332,7 @@ class Trade(_DECL_BASE, LocalTrade):
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_trades(trade_filter=None, include_orders: bool = True) -> Query:
|
||||
def get_trades_query(trade_filter=None, include_orders: bool = True) -> Select:
|
||||
"""
|
||||
Helper function to query Trades using filters.
|
||||
NOTE: Not supported in Backtesting.
|
||||
@@ -1286,22 +1347,35 @@ class Trade(_DECL_BASE, LocalTrade):
|
||||
if trade_filter is not None:
|
||||
if not isinstance(trade_filter, list):
|
||||
trade_filter = [trade_filter]
|
||||
this_query = Trade.query.filter(*trade_filter)
|
||||
this_query = select(Trade).filter(*trade_filter)
|
||||
else:
|
||||
this_query = Trade.query
|
||||
this_query = select(Trade)
|
||||
if not include_orders:
|
||||
# Don't load order relations
|
||||
# Consider using noload or raiseload instead of lazyload
|
||||
this_query = this_query.options(lazyload(Trade.orders))
|
||||
return this_query
|
||||
|
||||
@staticmethod
|
||||
def get_trades(trade_filter=None, include_orders: bool = True) -> ScalarResult['Trade']:
|
||||
"""
|
||||
Helper function to query Trades using filters.
|
||||
NOTE: Not supported in Backtesting.
|
||||
:param trade_filter: Optional filter to apply to trades
|
||||
Can be either a Filter object, or a List of filters
|
||||
e.g. `(trade_filter=[Trade.id == trade_id, Trade.is_open.is_(True),])`
|
||||
e.g. `(trade_filter=Trade.id == trade_id)`
|
||||
:return: unsorted query object
|
||||
"""
|
||||
return Trade.session.scalars(Trade.get_trades_query(trade_filter, include_orders))
|
||||
|
||||
@staticmethod
|
||||
def get_open_order_trades() -> List['Trade']:
|
||||
"""
|
||||
Returns all open trades
|
||||
NOTE: Not supported in Backtesting.
|
||||
"""
|
||||
return Trade.get_trades(Trade.open_order_id.isnot(None)).all()
|
||||
return cast(List[Trade], Trade.get_trades(Trade.open_order_id.isnot(None)).all())
|
||||
|
||||
@staticmethod
|
||||
def get_open_trades_without_assigned_fees():
|
||||
@@ -1331,11 +1405,12 @@ class Trade(_DECL_BASE, LocalTrade):
|
||||
Retrieves total realized profit
|
||||
"""
|
||||
if Trade.use_db:
|
||||
total_profit = Trade.query.with_entities(
|
||||
func.sum(Trade.close_profit_abs)).filter(Trade.is_open.is_(False)).scalar()
|
||||
total_profit: float = Trade.session.execute(
|
||||
select(func.sum(Trade.close_profit_abs)).filter(Trade.is_open.is_(False))
|
||||
).scalar_one()
|
||||
else:
|
||||
total_profit = sum(
|
||||
t.close_profit_abs for t in LocalTrade.get_trades_proxy(is_open=False))
|
||||
total_profit = sum(t.close_profit_abs # type: ignore
|
||||
for t in LocalTrade.get_trades_proxy(is_open=False))
|
||||
return total_profit or 0
|
||||
|
||||
@staticmethod
|
||||
@@ -1345,8 +1420,9 @@ class Trade(_DECL_BASE, LocalTrade):
|
||||
in stake currency
|
||||
"""
|
||||
if Trade.use_db:
|
||||
total_open_stake_amount = Trade.query.with_entities(
|
||||
func.sum(Trade.stake_amount)).filter(Trade.is_open.is_(True)).scalar()
|
||||
total_open_stake_amount = Trade.session.scalar(
|
||||
select(func.sum(Trade.stake_amount)).filter(Trade.is_open.is_(True))
|
||||
)
|
||||
else:
|
||||
total_open_stake_amount = sum(
|
||||
t.stake_amount for t in LocalTrade.get_trades_proxy(is_open=True))
|
||||
@@ -1358,19 +1434,22 @@ class Trade(_DECL_BASE, LocalTrade):
|
||||
Returns List of dicts containing all Trades, including profit and trade count
|
||||
NOTE: Not supported in Backtesting.
|
||||
"""
|
||||
filters = [Trade.is_open.is_(False)]
|
||||
filters: List = [Trade.is_open.is_(False)]
|
||||
if minutes:
|
||||
start_date = datetime.now(timezone.utc) - timedelta(minutes=minutes)
|
||||
filters.append(Trade.close_date >= start_date)
|
||||
pair_rates = Trade.query.with_entities(
|
||||
Trade.pair,
|
||||
func.sum(Trade.close_profit).label('profit_sum'),
|
||||
func.sum(Trade.close_profit_abs).label('profit_sum_abs'),
|
||||
func.count(Trade.pair).label('count')
|
||||
).filter(*filters)\
|
||||
.group_by(Trade.pair) \
|
||||
.order_by(desc('profit_sum_abs')) \
|
||||
.all()
|
||||
|
||||
pair_rates = Trade.session.execute(
|
||||
select(
|
||||
Trade.pair,
|
||||
func.sum(Trade.close_profit).label('profit_sum'),
|
||||
func.sum(Trade.close_profit_abs).label('profit_sum_abs'),
|
||||
func.count(Trade.pair).label('count')
|
||||
).filter(*filters)
|
||||
.group_by(Trade.pair)
|
||||
.order_by(desc('profit_sum_abs'))
|
||||
).all()
|
||||
|
||||
return [
|
||||
{
|
||||
'pair': pair,
@@ -1391,19 +1470,20 @@ class Trade(_DECL_BASE, LocalTrade):
        NOTE: Not supported in Backtesting.
        """

        filters = [Trade.is_open.is_(False)]
        filters: List = [Trade.is_open.is_(False)]
        if (pair is not None):
            filters.append(Trade.pair == pair)

        enter_tag_perf = Trade.query.with_entities(
            Trade.enter_tag,
            func.sum(Trade.close_profit).label('profit_sum'),
            func.sum(Trade.close_profit_abs).label('profit_sum_abs'),
            func.count(Trade.pair).label('count')
        ).filter(*filters)\
            .group_by(Trade.enter_tag) \
            .order_by(desc('profit_sum_abs')) \
            .all()
        enter_tag_perf = Trade.session.execute(
            select(
                Trade.enter_tag,
                func.sum(Trade.close_profit).label('profit_sum'),
                func.sum(Trade.close_profit_abs).label('profit_sum_abs'),
                func.count(Trade.pair).label('count')
            ).filter(*filters)
            .group_by(Trade.enter_tag)
            .order_by(desc('profit_sum_abs'))
        ).all()

        return [
            {
@@ -1424,19 +1504,19 @@ class Trade(_DECL_BASE, LocalTrade):
        NOTE: Not supported in Backtesting.
        """

        filters = [Trade.is_open.is_(False)]
        filters: List = [Trade.is_open.is_(False)]
        if (pair is not None):
            filters.append(Trade.pair == pair)

        sell_tag_perf = Trade.query.with_entities(
            Trade.exit_reason,
            func.sum(Trade.close_profit).label('profit_sum'),
            func.sum(Trade.close_profit_abs).label('profit_sum_abs'),
            func.count(Trade.pair).label('count')
        ).filter(*filters)\
            .group_by(Trade.exit_reason) \
            .order_by(desc('profit_sum_abs')) \
            .all()
        sell_tag_perf = Trade.session.execute(
            select(
                Trade.exit_reason,
                func.sum(Trade.close_profit).label('profit_sum'),
                func.sum(Trade.close_profit_abs).label('profit_sum_abs'),
                func.count(Trade.pair).label('count')
            ).filter(*filters)
            .group_by(Trade.exit_reason)
            .order_by(desc('profit_sum_abs'))
        ).all()

        return [
            {
@@ -1457,21 +1537,21 @@ class Trade(_DECL_BASE, LocalTrade):
        NOTE: Not supported in Backtesting.
        """

        filters = [Trade.is_open.is_(False)]
        filters: List = [Trade.is_open.is_(False)]
        if (pair is not None):
            filters.append(Trade.pair == pair)

        mix_tag_perf = Trade.query.with_entities(
            Trade.id,
            Trade.enter_tag,
            Trade.exit_reason,
            func.sum(Trade.close_profit).label('profit_sum'),
            func.sum(Trade.close_profit_abs).label('profit_sum_abs'),
            func.count(Trade.pair).label('count')
        ).filter(*filters)\
            .group_by(Trade.id) \
            .order_by(desc('profit_sum_abs')) \
            .all()
        mix_tag_perf = Trade.session.execute(
            select(
                Trade.id,
                Trade.enter_tag,
                Trade.exit_reason,
                func.sum(Trade.close_profit).label('profit_sum'),
                func.sum(Trade.close_profit_abs).label('profit_sum_abs'),
                func.count(Trade.pair).label('count')
            ).filter(*filters)
            .group_by(Trade.id)
            .order_by(desc('profit_sum_abs'))
        ).all()

        return_list: List[Dict] = []
        for id, enter_tag, exit_reason, profit, profit_abs, count in mix_tag_perf:
@@ -1507,11 +1587,15 @@ class Trade(_DECL_BASE, LocalTrade):
        NOTE: Not supported in Backtesting.
        :returns: Tuple containing (pair, profit_sum)
        """
        best_pair = Trade.query.with_entities(
            Trade.pair, func.sum(Trade.close_profit).label('profit_sum')
        ).filter(Trade.is_open.is_(False) & (Trade.close_date >= start_date)) \
            .group_by(Trade.pair) \
            .order_by(desc('profit_sum')).first()
        best_pair = Trade.session.execute(
            select(
                Trade.pair,
                func.sum(Trade.close_profit).label('profit_sum')
            ).filter(Trade.is_open.is_(False) & (Trade.close_date >= start_date))
            .group_by(Trade.pair)
            .order_by(desc('profit_sum'))
        ).first()

        return best_pair

    @staticmethod
@@ -1521,12 +1605,13 @@ class Trade(_DECL_BASE, LocalTrade):
        NOTE: Not supported in Backtesting.
        :returns: Trading volume in stake currency
        """
        trading_volume = Order.query.with_entities(
            func.sum(Order.cost).label('volume')
        ).filter(
            Order.order_filled_date >= start_date,
            Order.status == 'closed'
        ).scalar()
        trading_volume = Trade.session.execute(
            select(
                func.sum(Order.cost).label('volume')
            ).filter(
                Order.order_filled_date >= start_date,
                Order.status == 'closed'
            )).scalar_one()
        return trading_volume
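
Note the shift from `.scalar()` to `.scalar_one()` in several of these hunks: `.scalar()` silently returns None when no row matches, while `.scalar_one()` demands exactly one row. Aggregates such as SUM are safe either way, since they always produce one row (holding NULL when nothing matched). A small sketch against the toy model above:

    from sqlalchemy.exc import NoResultFound

    with Session(engine) as session:
        empty = select(Trade.id).filter(Trade.id < 0)
        # .scalar() quietly returns None when no row matches ...
        assert session.execute(empty).scalar() is None
        # ... while .scalar_one() insists on exactly one row:
        try:
            session.execute(empty).scalar_one()
        except NoResultFound:
            pass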

    @staticmethod

@@ -436,11 +436,11 @@ def create_scatter(
    return None


def generate_candlestick_graph(pair: str, data: pd.DataFrame, trades: pd.DataFrame = None, *,
                               indicators1: List[str] = [],
                               indicators2: List[str] = [],
                               plot_config: Dict[str, Dict] = {},
                               ) -> go.Figure:
def generate_candlestick_graph(
    pair: str, data: pd.DataFrame, trades: Optional[pd.DataFrame] = None, *,
    indicators1: List[str] = [], indicators2: List[str] = [],
    plot_config: Dict[str, Dict] = {},
) -> go.Figure:
    """
    Generate the graph from the data generated by Backtesting or from DB
    Volume will always be plotted in row 2, so rows 1 and 3 are at our disposal for custom indicators

206
freqtrade/plugins/pairlist/RemotePairList.py
Normal file
@@ -0,0 +1,206 @@
"""
Remote PairList provider

Provides pair list fetched from a remote source
"""
import json
import logging
from pathlib import Path
from typing import Any, Dict, List, Tuple

import requests
from cachetools import TTLCache

from freqtrade import __version__
from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
from freqtrade.exchange.types import Tickers
from freqtrade.plugins.pairlist.IPairList import IPairList


logger = logging.getLogger(__name__)


class RemotePairList(IPairList):

    def __init__(self, exchange, pairlistmanager,
                 config: Config, pairlistconfig: Dict[str, Any],
                 pairlist_pos: int) -> None:
        super().__init__(exchange, pairlistmanager, config, pairlistconfig, pairlist_pos)

        if 'number_assets' not in self._pairlistconfig:
            raise OperationalException(
                '`number_assets` not specified. Please check your configuration '
                'for "pairlist.config.number_assets"')

        if 'pairlist_url' not in self._pairlistconfig:
            raise OperationalException(
                '`pairlist_url` not specified. Please check your configuration '
                'for "pairlist.config.pairlist_url"')

        self._number_pairs = self._pairlistconfig['number_assets']
        self._refresh_period: int = self._pairlistconfig.get('refresh_period', 1800)
        self._keep_pairlist_on_failure = self._pairlistconfig.get('keep_pairlist_on_failure', True)
        self._pair_cache: TTLCache = TTLCache(maxsize=1, ttl=self._refresh_period)
        self._pairlist_url = self._pairlistconfig.get('pairlist_url', '')
        self._read_timeout = self._pairlistconfig.get('read_timeout', 60)
        self._bearer_token = self._pairlistconfig.get('bearer_token', '')
        self._init_done = False
        self._last_pairlist: List[Any] = list()
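
For reference, a plausible pairlists configuration entry for this new handler, assembled from the option keys read in __init__ above (the URL and all values are illustrative):

    pairlists = [{
        "method": "RemotePairList",
        "pairlist_url": "https://example.com/pairlist.json",  # hypothetical URL
        "number_assets": 20,
        "refresh_period": 1800,            # seconds; also the TTL of the pair cache
        "keep_pairlist_on_failure": True,  # fall back to the last good list
        "read_timeout": 60,
        "bearer_token": "",                # sent as 'Authorization: Bearer <token>' if set
    }]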

    @property
    def needstickers(self) -> bool:
        """
        Boolean property defining if tickers are necessary.
        If no Pairlist requires tickers, an empty Dict is passed
        as tickers argument to filter_pairlist
        """
        return False

    def short_desc(self) -> str:
        """
        Short whitelist method description - used for startup-messages
        """
        return f"{self.name} - {self._pairlistconfig['number_assets']} pairs from RemotePairlist."

    def process_json(self, jsonparse) -> List[str]:

        pairlist = jsonparse.get('pairs', [])
        remote_refresh_period = int(jsonparse.get('refresh_period', self._refresh_period))

        if self._refresh_period < remote_refresh_period:
            self.log_once(f'Refresh Period has been increased from {self._refresh_period}'
                          f' to minimum allowed: {remote_refresh_period} from Remote.', logger.info)

            self._refresh_period = remote_refresh_period
            self._pair_cache = TTLCache(maxsize=1, ttl=remote_refresh_period)

        self._init_done = True

        return pairlist
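
The remote payload consumed by process_json() is a JSON object with a 'pairs' list and an optional 'refresh_period'. Expressed as the equivalent Python structure (values illustrative):

    payload = {
        "pairs": ["BTC/USDT", "ETH/USDT", "XRP/USDT"],
        "refresh_period": 1800,  # if larger than the local setting, it wins
    }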

    def return_last_pairlist(self) -> List[str]:
        if self._keep_pairlist_on_failure:
            pairlist = self._last_pairlist
            self.log_once('Keeping last fetched pairlist', logger.info)
        else:
            pairlist = []

        return pairlist

    def fetch_pairlist(self) -> Tuple[List[str], float]:

        headers = {
            'User-Agent': 'Freqtrade/' + __version__ + ' Remotepairlist'
        }

        if self._bearer_token:
            headers['Authorization'] = f'Bearer {self._bearer_token}'

        try:
            response = requests.get(self._pairlist_url, headers=headers,
                                    timeout=self._read_timeout)
            content_type = response.headers.get('content-type')
            time_elapsed = response.elapsed.total_seconds()

            if "application/json" in str(content_type):
                jsonparse = response.json()

                try:
                    pairlist = self.process_json(jsonparse)
                except Exception as e:

                    if self._init_done:
                        pairlist = self.return_last_pairlist()
                        logger.warning(f'Error while processing JSON data: {type(e)}')
                    else:
                        raise OperationalException(f'Error while processing JSON data: {type(e)}')

            else:
                if self._init_done:
                    self.log_once(f'Error: RemotePairList is not of type JSON: '
                                  f' {self._pairlist_url}', logger.info)
                    pairlist = self.return_last_pairlist()
                else:
                    raise OperationalException('RemotePairList is not of type JSON, abort.')

        except requests.exceptions.RequestException:
            self.log_once(f'Was not able to fetch pairlist from:'
                          f' {self._pairlist_url}', logger.info)

            pairlist = self.return_last_pairlist()

            time_elapsed = 0

        return pairlist, time_elapsed

    def gen_pairlist(self, tickers: Tickers) -> List[str]:
        """
        Generate the pairlist
        :param tickers: Tickers (from exchange.get_tickers). May be cached.
        :return: List of pairs
        """

        if self._init_done:
            pairlist = self._pair_cache.get('pairlist')
        else:
            pairlist = []

        time_elapsed = 0.0

        if pairlist:
            # Item found - no refresh necessary
            return pairlist.copy()
        else:
            if self._pairlist_url.startswith("file:///"):
                filename = self._pairlist_url.split("file:///", 1)[1]
                file_path = Path(filename)

                if file_path.exists():
                    with file_path.open() as json_file:
                        # Load the JSON data into a dictionary
                        jsonparse = json.load(json_file)

                        try:
                            pairlist = self.process_json(jsonparse)
                        except Exception as e:
                            if self._init_done:
                                pairlist = self.return_last_pairlist()
                                logger.warning(f'Error while processing JSON data: {type(e)}')
                            else:
                                raise OperationalException('Error while processing '
                                                           f'JSON data: {type(e)}')
                else:
                    raise ValueError(f"{self._pairlist_url} does not exist.")
            else:
                # Fetch Pairlist from Remote URL
                pairlist, time_elapsed = self.fetch_pairlist()

        self.log_once(f"Fetched pairs: {pairlist}", logger.debug)

        pairlist = self._whitelist_for_active_markets(pairlist)
        pairlist = pairlist[:self._number_pairs]

        self._pair_cache['pairlist'] = pairlist.copy()

        if time_elapsed != 0.0:
            self.log_once(f'Pairlist Fetched in {time_elapsed} seconds.', logger.info)
        else:
            self.log_once('Fetched Pairlist.', logger.info)

        self._last_pairlist = list(pairlist)

        return pairlist

    def filter_pairlist(self, pairlist: List[str], tickers: Dict) -> List[str]:
        """
        Filters and sorts pairlist and returns the whitelist again.
        Called on each bot iteration - please use internal caching if necessary
        :param pairlist: pairlist to filter or sort
        :param tickers: Tickers (from exchange.get_tickers). May be cached.
        :return: new whitelist
        """
        rpl_pairlist = self.gen_pairlist(tickers)
        merged_list = pairlist + rpl_pairlist
        merged_list = sorted(set(merged_list), key=merged_list.index)
        return merged_list
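
The merge above is an order-preserving de-duplication: set() drops the duplicates, and sorting by the first index in the merged list restores the original encounter order. Illustratively:

    merged = ["BTC/USDT", "ETH/USDT", "BTC/USDT", "XRP/USDT"]
    assert sorted(set(merged), key=merged.index) == ["BTC/USDT", "ETH/USDT", "XRP/USDT"]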
@@ -5,6 +5,7 @@ import logging
from typing import Any, Dict, Optional

from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
from freqtrade.exchange.types import Ticker
from freqtrade.plugins.pairlist.IPairList import IPairList

@@ -22,6 +23,12 @@ class SpreadFilter(IPairList):
        self._max_spread_ratio = pairlistconfig.get('max_spread_ratio', 0.005)
        self._enabled = self._max_spread_ratio != 0

        if not self._exchange.get_option('tickers_have_bid_ask'):
            raise OperationalException(
                f"{self.name} requires exchange to have bid/ask data for tickers, "
                "which is not available for the selected exchange / trading mode."
            )

    @property
    def needstickers(self) -> bool:
        """

@@ -135,7 +135,7 @@ class VolumePairList(IPairList):
            filtered_tickers = [
                v for k, v in tickers.items()
                if (self._exchange.get_pair_quote_currency(k) == self._stake_currency
                    and (self._use_range or v[self._sort_key] is not None)
                    and (self._use_range or v.get(self._sort_key) is not None)
                    and v['symbol'] in _pairlist)]
            pairlist = [s['symbol'] for s in filtered_tickers]
        else:
@@ -218,7 +218,7 @@ class VolumePairList(IPairList):
                else:
                    filtered_tickers[i]['quoteVolume'] = 0
            else:
                # Tickers mode - filter based on incomming pairlist.
                # Tickers mode - filter based on incoming pairlist.
                filtered_tickers = [v for k, v in tickers.items() if k in pairlist]

        if self._min_value > 0:
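
The one-character change from v[self._sort_key] to v.get(self._sort_key) matters because ticker dicts from some exchanges omit the sort key entirely; .get() turns that into a filterable None instead of a KeyError. A toy illustration:

    ticker = {"symbol": "BTC/USDT"}           # no 'quoteVolume' key at all
    assert ticker.get("quoteVolume") is None  # ticker["quoteVolume"] would raise KeyError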

@@ -23,7 +23,8 @@ logger = logging.getLogger(__name__)

class PairListManager(LoggingMixin):

    def __init__(self, exchange, config: Config, dataprovider: DataProvider = None) -> None:
    def __init__(
            self, exchange, config: Config, dataprovider: Optional[DataProvider] = None) -> None:
        self._exchange = exchange
        self._config = config
        self._whitelist = self._config['exchange'].get('pair_whitelist')
@@ -153,7 +154,8 @@ class PairListManager(LoggingMixin):
            return []
        return whitelist

    def create_pair_list(self, pairs: List[str], timeframe: str = None) -> ListPairsWithTimeframes:
    def create_pair_list(
            self, pairs: List[str], timeframe: Optional[str] = None) -> ListPairsWithTimeframes:
        """
        Create list of pair tuples with (pair, timeframe)
        """

@@ -89,7 +89,8 @@ class IResolver:
        module = importlib.util.module_from_spec(spec)
        try:
            spec.loader.exec_module(module)  # type: ignore  # importlib does not use typehints
        except (ModuleNotFoundError, SyntaxError, ImportError, NameError) as err:
        except (AttributeError, ModuleNotFoundError, SyntaxError,
                ImportError, NameError) as err:
            # Catch errors in case a specific module is not installed
            logger.warning(f"Could not import {module_path} due to '{err}'")
            if enum_failed:

@@ -33,7 +33,7 @@ class StrategyResolver(IResolver):
    extra_path = "strategy_path"

    @staticmethod
    def load_strategy(config: Config = None) -> IStrategy:
    def load_strategy(config: Optional[Config] = None) -> IStrategy:
        """
        Load the custom class from config parameter
        :param config: configuration dictionary or None
@@ -76,6 +76,7 @@ class StrategyResolver(IResolver):
            ("ignore_buying_expired_candle_after", 0),
            ("position_adjustment_enable", False),
            ("max_entry_position_adjustment", -1),
            ("max_open_trades", -1)
        ]
        for attribute, default in attributes:
            StrategyResolver._override_attribute_helper(strategy, config,
@@ -110,7 +111,11 @@ class StrategyResolver(IResolver):
            val = getattr(strategy, attribute)
            # None's cannot exist in the config, so do not copy them
            if val is not None:
                config[attribute] = val
                # max_open_trades set to -1 in the strategy will be copied as infinity in the config
                if attribute == 'max_open_trades' and val == -1:
                    config[attribute] = float('inf')
                else:
                    config[attribute] = val
            # Explicitly check for None here as other "falsy" values are possible
            elif default is not None:
                setattr(strategy, attribute, default)
@@ -128,6 +133,8 @@ class StrategyResolver(IResolver):
                                   key=lambda t: t[0]))
        if hasattr(strategy, 'stoploss'):
            strategy.stoploss = float(strategy.stoploss)
        if hasattr(strategy, 'max_open_trades') and strategy.max_open_trades < 0:
            strategy.max_open_trades = float('inf')
        return strategy
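
The two added branches normalise the "unlimited" sentinel: max_open_trades = -1 in a strategy becomes float('inf') both on the strategy object and when copied into the config, so downstream comparisons need no special case. Illustratively:

    max_open_trades = -1            # "unlimited" sentinel in a strategy file
    if max_open_trades < 0:
        max_open_trades = float('inf')
    assert 5 < max_open_trades      # any open-trade count now compares naturally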

    @staticmethod

@@ -1,3 +1,2 @@
# flake8: noqa: F401
from .rpc import RPC, RPCException, RPCHandler
from .rpc_manager import RPCManager
from .rpc import RPC, RPCException, RPCHandler  # noqa: F401
from .rpc_manager import RPCManager  # noqa: F401

@@ -1,2 +1 @@
# flake8: noqa: F401
from .webserver import ApiServer
from .webserver import ApiServer  # noqa: F401

@@ -10,7 +10,8 @@ from fastapi.exceptions import HTTPException
from freqtrade.configuration.config_validation import validate_config_consistency
from freqtrade.data.btanalysis import get_backtest_resultlist, load_and_merge_backtest_result
from freqtrade.enums import BacktestState
from freqtrade.exceptions import DependencyException
from freqtrade.exceptions import DependencyException, OperationalException
from freqtrade.misc import deep_merge_dicts
from freqtrade.rpc.api_server.api_schemas import (BacktestHistoryEntry, BacktestRequest,
                                                  BacktestResponse)
from freqtrade.rpc.api_server.deps import get_config, is_webserver_mode
@@ -25,9 +26,10 @@ router = APIRouter()


@router.post('/backtest', response_model=BacktestResponse, tags=['webserver', 'backtest'])
# flake8: noqa: C901
async def api_start_backtest(bt_settings: BacktestRequest, background_tasks: BackgroundTasks,
                             config=Depends(get_config), ws_mode=Depends(is_webserver_mode)):
async def api_start_backtest(  # noqa: C901
        bt_settings: BacktestRequest, background_tasks: BackgroundTasks,
        config=Depends(get_config), ws_mode=Depends(is_webserver_mode)):
    ApiServer._bt['bt_error'] = None
    """Start backtesting if not done so already"""
    if ApiServer._bgtask_running:
        raise RPCException('Bot Background task already running')
@@ -37,10 +39,11 @@ async def api_start_backtest(bt_settings: BacktestRequest, background_tasks: Bac

    btconfig = deepcopy(config)
    settings = dict(bt_settings)
    if settings.get('freqai', None) is not None:
        settings['freqai'] = dict(settings['freqai'])
    # Pydantic models will contain all keys, but non-provided ones are None
    for setting in settings.keys():
        if settings[setting] is not None:
            btconfig[setting] = settings[setting]

    btconfig = deep_merge_dicts(settings, btconfig, allow_null_overrides=False)
    try:
        btconfig['stake_amount'] = float(btconfig['stake_amount'])
    except ValueError:
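
A sketch of what the replaced loop now delegates to, assuming deep_merge_dicts(source, destination, allow_null_overrides=False) recursively merges into the destination while refusing to override existing values with None. The real helper lives in freqtrade.misc; this toy version only mirrors the semantics implied by the removed loop:

    def deep_merge_sketch(source: dict, destination: dict) -> dict:
        # Recursive merge that skips None overrides, like allow_null_overrides=False.
        for key, value in source.items():
            if isinstance(value, dict):
                deep_merge_sketch(value, destination.setdefault(key, {}))
            elif value is not None or key not in destination:
                destination[key] = value
        return destination

    merged = deep_merge_sketch({"timeframe": None, "freqai": {"identifier": "x"}},
                               {"timeframe": "5m"})
    assert merged == {"timeframe": "5m", "freqai": {"identifier": "x"}}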

@@ -58,30 +61,31 @@ async def api_start_backtest(bt_settings: BacktestRequest, background_tasks: Bac
        asyncio.set_event_loop(asyncio.new_event_loop())
        try:
            # Reload strategy
            lastconfig = ApiServer._bt_last_config
            lastconfig = ApiServer._bt['last_config']
            strat = StrategyResolver.load_strategy(btconfig)
            validate_config_consistency(btconfig)

            if (
                not ApiServer._bt
                not ApiServer._bt['bt']
                or lastconfig.get('timeframe') != strat.timeframe
                or lastconfig.get('timeframe_detail') != btconfig.get('timeframe_detail')
                or lastconfig.get('timerange') != btconfig['timerange']
            ):
                from freqtrade.optimize.backtesting import Backtesting
                ApiServer._bt = Backtesting(btconfig)
                ApiServer._bt.load_bt_data_detail()
                ApiServer._bt['bt'] = Backtesting(btconfig)
                ApiServer._bt['bt'].load_bt_data_detail()
            else:
                ApiServer._bt.config = btconfig
                ApiServer._bt.init_backtest()
                ApiServer._bt['bt'].config = btconfig
                ApiServer._bt['bt'].init_backtest()
            # Only reload data if timeframe changed.
            if (
                not ApiServer._bt_data
                or not ApiServer._bt_timerange
                not ApiServer._bt['data']
                or not ApiServer._bt['timerange']
                or lastconfig.get('timeframe') != strat.timeframe
                or lastconfig.get('timerange') != btconfig['timerange']
            ):
                ApiServer._bt_data, ApiServer._bt_timerange = ApiServer._bt.load_bt_data()
                ApiServer._bt['data'], ApiServer._bt['timerange'] = ApiServer._bt[
                    'bt'].load_bt_data()

            lastconfig['timerange'] = btconfig['timerange']
            lastconfig['timeframe'] = strat.timeframe

@@ -89,34 +93,35 @@ async def api_start_backtest(bt_settings: BacktestRequest, background_tasks: Bac
            lastconfig['enable_protections'] = btconfig.get('enable_protections')
            lastconfig['dry_run_wallet'] = btconfig.get('dry_run_wallet')

            ApiServer._bt.enable_protections = btconfig.get('enable_protections', False)
            ApiServer._bt.strategylist = [strat]
            ApiServer._bt.results = {}
            ApiServer._bt.load_prior_backtest()
            ApiServer._bt['bt'].enable_protections = btconfig.get('enable_protections', False)
            ApiServer._bt['bt'].strategylist = [strat]
            ApiServer._bt['bt'].results = {}
            ApiServer._bt['bt'].load_prior_backtest()

            ApiServer._bt.abort = False
            if (ApiServer._bt.results and
                    strat.get_strategy_name() in ApiServer._bt.results['strategy']):
            ApiServer._bt['bt'].abort = False
            if (ApiServer._bt['bt'].results and
                    strat.get_strategy_name() in ApiServer._bt['bt'].results['strategy']):
                # When previous result hash matches - reuse that result and skip backtesting.
                logger.info(f'Reusing result of previous backtest for {strat.get_strategy_name()}')
            else:
                min_date, max_date = ApiServer._bt.backtest_one_strategy(
                    strat, ApiServer._bt_data, ApiServer._bt_timerange)
                min_date, max_date = ApiServer._bt['bt'].backtest_one_strategy(
                    strat, ApiServer._bt['data'], ApiServer._bt['timerange'])

                ApiServer._bt.results = generate_backtest_stats(
                    ApiServer._bt_data, ApiServer._bt.all_results,
                ApiServer._bt['bt'].results = generate_backtest_stats(
                    ApiServer._bt['data'], ApiServer._bt['bt'].all_results,
                    min_date=min_date, max_date=max_date)

            if btconfig.get('export', 'none') == 'trades':
                store_backtest_stats(
                    btconfig['exportfilename'], ApiServer._bt.results,
                    btconfig['exportfilename'], ApiServer._bt['bt'].results,
                    datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
                )

            logger.info("Backtest finished.")

        except DependencyException as e:
            logger.info(f"Backtesting caused an error: {e}")
        except (Exception, OperationalException, DependencyException) as e:
            logger.exception(f"Backtesting caused an error: {e}")
            ApiServer._bt['bt_error'] = str(e)
            pass
        finally:
            ApiServer._bgtask_running = False

@@ -144,13 +149,14 @@ def api_get_backtest(ws_mode=Depends(is_webserver_mode)):
        return {
            "status": "running",
            "running": True,
            "step": ApiServer._bt.progress.action if ApiServer._bt else str(BacktestState.STARTUP),
            "progress": ApiServer._bt.progress.progress if ApiServer._bt else 0,
            "step": (ApiServer._bt['bt'].progress.action if ApiServer._bt['bt']
                     else str(BacktestState.STARTUP)),
            "progress": ApiServer._bt['bt'].progress.progress if ApiServer._bt['bt'] else 0,
            "trade_count": len(LocalTrade.trades),
            "status_msg": "Backtest running",
        }

    if not ApiServer._bt:
    if not ApiServer._bt['bt']:
        return {
            "status": "not_started",
            "running": False,
@@ -158,6 +164,14 @@ def api_get_backtest(ws_mode=Depends(is_webserver_mode)):
            "progress": 0,
            "status_msg": "Backtest not yet executed"
        }
    if ApiServer._bt['bt_error']:
        return {
            "status": "error",
            "running": False,
            "step": "",
            "progress": 0,
            "status_msg": f"Backtest failed with {ApiServer._bt['bt_error']}"
        }

    return {
        "status": "ended",
@@ -165,7 +179,7 @@ def api_get_backtest(ws_mode=Depends(is_webserver_mode)):
        "status_msg": "Backtest ended",
        "step": "finished",
        "progress": 1,
        "backtest_result": ApiServer._bt.results,
        "backtest_result": ApiServer._bt['bt'].results,
    }

@@ -180,12 +194,12 @@ def api_delete_backtest(ws_mode=Depends(is_webserver_mode)):
            "progress": 0,
            "status_msg": "Backtest running",
        }
    if ApiServer._bt:
        ApiServer._bt.cleanup()
        del ApiServer._bt
        ApiServer._bt = None
        del ApiServer._bt_data
        ApiServer._bt_data = None
    if ApiServer._bt['bt']:
        ApiServer._bt['bt'].cleanup()
        del ApiServer._bt['bt']
        ApiServer._bt['bt'] = None
        del ApiServer._bt['data']
        ApiServer._bt['data'] = None
    logger.info("Backtesting reset")
    return {
        "status": "reset",
@@ -206,7 +220,7 @@ def api_backtest_abort(ws_mode=Depends(is_webserver_mode)):
            "progress": 0,
            "status_msg": "Backtest ended",
        }
    ApiServer._bt.abort = True
    ApiServer._bt['bt'].abort = True
    return {
        "status": "stopping",
        "running": False,
@@ -216,14 +230,17 @@ def api_backtest_abort(ws_mode=Depends(is_webserver_mode)):
    }


@router.get('/backtest/history', response_model=List[BacktestHistoryEntry], tags=['webserver', 'backtest'])
@router.get('/backtest/history', response_model=List[BacktestHistoryEntry],
            tags=['webserver', 'backtest'])
def api_backtest_history(config=Depends(get_config), ws_mode=Depends(is_webserver_mode)):
    # Get backtest result history, read from metadata files
    return get_backtest_resultlist(config['user_data_dir'] / 'backtest_results')


@router.get('/backtest/history/result', response_model=BacktestResponse, tags=['webserver', 'backtest'])
def api_backtest_history_result(filename: str, strategy: str, config=Depends(get_config), ws_mode=Depends(is_webserver_mode)):
@router.get('/backtest/history/result', response_model=BacktestResponse,
            tags=['webserver', 'backtest'])
def api_backtest_history_result(filename: str, strategy: str, config=Depends(get_config),
                                ws_mode=Depends(is_webserver_mode)):
    # Get backtest result history, read from metadata files
    fn = config['user_data_dir'] / 'backtest_results' / filename
    results: Dict[str, Any] = {

@@ -3,7 +3,7 @@ from typing import Any, Dict, List, Optional, Union

from pydantic import BaseModel

from freqtrade.constants import DATETIME_PRINT_FORMAT
from freqtrade.constants import DATETIME_PRINT_FORMAT, IntOrInf
from freqtrade.enums import OrderTypeValues, SignalDirection, TradingMode


@@ -165,9 +165,10 @@ class ShowConfig(BaseModel):
    stake_amount: str
    available_capital: Optional[float]
    stake_currency_decimals: int
    max_open_trades: int
    max_open_trades: IntOrInf
    minimal_roi: Dict[str, Any]
    stoploss: Optional[float]
    stoploss_on_exchange: bool
    trailing_stop: Optional[bool]
    trailing_stop_positive: Optional[float]
    trailing_stop_positive_offset: Optional[float]
@@ -217,8 +218,8 @@ class TradeSchema(BaseModel):
    amount: float
    amount_requested: float
    stake_amount: float
    max_stake_amount: Optional[float]
    strategy: str
    buy_tag: Optional[str]  # Deprecated
    enter_tag: Optional[str]
    timeframe: int
    fee_open: Optional[float]
@@ -227,25 +228,33 @@ class TradeSchema(BaseModel):
    fee_close: Optional[float]
    fee_close_cost: Optional[float]
    fee_close_currency: Optional[str]

    open_date: str
    open_timestamp: int
    open_rate: float
    open_rate_requested: Optional[float]
    open_trade_value: float

    close_date: Optional[str]
    close_timestamp: Optional[int]
    close_rate: Optional[float]
    close_rate_requested: Optional[float]

    close_profit: Optional[float]
    close_profit_pct: Optional[float]
    close_profit_abs: Optional[float]

    profit_ratio: Optional[float]
    profit_pct: Optional[float]
    profit_abs: Optional[float]
    profit_fiat: Optional[float]
    sell_reason: Optional[str]  # Deprecated

    realized_profit: float
    realized_profit_ratio: Optional[float]

    exit_reason: Optional[str]
    exit_order_status: Optional[str]

    stop_loss_abs: Optional[float]
    stop_loss_ratio: Optional[float]
    stop_loss_pct: Optional[float]
@@ -255,6 +264,7 @@ class TradeSchema(BaseModel):
    initial_stop_loss_abs: Optional[float]
    initial_stop_loss_ratio: Optional[float]
    initial_stop_loss_pct: Optional[float]

    min_rate: Optional[float]
    max_rate: Optional[float]
    open_order_id: Optional[str]
@@ -273,10 +283,11 @@ class OpenTradeSchema(TradeSchema):
    stoploss_current_dist_ratio: Optional[float]
    stoploss_entry_dist: Optional[float]
    stoploss_entry_dist_ratio: Optional[float]
    current_profit: float
    current_profit_abs: float
    current_profit_pct: float
    current_rate: float
    total_profit_abs: float
    total_profit_fiat: Optional[float]
    total_profit_ratio: Optional[float]

    open_order: Optional[str]


@@ -300,7 +311,7 @@ class LockModel(BaseModel):
    lock_timestamp: int
    pair: str
    side: str
    reason: str
    reason: Optional[str]


class Locks(BaseModel):
@@ -372,6 +383,10 @@ class StrategyListResponse(BaseModel):
    strategies: List[str]


class FreqAIModelListResponse(BaseModel):
    freqaimodels: List[str]


class StrategyResponse(BaseModel):
    strategy: str
    code: str
@@ -410,15 +425,22 @@ class PairHistory(BaseModel):
        }


class BacktestFreqAIInputs(BaseModel):
    identifier: str


class BacktestRequest(BaseModel):
    strategy: str
    timeframe: Optional[str]
    timeframe_detail: Optional[str]
    timerange: Optional[str]
    max_open_trades: Optional[int]
    max_open_trades: Optional[IntOrInf]
    stake_amount: Optional[str]
    enable_protections: bool
    dry_run_wallet: Optional[float]
    backtest_cache: Optional[str]
    freqaimodel: Optional[str]
    freqai: Optional[BacktestFreqAIInputs]


class BacktestResponse(BaseModel):
@@ -445,5 +467,5 @@ class SysInfo(BaseModel):


class Health(BaseModel):
    last_process: datetime
    last_process_ts: int
    last_process: Optional[datetime]
    last_process_ts: Optional[int]
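
With the new fields, a backtest request payload exercising FreqAI might look like this (field names come from the schema above; all values are illustrative):

    request_body = {
        "strategy": "SampleStrategy",
        "timerange": "20220101-20220201",
        "max_open_trades": 3,       # IntOrInf additionally admits infinity
        "enable_protections": False,
        "freqaimodel": "LightGBMRegressor",
        "freqai": {"identifier": "test-id"},
    }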

@@ -13,12 +13,13 @@ from freqtrade.rpc import RPC
from freqtrade.rpc.api_server.api_schemas import (AvailablePairs, Balances, BlacklistPayload,
                                                  BlacklistResponse, Count, Daily,
                                                  DeleteLockRequest, DeleteTrade, ForceEnterPayload,
                                                  ForceEnterResponse, ForceExitPayload, Health,
                                                  Locks, Logs, OpenTradeSchema, PairHistory,
                                                  PerformanceEntry, Ping, PlotConfig, Profit,
                                                  ResultMsg, ShowConfig, Stats, StatusMsg,
                                                  StrategyListResponse, StrategyResponse, SysInfo,
                                                  Version, WhitelistResponse)
                                                  ForceEnterResponse, ForceExitPayload,
                                                  FreqAIModelListResponse, Health, Locks, Logs,
                                                  OpenTradeSchema, PairHistory, PerformanceEntry,
                                                  Ping, PlotConfig, Profit, ResultMsg, ShowConfig,
                                                  Stats, StatusMsg, StrategyListResponse,
                                                  StrategyResponse, SysInfo, Version,
                                                  WhitelistResponse)
from freqtrade.rpc.api_server.deps import get_config, get_exchange, get_rpc, get_rpc_optional
from freqtrade.rpc.rpc import RPCException

@@ -38,7 +39,11 @@ logger = logging.getLogger(__name__)
# 2.17: Forceentry - leverage, partial force_exit
# 2.20: Add websocket endpoints
# 2.21: Add new_candle messagetype
API_VERSION = 2.21
# 2.22: Add FreqAI to backtesting
# 2.23: Allow plot config request in webserver mode
# 2.24: Add cancel_open_order endpoint
# 2.25: Add several profit values to /status endpoint
API_VERSION = 2.25

# Public API, requires no auth.
router_public = APIRouter()
@@ -120,6 +125,12 @@ def trades_delete(tradeid: int, rpc: RPC = Depends(get_rpc)):
    return rpc._rpc_delete(tradeid)


@router.delete('/trades/{tradeid}/open-order', response_model=OpenTradeSchema, tags=['trading'])
def cancel_open_order(tradeid: int, rpc: RPC = Depends(get_rpc)):
    rpc._rpc_cancel_open_order(tradeid)
    return rpc._rpc_trade_status([tradeid])[0]
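
The new endpoint (version 2.24 in the changelog above) can be exercised with a plain HTTP client; the base URL, trade id, and credentials below are illustrative:

    import requests

    resp = requests.delete(
        "http://127.0.0.1:8080/api/v1/trades/42/open-order",  # cancels trade 42's open order
        auth=("freqtrader", "password"),                      # hypothetical API credentials
    )
    print(resp.json())  # the updated trade, per response_model=OpenTradeSchema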


# TODO: Missing response model
@router.get('/edge', tags=['info'])
def edge(rpc: RPC = Depends(get_rpc)):
@@ -246,8 +257,18 @@ def pair_history(pair: str, timeframe: str, timerange: str, strategy: str,


@router.get('/plot_config', response_model=PlotConfig, tags=['candle data'])
def plot_config(rpc: RPC = Depends(get_rpc)):
    return PlotConfig.parse_obj(rpc._rpc_plot_config())
def plot_config(strategy: Optional[str] = None, config=Depends(get_config),
                rpc: Optional[RPC] = Depends(get_rpc_optional)):
    if not strategy:
        if not rpc:
            raise RPCException("Strategy is mandatory in webserver mode.")
        return PlotConfig.parse_obj(rpc._rpc_plot_config())
    else:
        config1 = deepcopy(config)
        config1.update({
            'strategy': strategy
        })
        return PlotConfig.parse_obj(RPC._rpc_plot_config_with_strategy(config1))
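
This is the 2.23 change from the changelog: webserver mode has no live RPC handler, so the endpoint now accepts an explicit strategy query parameter and resolves the plot config from it. A usage sketch (base URL and credentials illustrative):

    import requests

    resp = requests.get(
        "http://127.0.0.1:8080/api/v1/plot_config",
        params={"strategy": "SampleStrategy"},  # required in webserver mode
        auth=("freqtrader", "password"),        # hypothetical API credentials
    )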


@router.get('/strategies', response_model=StrategyListResponse, tags=['strategy'])
@@ -279,6 +300,16 @@ def get_strategy(strategy: str, config=Depends(get_config)):
    }


@router.get('/freqaimodels', response_model=FreqAIModelListResponse, tags=['freqai'])
def list_freqaimodels(config=Depends(get_config)):
    from freqtrade.resolvers.freqaimodel_resolver import FreqaiModelResolver
    strategies = FreqaiModelResolver.search_all_objects(
        config, False)
    strategies = sorted(strategies, key=lambda x: x['name'])

    return {'freqaimodels': [x['name'] for x in strategies]}


@router.get('/available_pairs', response_model=AvailablePairs, tags=['candle data'])
def list_available_pairs(timeframe: Optional[str] = None, stake_currency: Optional[str] = None,
                         candletype: Optional[CandleType] = None, config=Depends(get_config)):
@@ -316,4 +347,4 @@ def sysinfo():

@router.get('/health', response_model=Health, tags=['info'])
def health(rpc: RPC = Depends(get_rpc)):
    return rpc._health()
    return rpc.health()

@@ -90,10 +90,11 @@ async def _process_consumer_request(

    elif type == RPCRequestType.ANALYZED_DF:
        # Limit the amount of candles per dataframe to 'limit' or 1500
        limit = min(data.get('limit', 1500), 1500) if data else None
        limit = int(min(data.get('limit', 1500), 1500)) if data else None
        pair = data.get('pair', None) if data else None

        # For every pair in the generator, send a separate message
        for message in rpc._ws_request_analyzed_df(limit):
        for message in rpc._ws_request_analyzed_df(limit, pair):
            # Format response
            response = WSAnalyzedDFMessage(data=message)
            await channel.send(response.dict(exclude_none=True))
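
Together with the WSAnalyzedDFRequest default shown at the end of this diff, consumers can now request the analyzed dataframe for a single pair. A plausible request message, assuming RPCRequestType.ANALYZED_DF serialises to "analyzed_df":

    request = {
        "type": "analyzed_df",
        "data": {"limit": 500, "pair": "BTC/USDT"},  # omit "pair" (or send None) for all pairs
    }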

@@ -36,10 +36,13 @@ class ApiServer(RPCHandler):

    _rpc: RPC
    # Backtesting type: Backtesting
    _bt = None
    _bt_data = None
    _bt_timerange = None
    _bt_last_config: Config = {}
    _bt: Dict[str, Any] = {
        'bt': None,
        'data': None,
        'timerange': None,
        'last_config': {},
        'bt_error': None,
    }
    _has_rpc: bool = False
    _bgtask_running: bool = False
    _config: Config = {}

@@ -1,7 +1,6 @@
# flake8: noqa: F401
# isort: off
from freqtrade.rpc.api_server.ws.types import WebSocketType
from freqtrade.rpc.api_server.ws.proxy import WebSocketProxy
from freqtrade.rpc.api_server.ws.serializer import HybridJSONWebSocketSerializer
from freqtrade.rpc.api_server.ws.channel import WebSocketChannel
from freqtrade.rpc.api_server.ws.message_stream import MessageStream
from freqtrade.rpc.api_server.ws.types import WebSocketType  # noqa: F401
from freqtrade.rpc.api_server.ws.proxy import WebSocketProxy  # noqa: F401
from freqtrade.rpc.api_server.ws.serializer import HybridJSONWebSocketSerializer  # noqa: F401
from freqtrade.rpc.api_server.ws.channel import WebSocketChannel  # noqa: F401
from freqtrade.rpc.api_server.ws.message_stream import MessageStream  # noqa: F401

@@ -27,7 +27,8 @@ class WebSocketChannel:
        self,
        websocket: WebSocketType,
        channel_id: Optional[str] = None,
        serializer_cls: Type[WebSocketSerializer] = HybridJSONWebSocketSerializer
        serializer_cls: Type[WebSocketSerializer] = HybridJSONWebSocketSerializer,
        send_throttle: float = 0.01
    ):
        self.channel_id = channel_id if channel_id else uuid4().hex[:8]
        self._websocket = WebSocketProxy(websocket)
@@ -41,6 +42,7 @@ class WebSocketChannel:
        self._send_times: Deque[float] = deque([], maxlen=10)
        # High limit defaults to 3 to start
        self._send_high_limit = 3
        self._send_throttle = send_throttle

        # The subscribed message types
        self._subscriptions: List[str] = []
@@ -106,7 +108,8 @@ class WebSocketChannel:

            # Explicitly give control back to event loop as
            # websockets.send does not
            await asyncio.sleep(0.01)
            # Also throttles how fast we send
            await asyncio.sleep(self._send_throttle)

    async def recv(self):
        """

@@ -47,7 +47,7 @@ class WSWhitelistRequest(WSRequestSchema):

class WSAnalyzedDFRequest(WSRequestSchema):
    type: RPCRequestType = RPCRequestType.ANALYZED_DF
    data: Dict[str, Any] = {"limit": 1500}
    data: Dict[str, Any] = {"limit": 1500, "pair": None}


# ------------------------------ MESSAGE SCHEMAS ----------------------------