merge upstream
@@ -12,7 +12,7 @@ from freqtrade.constants import DEFAULT_CONFIG
 ARGS_COMMON = ["verbosity", "logfile", "version", "config", "datadir", "user_data_dir"]

-ARGS_STRATEGY = ["strategy", "strategy_path"]
+ARGS_STRATEGY = ["strategy", "strategy_path", "recursive_strategy_search"]

 ARGS_TRADE = ["db_url", "sd_notify", "dry_run", "dry_run_wallet", "fee"]

@@ -37,7 +37,8 @@ ARGS_HYPEROPT = ARGS_COMMON_OPTIMIZE + ["hyperopt", "hyperopt_path",
 ARGS_EDGE = ARGS_COMMON_OPTIMIZE + ["stoploss_range"]

-ARGS_LIST_STRATEGIES = ["strategy_path", "print_one_column", "print_colorized"]
+ARGS_LIST_STRATEGIES = ["strategy_path", "print_one_column", "print_colorized",
+                        "recursive_strategy_search"]

 ARGS_LIST_HYPEROPTS = ["hyperopt_path", "print_one_column", "print_colorized"]

@@ -71,7 +72,8 @@ ARGS_LIST_DATA = ["exchange", "dataformat_ohlcv", "pairs", "trading_mode"]
 ARGS_DOWNLOAD_DATA = ["pairs", "pairs_file", "days", "new_pairs_days", "include_inactive",
                       "timerange", "download_trades", "exchange", "timeframes",
-                      "erase", "dataformat_ohlcv", "dataformat_trades", "trading_mode"]
+                      "erase", "dataformat_ohlcv", "dataformat_trades", "trading_mode",
+                      "prepend_data"]

 ARGS_PLOT_DATAFRAME = ["pairs", "indicators1", "indicators2", "plot_limit",
                        "db_url", "trade_source", "export", "exportfilename",

@@ -83,6 +83,11 @@ AVAILABLE_CLI_OPTIONS = {
         help='Reset sample files to their original state.',
         action='store_true',
     ),
+    "recursive_strategy_search": Arg(
+        '--recursive-strategy-search',
+        help='Recursively search for a strategy in the strategies folder.',
+        action='store_true',
+    ),
     # Main options
     "strategy": Arg(
         '-s', '--strategy',

@@ -438,6 +443,11 @@ AVAILABLE_CLI_OPTIONS = {
         default=['1m', '5m'],
         nargs='+',
     ),
+    "prepend_data": Arg(
+        '--prepend',
+        help='Allow data prepending.',
+        action='store_true',
+    ),
     "erase": Arg(
         '--erase',
         help='Clean all existing data for the selected exchange/pairs/timeframes.',

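With the option definitions and the argument lists above wired together, the new flags are used as, for example (hypothetical pair and timerange values): freqtrade download-data -p BTC/USDT -t 5m --timerange 20200101- --prepend to fetch candles older than the locally cached history, and freqtrade list-strategies --recursive-strategy-search to pick up strategies nested in subfolders of the strategies directory.
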
@@ -85,6 +85,7 @@ def start_download_data(args: Dict[str, Any]) -> None:
             new_pairs_days=config['new_pairs_days'],
             erase=bool(config.get('erase')), data_format=config['dataformat_ohlcv'],
             trading_mode=config.get('trading_mode', 'spot'),
+            prepend=config.get('prepend_data', False)
         )

     except KeyboardInterrupt:

@@ -41,7 +41,7 @@ def start_list_exchanges(args: Dict[str, Any]) -> None:
         print(tabulate(exchanges, headers=['Exchange name', 'Valid', 'reason']))


-def _print_objs_tabular(objs: List, print_colorized: bool) -> None:
+def _print_objs_tabular(objs: List, print_colorized: bool, base_dir: Path) -> None:
     if print_colorized:
         colorama_init(autoreset=True)
         red = Fore.RED

@@ -55,7 +55,7 @@ def _print_objs_tabular(objs: List, print_colorized: bool) -> None:
     names = [s['name'] for s in objs]
     objs_to_print = [{
         'name': s['name'] if s['name'] else "--",
-        'location': s['location'].name,
+        'location': s['location'].relative_to(base_dir),
         'status': (red + "LOAD FAILED" + reset if s['class'] is None
                    else "OK" if names.count(s['name']) == 1
                    else yellow + "DUPLICATE NAME" + reset)

@@ -77,7 +77,8 @@ def start_list_strategies(args: Dict[str, Any]) -> None:
     config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

     directory = Path(config.get('strategy_path', config['user_data_dir'] / USERPATH_STRATEGIES))
-    strategy_objs = StrategyResolver.search_all_objects(directory, not args['print_one_column'])
+    strategy_objs = StrategyResolver.search_all_objects(
+        directory, not args['print_one_column'], config.get('recursive_strategy_search', False))
     # Sort alphabetically
     strategy_objs = sorted(strategy_objs, key=lambda x: x['name'])
     for obj in strategy_objs:

@@ -89,7 +90,7 @@ def start_list_strategies(args: Dict[str, Any]) -> None:
     if args['print_one_column']:
         print('\n'.join([s['name'] for s in strategy_objs]))
     else:
-        _print_objs_tabular(strategy_objs, config.get('print_colorized', False))
+        _print_objs_tabular(strategy_objs, config.get('print_colorized', False), directory)


 def start_list_timeframes(args: Dict[str, Any]) -> None:

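Why pass base_dir down and print location.relative_to(base_dir)? With recursive search enabled, two strategy files in different subfolders may share a name, and the bare file name would be ambiguous. A minimal sketch (hypothetical paths):

    from pathlib import Path

    base_dir = Path("user_data/strategies")
    loc = base_dir / "experimental" / "MyStrategy.py"
    print(loc.name)                   # MyStrategy.py - subfolder information is lost
    print(loc.relative_to(base_dir))  # experimental/MyStrategy.py
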
@@ -16,4 +16,4 @@ class PeriodicCache(TTLCache):
             return ts - offset

         # Init with slight offset
-        super().__init__(maxsize=maxsize, ttl=ttl-1e-5, timer=local_timer, getsizeof=getsizeof)
+        super().__init__(maxsize=maxsize, ttl=ttl - 1e-5, timer=local_timer, getsizeof=getsizeof)

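The hunk above is whitespace-only, but the 1e-5 offset is what makes PeriodicCache expire entries exactly at period boundaries instead of one full period late. A rough, simplified sketch of the timer idea (not the class verbatim):

    import time
    from cachetools import TTLCache

    ttl = 3600  # entries should die at every full hour

    def local_timer():
        # Quantize "now" so all keys written within one period share a timestamp.
        ts = time.time()
        return ts - (ts % ttl)

    # ttl - 1e-5 ensures the very first tick of the next period already
    # counts as "expired" rather than surviving one extra period.
    cache = TTLCache(maxsize=100, ttl=ttl - 1e-5, timer=local_timer)
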
@@ -22,6 +22,6 @@ def setup_utils_configuration(args: Dict[str, Any], method: RunMode) -> Dict[str

     # Ensure these modes are using Dry-run
     config['dry_run'] = True
-    validate_config_consistency(config)
+    validate_config_consistency(config, preliminary=True)

     return config

@@ -39,7 +39,7 @@ def _extend_validator(validator_class):
 FreqtradeValidator = _extend_validator(Draft4Validator)


-def validate_config_schema(conf: Dict[str, Any]) -> Dict[str, Any]:
+def validate_config_schema(conf: Dict[str, Any], preliminary: bool = False) -> Dict[str, Any]:
     """
     Validate that the configuration follows the Config Schema
     :param conf: Config in JSON format

@@ -49,7 +49,10 @@ def validate_config_schema(conf: Dict[str, Any]) -> Dict[str, Any]:
     if conf.get('runmode', RunMode.OTHER) in (RunMode.DRY_RUN, RunMode.LIVE):
         conf_schema['required'] = constants.SCHEMA_TRADE_REQUIRED
     elif conf.get('runmode', RunMode.OTHER) in (RunMode.BACKTEST, RunMode.HYPEROPT):
-        conf_schema['required'] = constants.SCHEMA_BACKTEST_REQUIRED
+        if preliminary:
+            conf_schema['required'] = constants.SCHEMA_BACKTEST_REQUIRED
+        else:
+            conf_schema['required'] = constants.SCHEMA_BACKTEST_REQUIRED_FINAL
     else:
         conf_schema['required'] = constants.SCHEMA_MINIMAL_REQUIRED
     try:

@@ -64,7 +67,7 @@ def validate_config_schema(conf: Dict[str, Any]) -> Dict[str, Any]:
     )


-def validate_config_consistency(conf: Dict[str, Any]) -> None:
+def validate_config_consistency(conf: Dict[str, Any], preliminary: bool = False) -> None:
     """
     Validate the configuration consistency.
     Should be run after loading both configuration and strategy,

@@ -85,7 +88,7 @@ def validate_config_consistency(conf: Dict[str, Any]) -> None:

     # validate configuration before returning
     logger.info('Validating configuration ...')
-    validate_config_schema(conf)
+    validate_config_schema(conf, preliminary=preliminary)


 def _validate_unlimited_amount(conf: Dict[str, Any]) -> None:

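Net effect of the preliminary flag, sketched with abridged constants (the full required-key lists live in the constants hunk further down; only the relationship shown in this diff is assumed):

    SCHEMA_BACKTEST_REQUIRED = ['exchange', 'dataformat_ohlcv', 'dataformat_trades']  # abridged
    SCHEMA_BACKTEST_REQUIRED_FINAL = SCHEMA_BACKTEST_REQUIRED + ['stoploss', 'minimal_roi']

    def required_keys(preliminary: bool) -> list:
        # Before the strategy is merged into the config, stoploss/minimal_roi
        # may legitimately still be missing - enforce only the base keys then.
        return SCHEMA_BACKTEST_REQUIRED if preliminary else SCHEMA_BACKTEST_REQUIRED_FINAL

    assert 'stoploss' not in required_keys(preliminary=True)
    assert 'stoploss' in required_keys(preliminary=False)
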
@@ -248,6 +248,12 @@ class Configuration:
         self._args_to_config(config, argname='strategy_list',
                              logstring='Using strategy list of {} strategies', logfun=len)

+        self._args_to_config(
+            config,
+            argname='recursive_strategy_search',
+            logstring='Recursively searching for a strategy in the strategies folder.',
+        )
+
         self._args_to_config(config, argname='timeframe',
                              logstring='Overriding timeframe with Command line argument')

@@ -387,6 +393,8 @@ class Configuration:
         self._args_to_config(config, argname='trade_source',
                              logstring='Using trades from: {}')

+        self._args_to_config(config, argname='prepend_data',
+                             logstring='Prepend detected. Allowing data prepending.')
         self._args_to_config(config, argname='erase',
                              logstring='Erase detected. Deleting existing data.')

@@ -31,7 +31,7 @@ def log_config_error_range(path: str, errmsg: str) -> str:
         offset = int(offsetlist[0])
         text = Path(path).read_text()
         # Fetch an offset of 80 characters around the error line
-        subtext = text[offset-min(80, offset):offset+80]
+        subtext = text[offset - min(80, offset):offset + 80]
         segments = subtext.split('\n')
         if len(segments) > 3:
             # Remove first and last lines, to avoid odd truncations

@@ -75,19 +75,41 @@ def load_config_file(path: str) -> Dict[str, Any]:
     return config


-def load_from_files(files: List[str]) -> Dict[str, Any]:
+def load_from_files(files: List[str], base_path: Path = None, level: int = 0) -> Dict[str, Any]:
+    """
+    Recursively load configuration files if specified.
+    Sub-files are assumed to be relative to the initial config.
+    """
     config: Dict[str, Any] = {}
+    if level > 5:
+        raise OperationalException("Config loop detected.")

     if not files:
         return deepcopy(MINIMAL_CONFIG)
+    files_loaded = []
     # We expect here a list of config filenames
-    for path in files:
-        logger.info(f'Using config: {path} ...')
-        # Merge config options, overwriting old values
-        config = deep_merge_dicts(load_config_file(path), config)
-
-    config['config_files'] = files
+    for filename in files:
+        logger.info(f'Using config: {filename} ...')
+        if filename == '-':
+            # Immediately load stdin and return
+            return load_config_file(filename)
+        file = Path(filename)
+        if base_path:
+            # Prepend basepath to allow for relative assignments
+            file = base_path / file
+
+        config_tmp = load_config_file(str(file))
+        if 'add_config_files' in config_tmp:
+            config_sub = load_from_files(
+                config_tmp['add_config_files'], file.resolve().parent, level + 1)
+            files_loaded.extend(config_sub.get('config_files', []))
+            config_tmp = deep_merge_dicts(config_tmp, config_sub)
+
+        files_loaded.insert(0, str(file))
+
+        # Merge config options, overwriting prior values
+        config = deep_merge_dicts(config_tmp, config)
+
+    config['config_files'] = files_loaded

     return config

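The merge order this establishes: within one file, keys of the including file beat keys pulled in via add_config_files; across the top-level list, later files beat earlier ones. A hedged sketch with plain dicts (hypothetical keys, and assuming deep_merge_dicts(source, destination) lets source win, as the hunk's usage suggests):

    def deep_merge(source: dict, destination: dict) -> dict:
        # Values from `source` win; nested dicts are merged recursively.
        for key, value in source.items():
            if isinstance(value, dict):
                destination[key] = deep_merge(value, destination.get(key, {}))
            else:
                destination[key] = value
        return destination

    parent = {'max_open_trades': 3}     # the file containing "add_config_files"
    included = {'max_open_trades': 10, 'stake_currency': 'USDT'}

    merged = deep_merge(parent, included)  # mirrors deep_merge_dicts(config_tmp, config_sub)
    assert merged == {'max_open_trades': 3, 'stake_currency': 'USDT'}
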
@@ -14,7 +14,7 @@ PROCESS_THROTTLE_SECS = 5  # sec
 HYPEROPT_EPOCH = 100  # epochs
 RETRY_TIMEOUT = 30  # sec
 TIMEOUT_UNITS = ['minutes', 'seconds']
-EXPORT_OPTIONS = ['none', 'trades']
+EXPORT_OPTIONS = ['none', 'trades', 'signals']
 DEFAULT_DB_PROD_URL = 'sqlite:///tradesv3.sqlite'
 DEFAULT_DB_DRYRUN_URL = 'sqlite:///tradesv3.dryrun.sqlite'
 UNLIMITED_STAKE_AMOUNT = 'unlimited'

@@ -28,7 +28,8 @@ HYPEROPT_LOSS_BUILTIN = ['ShortTradeDurHyperOptLoss', 'OnlyProfitHyperOptLoss',
                          'SharpeHyperOptLoss', 'SharpeHyperOptLossDaily',
                          'SortinoHyperOptLoss', 'SortinoHyperOptLossDaily',
                          'CalmarHyperOptLoss',
-                         'MaxDrawDownHyperOptLoss', 'ProfitDrawDownHyperOptLoss']
+                         'MaxDrawDownHyperOptLoss', 'MaxDrawDownRelativeHyperOptLoss',
+                         'ProfitDrawDownHyperOptLoss']
 AVAILABLE_PAIRLISTS = ['StaticPairList', 'VolumePairList',
                        'AgeFilter', 'OffsetFilter', 'PerformanceFilter',
                        'PrecisionFilter', 'PriceFilter', 'RangeStabilityFilter',

@@ -91,15 +92,14 @@ SUPPORTED_FIAT = [
 ]

 MINIMAL_CONFIG = {
-    'stake_currency': '',
-    'dry_run': True,
-    'exchange': {
-        'name': '',
-        'key': '',
-        'secret': '',
-        'pair_whitelist': [],
-        'ccxt_async_config': {
-            'enableRateLimit': True,
+    "stake_currency": "",
+    "dry_run": True,
+    "exchange": {
+        "name": "",
+        "key": "",
+        "secret": "",
+        "pair_whitelist": [],
+        "ccxt_async_config": {
         }
     }
 }

@@ -463,6 +463,10 @@ SCHEMA_BACKTEST_REQUIRED = [
     'dataformat_ohlcv',
     'dataformat_trades',
 ]
+SCHEMA_BACKTEST_REQUIRED_FINAL = SCHEMA_BACKTEST_REQUIRED + [
+    'stoploss',
+    'minimal_roi',
+]

 SCHEMA_MINIMAL_REQUIRED = [
     'exchange',

@@ -5,14 +5,15 @@ import logging
 from copy import copy
 from datetime import datetime, timezone
 from pathlib import Path
-from typing import Any, Dict, List, Optional, Tuple, Union
+from typing import Any, Dict, List, Optional, Union

 import numpy as np
 import pandas as pd

 from freqtrade.constants import LAST_BT_RESULT_FN
 from freqtrade.exceptions import OperationalException
-from freqtrade.misc import get_backtest_metadata_filename, json_load
+from freqtrade.misc import json_load
+from freqtrade.optimize.backtest_caching import get_backtest_metadata_filename
 from freqtrade.persistence import LocalTrade, Trade, init_db

@@ -149,7 +150,14 @@ def load_backtest_stats(filename: Union[Path, str]) -> Dict[str, Any]:
     return data


-def _load_and_merge_backtest_result(strategy_name: str, filename: Path, results: Dict[str, Any]):
+def load_and_merge_backtest_result(strategy_name: str, filename: Path, results: Dict[str, Any]):
+    """
+    Load one strategy from multi-strategy result
+    and merge it with results
+    :param strategy_name: Name of the strategy contained in the result
+    :param filename: Backtest-result-filename to load
+    :param results: dict to merge the result to.
+    """
     bt_data = load_backtest_stats(filename)
     for k in ('metadata', 'strategy'):
         results[k][strategy_name] = bt_data[k][strategy_name]

@@ -160,6 +168,30 @@ def load_and_merge_backtest_result(strategy_name: str, filename: Path, results:
             break


+def _get_backtest_files(dirname: Path) -> List[Path]:
+    return list(reversed(sorted(dirname.glob('backtest-result-*-[0-9][0-9].json'))))
+
+
+def get_backtest_resultlist(dirname: Path):
+    """
+    Get list of backtest results read from metadata files
+    """
+    results = []
+    for filename in _get_backtest_files(dirname):
+        metadata = load_backtest_metadata(filename)
+        if not metadata:
+            continue
+        for s, v in metadata.items():
+            results.append({
+                'filename': filename.name,
+                'strategy': s,
+                'run_id': v['run_id'],
+                'backtest_start_time': v['backtest_start_time'],
+            })
+    return results
+
+
 def find_existing_backtest_stats(dirname: Union[Path, str], run_ids: Dict[str, str],
                                  min_backtest_date: datetime = None) -> Dict[str, Any]:
     """

@@ -179,7 +211,7 @@ def find_existing_backtest_stats(dirname: Union[Path, str], run_ids: Dict[str, s
     }

     # Weird glob expression here avoids including .meta.json files.
-    for filename in reversed(sorted(dirname.glob('backtest-result-*-[0-9][0-9].json'))):
+    for filename in _get_backtest_files(dirname):
         metadata = load_backtest_metadata(filename)
         if not metadata:
             # Files are sorted from newest to oldest. When file without metadata is encountered it

@@ -193,14 +225,7 @@ def find_existing_backtest_stats(dirname: Union[Path, str], run_ids: Dict[str, s
             continue

         if min_backtest_date is not None:
-            try:
-                backtest_date = strategy_metadata['backtest_start_time']
-            except KeyError:
-                # TODO: this can be removed starting from feb 2022
-                # The metadata-file without start_time was only available in develop
-                # and was never included in an official release.
-                # Older metadata format without backtest time, too old to consider.
-                return results
+            backtest_date = strategy_metadata['backtest_start_time']
             backtest_date = datetime.fromtimestamp(backtest_date, tz=timezone.utc)
             if backtest_date < min_backtest_date:
                 # Do not use a cached result for this strategy as first result is too old.

@@ -209,7 +234,7 @@ def find_existing_backtest_stats(dirname: Union[Path, str], run_ids: Dict[str, s

             if strategy_metadata['run_id'] == run_id:
                 del run_ids[strategy_name]
-                _load_and_merge_backtest_result(strategy_name, filename, results)
+                load_and_merge_backtest_result(strategy_name, filename, results)

             if len(run_ids) == 0:
                 break

@@ -375,157 +400,3 @@ def extract_trades_of_period(dataframe: pd.DataFrame, trades: pd.DataFrame,
     trades = trades.loc[(trades['open_date'] >= trades_start) &
                         (trades['close_date'] <= trades_stop)]
     return trades
-
-
-def calculate_market_change(data: Dict[str, pd.DataFrame], column: str = "close") -> float:
-    """
-    Calculate market change based on "column".
-    Calculation is done by taking the first non-null and the last non-null element of each column
-    and calculating the pctchange as "(last - first) / first".
-    Then the results per pair are combined as mean.
-
-    :param data: Dict of Dataframes, dict key should be pair.
-    :param column: Column in the original dataframes to use
-    :return:
-    """
-    tmp_means = []
-    for pair, df in data.items():
-        start = df[column].dropna().iloc[0]
-        end = df[column].dropna().iloc[-1]
-        tmp_means.append((end - start) / start)
-
-    return float(np.mean(tmp_means))
-
-
-def combine_dataframes_with_mean(data: Dict[str, pd.DataFrame],
-                                 column: str = "close") -> pd.DataFrame:
-    """
-    Combine multiple dataframes "column"
-    :param data: Dict of Dataframes, dict key should be pair.
-    :param column: Column in the original dataframes to use
-    :return: DataFrame with the column renamed to the dict key, and a column
-        named mean, containing the mean of all pairs.
-    :raise: ValueError if no data is provided.
-    """
-    df_comb = pd.concat([data[pair].set_index('date').rename(
-        {column: pair}, axis=1)[pair] for pair in data], axis=1)
-
-    df_comb['mean'] = df_comb.mean(axis=1)
-
-    return df_comb
-
-
-def create_cum_profit(df: pd.DataFrame, trades: pd.DataFrame, col_name: str,
-                      timeframe: str) -> pd.DataFrame:
-    """
-    Adds a column `col_name` with the cumulative profit for the given trades array.
-    :param df: DataFrame with date index
-    :param trades: DataFrame containing trades (requires columns close_date and profit_abs)
-    :param col_name: Column name that will be assigned the results
-    :param timeframe: Timeframe used during the operations
-    :return: Returns df with one additional column, col_name, containing the cumulative profit.
-    :raise: ValueError if trade-dataframe was found empty.
-    """
-    if len(trades) == 0:
-        raise ValueError("Trade dataframe empty.")
-    from freqtrade.exchange import timeframe_to_minutes
-    timeframe_minutes = timeframe_to_minutes(timeframe)
-    # Resample to timeframe to make sure trades match candles
-    _trades_sum = trades.resample(f'{timeframe_minutes}min', on='close_date'
-                                  )[['profit_abs']].sum()
-    df.loc[:, col_name] = _trades_sum['profit_abs'].cumsum()
-    # Set first value to 0
-    df.loc[df.iloc[0].name, col_name] = 0
-    # FFill to get continuous
-    df[col_name] = df[col_name].ffill()
-    return df
-
-
-def _calc_drawdown_series(profit_results: pd.DataFrame, *, date_col: str, value_col: str
-                          ) -> pd.DataFrame:
-    max_drawdown_df = pd.DataFrame()
-    max_drawdown_df['cumulative'] = profit_results[value_col].cumsum()
-    max_drawdown_df['high_value'] = max_drawdown_df['cumulative'].cummax()
-    max_drawdown_df['drawdown'] = max_drawdown_df['cumulative'] - max_drawdown_df['high_value']
-    max_drawdown_df['date'] = profit_results.loc[:, date_col]
-    return max_drawdown_df
-
-
-def calculate_underwater(trades: pd.DataFrame, *, date_col: str = 'close_date',
-                         value_col: str = 'profit_ratio'
-                         ):
-    """
-    Calculate max drawdown and the corresponding close dates
-    :param trades: DataFrame containing trades (requires columns close_date and profit_ratio)
-    :param date_col: Column in DataFrame to use for dates (defaults to 'close_date')
-    :param value_col: Column in DataFrame to use for values (defaults to 'profit_ratio')
-    :return: Tuple (float, highdate, lowdate, highvalue, lowvalue) with absolute max drawdown,
-             high and low time and high and low value.
-    :raise: ValueError if trade-dataframe was found empty.
-    """
-    if len(trades) == 0:
-        raise ValueError("Trade dataframe empty.")
-    profit_results = trades.sort_values(date_col).reset_index(drop=True)
-    max_drawdown_df = _calc_drawdown_series(profit_results, date_col=date_col, value_col=value_col)
-
-    return max_drawdown_df
-
-
-def calculate_max_drawdown(trades: pd.DataFrame, *, date_col: str = 'close_date',
-                           value_col: str = 'profit_abs', starting_balance: float = 0
-                           ) -> Tuple[float, pd.Timestamp, pd.Timestamp, float, float, float]:
-    """
-    Calculate max drawdown and the corresponding close dates
-    :param trades: DataFrame containing trades (requires columns close_date and profit_ratio)
-    :param date_col: Column in DataFrame to use for dates (defaults to 'close_date')
-    :param value_col: Column in DataFrame to use for values (defaults to 'profit_abs')
-    :param starting_balance: Portfolio starting balance - properly calculate relative drawdown.
-    :return: Tuple (float, highdate, lowdate, highvalue, lowvalue, relative_drawdown)
-             with absolute max drawdown, high and low time and high and low value,
-             and the relative account drawdown
-    :raise: ValueError if trade-dataframe was found empty.
-    """
-    if len(trades) == 0:
-        raise ValueError("Trade dataframe empty.")
-    profit_results = trades.sort_values(date_col).reset_index(drop=True)
-    max_drawdown_df = _calc_drawdown_series(profit_results, date_col=date_col, value_col=value_col)
-
-    idxmin = max_drawdown_df['drawdown'].idxmin()
-    if idxmin == 0:
-        raise ValueError("No losing trade, therefore no drawdown.")
-    high_date = profit_results.loc[max_drawdown_df.iloc[:idxmin]['high_value'].idxmax(), date_col]
-    low_date = profit_results.loc[idxmin, date_col]
-    high_val = max_drawdown_df.loc[max_drawdown_df.iloc[:idxmin]
-                                   ['high_value'].idxmax(), 'cumulative']
-    low_val = max_drawdown_df.loc[idxmin, 'cumulative']
-    max_drawdown_rel = 0.0
-    if high_val + starting_balance != 0:
-        max_drawdown_rel = (high_val - low_val) / (high_val + starting_balance)
-
-    return (
-        abs(min(max_drawdown_df['drawdown'])),
-        high_date,
-        low_date,
-        high_val,
-        low_val,
-        max_drawdown_rel
-    )
-
-
-def calculate_csum(trades: pd.DataFrame, starting_balance: float = 0) -> Tuple[float, float]:
-    """
-    Calculate min/max cumsum of trades, to show if the wallet/stake amount ratio is sane
-    :param trades: DataFrame containing trades (requires columns close_date and profit_percent)
-    :param starting_balance: Add starting balance to results, to show the wallets high / low points
-    :return: Tuple (float, float) with cumsum of profit_abs
-    :raise: ValueError if trade-dataframe was found empty.
-    """
-    if len(trades) == 0:
-        raise ValueError("Trade dataframe empty.")
-
-    csum_df = pd.DataFrame()
-    csum_df['sum'] = trades['profit_abs'].cumsum()
-    csum_min = csum_df['sum'].min() + starting_balance
-    csum_max = csum_df['sum'].max() + starting_balance
-
-    return csum_min, csum_max

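All of the helpers deleted in this hunk reappear in the new freqtrade/data/metrics.py further down. The functional differences on the way: the drawdown series gains a drawdown_relative column computed against starting_balance, calculate_max_drawdown gains a relative switch, and calculate_cagr is added.
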
@@ -139,8 +139,9 @@ def _load_cached_data_for_updating(
     timeframe: str,
     timerange: Optional[TimeRange],
     data_handler: IDataHandler,
-    candle_type: CandleType
-) -> Tuple[DataFrame, Optional[int]]:
+    candle_type: CandleType,
+    prepend: bool = False,
+) -> Tuple[DataFrame, Optional[int], Optional[int]]:
     """
     Load cached data to download more data.
     If timerange is passed in, checks whether data from before the stored data will be

@@ -150,9 +151,12 @@ def _load_cached_data_for_updating(
     Note: Only used by download_pair_history().
     """
     start = None
+    end = None
     if timerange:
         if timerange.starttype == 'date':
             start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
+        if timerange.stoptype == 'date':
+            end = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)

     # Intentionally don't pass timerange in - since we need to load the full dataset.
     data = data_handler.ohlcv_load(pair, timeframe=timeframe,

@@ -160,14 +164,17 @@ def _load_cached_data_for_updating(
                                    drop_incomplete=True, warn_no_data=False,
                                    candle_type=candle_type)
     if not data.empty:
-        if start and start < data.iloc[0]['date']:
+        if not prepend and start and start < data.iloc[0]['date']:
             # Earlier data than existing data requested, redownload all
             data = DataFrame(columns=DEFAULT_DATAFRAME_COLUMNS)
         else:
-            start = data.iloc[-1]['date']
+            if prepend:
+                end = data.iloc[0]['date']
+            else:
+                start = data.iloc[-1]['date']

     start_ms = int(start.timestamp() * 1000) if start else None
-    return data, start_ms
+    end_ms = int(end.timestamp() * 1000) if end else None
+    return data, start_ms, end_ms

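A quick trace of the new prepend branch (hypothetical dates):

    # Cached candles cover 2021-06-01 .. 2022-01-01; the user passes
    # --timerange 20210101- together with --prepend:
    #   start -> 2021-01-01   (from timerange.startts)
    #   end   -> 2021-06-01   (data.iloc[0]['date'], the first cached candle)
    # Only the missing head [start, end) is downloaded, instead of
    # discarding the cache and re-fetching the whole range.
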
@@ -179,6 +186,8 @@ def _download_pair_history(pair: str, *,
                            data_handler: IDataHandler = None,
                            timerange: Optional[TimeRange] = None,
                            candle_type: CandleType,
+                           erase: bool = False,
+                           prepend: bool = False,
                            ) -> bool:
     """
     Download latest candles from the exchange for the pair and timeframe passed in parameters

@@ -186,25 +195,31 @@ def _download_pair_history(pair: str, *,
     exists in a cache. If timerange starts earlier than the data in the cache,
     the full data will be redownloaded

     Based on @Rybolov work: https://github.com/rybolov/freqtrade-data

     :param pair: pair to download
     :param timeframe: Timeframe (e.g "5m")
     :param timerange: range of time to download
     :param candle_type: Any of the enum CandleType (must match trading mode!)
+    :param erase: Erase existing data
     :return: bool with success state
     """
     data_handler = get_datahandler(datadir, data_handler=data_handler)

     try:
-        logger.info(
-            f'Download history data for pair: "{pair}" ({process}), timeframe: {timeframe}, '
-            f'candle type: {candle_type} and store in {datadir}.'
-        )
+        if erase:
+            if data_handler.ohlcv_purge(pair, timeframe, candle_type=candle_type):
+                logger.info(f'Deleting existing data for pair {pair}, {timeframe}, {candle_type}.')

-        data, since_ms = _load_cached_data_for_updating(pair, timeframe, timerange,
-                                                        data_handler=data_handler,
-                                                        candle_type=candle_type)
+        data, since_ms, until_ms = _load_cached_data_for_updating(
+            pair, timeframe, timerange,
+            data_handler=data_handler,
+            candle_type=candle_type,
+            prepend=prepend)
+
+        logger.info(f'({process}) - Download history data for "{pair}", {timeframe}, '
+                    f'{candle_type} and store in {datadir}.'
+                    f'From {format_ms_time(since_ms) if since_ms else "start"} to '
+                    f'{format_ms_time(until_ms) if until_ms else "now"}'
+                    )

         logger.debug("Current Start: %s",
                      f"{data.iloc[0]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')

@@ -219,6 +234,7 @@ def _download_pair_history(pair: str, *,
                            days=-new_pairs_days).int_timestamp * 1000,
             is_new_pair=data.empty,
             candle_type=candle_type,
+            until_ms=until_ms if until_ms else None
         )
         # TODO: Maybe move parsing to exchange class (?)
         new_dataframe = ohlcv_to_dataframe(new_data, timeframe, pair,

@@ -251,6 +267,7 @@ def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes
                                 timerange: Optional[TimeRange] = None,
                                 new_pairs_days: int = 30, erase: bool = False,
                                 data_format: str = None,
+                                prepend: bool = False,
                                 ) -> List[str]:
     """
     Refresh stored ohlcv data for backtesting and hyperopt operations.

@@ -267,35 +284,28 @@ def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes
             continue
         for timeframe in timeframes:

-            if erase:
-                if data_handler.ohlcv_purge(pair, timeframe, candle_type=candle_type):
-                    logger.info(f'Deleting existing data for pair {pair}, interval {timeframe}.')
-
-            logger.info(f'Downloading pair {pair}, interval {timeframe}.')
             process = f'{idx}/{len(pairs)}'
             _download_pair_history(pair=pair, process=process,
                                    datadir=datadir, exchange=exchange,
                                    timerange=timerange, data_handler=data_handler,
                                    timeframe=str(timeframe), new_pairs_days=new_pairs_days,
-                                   candle_type=candle_type)
+                                   candle_type=candle_type,
+                                   erase=erase, prepend=prepend)
         if trading_mode == 'futures':
             # Predefined candletype (and timeframe) depending on exchange
             # Downloads what is necessary to backtest based on futures data.
-            timeframe = exchange._ft_has['mark_ohlcv_timeframe']
+            tf_mark = exchange._ft_has['mark_ohlcv_timeframe']
             fr_candle_type = CandleType.from_string(exchange._ft_has['mark_ohlcv_price'])
             # All exchanges need FundingRate for futures trading.
             # The timeframe is aligned to the mark-price timeframe.
             for funding_candle_type in (CandleType.FUNDING_RATE, fr_candle_type):
                 # TODO: this could be in most parts to the above.
-                if erase:
-                    if data_handler.ohlcv_purge(pair, timeframe, candle_type=funding_candle_type):
-                        logger.info(
-                            f'Deleting existing data for pair {pair}, interval {timeframe}.')
                 _download_pair_history(pair=pair, process=process,
                                        datadir=datadir, exchange=exchange,
                                        timerange=timerange, data_handler=data_handler,
-                                       timeframe=str(timeframe), new_pairs_days=new_pairs_days,
-                                       candle_type=funding_candle_type)
+                                       timeframe=str(tf_mark), new_pairs_days=new_pairs_days,
+                                       candle_type=funding_candle_type,
+                                       erase=erase, prepend=prepend)

     return pairs_not_available

@@ -313,8 +323,9 @@ def _download_trades_history(exchange: Exchange,
     try:

         until = None
-        if (timerange and timerange.starttype == 'date'):
-            since = timerange.startts * 1000
+        if timerange:
+            if timerange.starttype == 'date':
+                since = timerange.startts * 1000
+            if timerange.stoptype == 'date':
+                until = timerange.stopts * 1000
         else:

@@ -5,7 +5,7 @@ Its subclasses handle loading and storing data from disk.
 """
 import logging
 import re
-from abc import ABC, abstractclassmethod, abstractmethod
+from abc import ABC, abstractmethod
 from copy import deepcopy
 from datetime import datetime, timezone
 from pathlib import Path

@@ -38,7 +38,8 @@ class IDataHandler(ABC):
         """
         raise NotImplementedError()

-    @abstractclassmethod
+    @classmethod
+    @abstractmethod
     def ohlcv_get_available_data(
             cls, datadir: Path, trading_mode: TradingMode) -> ListPairsWithTimeframes:
         """

@@ -48,7 +49,8 @@ class IDataHandler(ABC):
         :return: List of Tuples of (pair, timeframe)
         """

-    @abstractclassmethod
+    @classmethod
+    @abstractmethod
     def ohlcv_get_pairs(cls, datadir: Path, timeframe: str, candle_type: CandleType) -> List[str]:
         """
         Returns a list of all pairs with ohlcv data available in this datadir

@@ -118,7 +120,8 @@ class IDataHandler(ABC):
         :param candle_type: Any of the enum CandleType (must match trading mode!)
         """

-    @abstractclassmethod
+    @classmethod
+    @abstractmethod
     def trades_get_pairs(cls, datadir: Path) -> List[str]:
         """
         Returns a list of all pairs for which trade data is available in this

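abc.abstractclassmethod has been deprecated since Python 3.3; the stacked form used in these hunks is the supported spelling. Reduced sketch:

    from abc import ABC, abstractmethod

    class Handler(ABC):  # reduced stand-in for IDataHandler
        @classmethod
        @abstractmethod
        def trades_get_pairs(cls, datadir):
            """Concrete handlers must provide this classmethod."""
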
freqtrade/data/metrics.py (new file)
@@ -0,0 +1,192 @@
+import logging
+from typing import Dict, Tuple
+
+import numpy as np
+import pandas as pd
+
+
+logger = logging.getLogger(__name__)
+
+
+def calculate_market_change(data: Dict[str, pd.DataFrame], column: str = "close") -> float:
+    """
+    Calculate market change based on "column".
+    Calculation is done by taking the first non-null and the last non-null element of each column
+    and calculating the pctchange as "(last - first) / first".
+    Then the results per pair are combined as mean.
+
+    :param data: Dict of Dataframes, dict key should be pair.
+    :param column: Column in the original dataframes to use
+    :return:
+    """
+    tmp_means = []
+    for pair, df in data.items():
+        start = df[column].dropna().iloc[0]
+        end = df[column].dropna().iloc[-1]
+        tmp_means.append((end - start) / start)
+
+    return float(np.mean(tmp_means))
+
+
+def combine_dataframes_with_mean(data: Dict[str, pd.DataFrame],
+                                 column: str = "close") -> pd.DataFrame:
+    """
+    Combine multiple dataframes "column"
+    :param data: Dict of Dataframes, dict key should be pair.
+    :param column: Column in the original dataframes to use
+    :return: DataFrame with the column renamed to the dict key, and a column
+        named mean, containing the mean of all pairs.
+    :raise: ValueError if no data is provided.
+    """
+    df_comb = pd.concat([data[pair].set_index('date').rename(
+        {column: pair}, axis=1)[pair] for pair in data], axis=1)
+
+    df_comb['mean'] = df_comb.mean(axis=1)
+
+    return df_comb
+
+
+def create_cum_profit(df: pd.DataFrame, trades: pd.DataFrame, col_name: str,
+                      timeframe: str) -> pd.DataFrame:
+    """
+    Adds a column `col_name` with the cumulative profit for the given trades array.
+    :param df: DataFrame with date index
+    :param trades: DataFrame containing trades (requires columns close_date and profit_abs)
+    :param col_name: Column name that will be assigned the results
+    :param timeframe: Timeframe used during the operations
+    :return: Returns df with one additional column, col_name, containing the cumulative profit.
+    :raise: ValueError if trade-dataframe was found empty.
+    """
+    if len(trades) == 0:
+        raise ValueError("Trade dataframe empty.")
+    from freqtrade.exchange import timeframe_to_minutes
+    timeframe_minutes = timeframe_to_minutes(timeframe)
+    # Resample to timeframe to make sure trades match candles
+    _trades_sum = trades.resample(f'{timeframe_minutes}min', on='close_date'
+                                  )[['profit_abs']].sum()
+    df.loc[:, col_name] = _trades_sum['profit_abs'].cumsum()
+    # Set first value to 0
+    df.loc[df.iloc[0].name, col_name] = 0
+    # FFill to get continuous
+    df[col_name] = df[col_name].ffill()
+    return df

+def _calc_drawdown_series(profit_results: pd.DataFrame, *, date_col: str, value_col: str,
+                          starting_balance: float) -> pd.DataFrame:
+    max_drawdown_df = pd.DataFrame()
+    max_drawdown_df['cumulative'] = profit_results[value_col].cumsum()
+    max_drawdown_df['high_value'] = max_drawdown_df['cumulative'].cummax()
+    max_drawdown_df['drawdown'] = max_drawdown_df['cumulative'] - max_drawdown_df['high_value']
+    max_drawdown_df['date'] = profit_results.loc[:, date_col]
+    if starting_balance:
+        cumulative_balance = starting_balance + max_drawdown_df['cumulative']
+        max_balance = starting_balance + max_drawdown_df['high_value']
+        max_drawdown_df['drawdown_relative'] = ((max_balance - cumulative_balance) / max_balance)
+    else:
+        # NOTE: This is not completely accurate,
+        # but might be good enough if starting_balance is not available
+        max_drawdown_df['drawdown_relative'] = (
+            (max_drawdown_df['high_value'] - max_drawdown_df['cumulative'])
+            / max_drawdown_df['high_value'])
+    return max_drawdown_df

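Worked numbers for the new drawdown_relative column (hypothetical profits, using the helper added above): with starting_balance=1000, cumulative profit peaks at 100 and falls back to 50, so the account slips from 1100 to 1050 - a relative drawdown of 50/1100, about 4.5%:

    import pandas as pd

    profits = pd.DataFrame({
        'close_date': pd.date_range('2022-01-01', periods=3, freq='D'),
        'profit_abs': [100.0, -50.0, 25.0],
    })
    df = _calc_drawdown_series(profits, date_col='close_date',
                               value_col='profit_abs', starting_balance=1000.0)
    # cumulative: [100, 50, 75]; high_value: [100, 100, 100]
    assert abs(df['drawdown_relative'].max() - 50 / 1100) < 1e-12
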
+def calculate_underwater(trades: pd.DataFrame, *, date_col: str = 'close_date',
+                         value_col: str = 'profit_ratio', starting_balance: float = 0.0
+                         ):
+    """
+    Calculate max drawdown and the corresponding close dates
+    :param trades: DataFrame containing trades (requires columns close_date and profit_ratio)
+    :param date_col: Column in DataFrame to use for dates (defaults to 'close_date')
+    :param value_col: Column in DataFrame to use for values (defaults to 'profit_ratio')
+    :return: Tuple (float, highdate, lowdate, highvalue, lowvalue) with absolute max drawdown,
+             high and low time and high and low value.
+    :raise: ValueError if trade-dataframe was found empty.
+    """
+    if len(trades) == 0:
+        raise ValueError("Trade dataframe empty.")
+    profit_results = trades.sort_values(date_col).reset_index(drop=True)
+    max_drawdown_df = _calc_drawdown_series(
+        profit_results,
+        date_col=date_col,
+        value_col=value_col,
+        starting_balance=starting_balance)
+
+    return max_drawdown_df

+def calculate_max_drawdown(trades: pd.DataFrame, *, date_col: str = 'close_date',
+                           value_col: str = 'profit_abs', starting_balance: float = 0,
+                           relative: bool = False
+                           ) -> Tuple[float, pd.Timestamp, pd.Timestamp, float, float, float]:
+    """
+    Calculate max drawdown and the corresponding close dates
+    :param trades: DataFrame containing trades (requires columns close_date and profit_ratio)
+    :param date_col: Column in DataFrame to use for dates (defaults to 'close_date')
+    :param value_col: Column in DataFrame to use for values (defaults to 'profit_abs')
+    :param starting_balance: Portfolio starting balance - properly calculate relative drawdown.
+    :return: Tuple (float, highdate, lowdate, highvalue, lowvalue, relative_drawdown)
+             with absolute max drawdown, high and low time and high and low value,
+             and the relative account drawdown
+    :raise: ValueError if trade-dataframe was found empty.
+    """
+    if len(trades) == 0:
+        raise ValueError("Trade dataframe empty.")
+    profit_results = trades.sort_values(date_col).reset_index(drop=True)
+    max_drawdown_df = _calc_drawdown_series(
+        profit_results,
+        date_col=date_col,
+        value_col=value_col,
+        starting_balance=starting_balance
+    )
+
+    idxmin = max_drawdown_df['drawdown_relative'].idxmax() if relative \
+        else max_drawdown_df['drawdown'].idxmin()
+    if idxmin == 0:
+        raise ValueError("No losing trade, therefore no drawdown.")
+    high_date = profit_results.loc[max_drawdown_df.iloc[:idxmin]['high_value'].idxmax(), date_col]
+    low_date = profit_results.loc[idxmin, date_col]
+    high_val = max_drawdown_df.loc[max_drawdown_df.iloc[:idxmin]
+                                   ['high_value'].idxmax(), 'cumulative']
+    low_val = max_drawdown_df.loc[idxmin, 'cumulative']
+    max_drawdown_rel = max_drawdown_df.loc[idxmin, 'drawdown_relative']
+
+    return (
+        abs(max_drawdown_df.loc[idxmin, 'drawdown']),
+        high_date,
+        low_date,
+        high_val,
+        low_val,
+        max_drawdown_rel
+    )

+def calculate_csum(trades: pd.DataFrame, starting_balance: float = 0) -> Tuple[float, float]:
+    """
+    Calculate min/max cumsum of trades, to show if the wallet/stake amount ratio is sane
+    :param trades: DataFrame containing trades (requires columns close_date and profit_percent)
+    :param starting_balance: Add starting balance to results, to show the wallets high / low points
+    :return: Tuple (float, float) with cumsum of profit_abs
+    :raise: ValueError if trade-dataframe was found empty.
+    """
+    if len(trades) == 0:
+        raise ValueError("Trade dataframe empty.")
+
+    csum_df = pd.DataFrame()
+    csum_df['sum'] = trades['profit_abs'].cumsum()
+    csum_min = csum_df['sum'].min() + starting_balance
+    csum_max = csum_df['sum'].max() + starting_balance
+
+    return csum_min, csum_max

+def calculate_cagr(days_passed: int, starting_balance: float, final_balance: float) -> float:
+    """
+    Calculate CAGR
+    :param days_passed: Days passed between start and ending balance
+    :param starting_balance: Starting balance
+    :param final_balance: Final balance to calculate CAGR against
+    :return: CAGR
+    """
+    return (final_balance / starting_balance) ** (1 / (days_passed / 365)) - 1

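A one-line sanity check for calculate_cagr (hypothetical balances): doubling the account over 730 days is about 41.4% per year.

    # (2000 / 1000) ** (1 / (730 / 365)) - 1 == 2 ** 0.5 - 1 ~= 0.4142
    assert abs(calculate_cagr(730, 1000, 2000) - (2 ** 0.5 - 1)) < 1e-12
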
@@ -95,6 +95,7 @@ class Binance(Exchange):
     async def _async_get_historic_ohlcv(self, pair: str, timeframe: str,
                                         since_ms: int, candle_type: CandleType,
                                         is_new_pair: bool = False, raise_: bool = False,
+                                        until_ms: int = None
                                         ) -> Tuple[str, str, str, List]:
         """
         Overwrite to introduce "fast new pair" functionality by detecting the pair's listing date

@@ -115,7 +116,8 @@ class Binance(Exchange):
             since_ms=since_ms,
             is_new_pair=is_new_pair,
             raise_=raise_,
-            candle_type=candle_type
+            candle_type=candle_type,
+            until_ms=until_ms,
         )

     def funding_fee_cutoff(self, open_date: datetime):

(File diff suppressed because it is too large.)

@@ -9,6 +9,7 @@ import logging
 from copy import deepcopy
 from datetime import datetime, timedelta, timezone
 from math import ceil
+from threading import Lock
 from typing import Any, Coroutine, Dict, List, Literal, Optional, Tuple, Union

 import arrow

@@ -64,6 +65,7 @@ class Exchange:
         "ohlcv_params": {},
         "ohlcv_candle_limit": 500,
         "ohlcv_partial_candle": True,
+        "ohlcv_require_since": False,
         # Check https://github.com/ccxt/ccxt/issues/10767 for removal of ohlcv_volume_currency
         "ohlcv_volume_currency": "base",  # "base" or "quote"
         "tickers_have_quoteVolume": True,

@@ -95,6 +97,9 @@ class Exchange:
         self._markets: Dict = {}
         self._trading_fees: Dict[str, Any] = {}
         self._leverage_tiers: Dict[str, List[Dict]] = {}
+        # Lock event loop. This is necessary to avoid race-conditions when using force* commands
+        # Due to funding fee fetching.
+        self._loop_lock = Lock()
         self.loop = asyncio.new_event_loop()
         asyncio.set_event_loop(self.loop)
         self._config: Dict = {}

@@ -166,7 +171,7 @@ class Exchange:
         self._api_async = self._init_ccxt(
             exchange_config, ccxt_async, ccxt_kwargs=ccxt_async_config)

-        logger.info('Using Exchange "%s"', self.name)
+        logger.info(f'Using Exchange "{self.name}"')

         if validate:
             # Check if timeframe is available

@@ -341,15 +346,11 @@ class Exchange:
         return sorted(set([x['quote'] for _, x in markets.items()]))

     def get_pair_quote_currency(self, pair: str) -> str:
-        """
-        Return a pair's quote currency
-        """
+        """ Return a pair's quote currency (base/quote:settlement) """
         return self.markets.get(pair, {}).get('quote', '')

     def get_pair_base_currency(self, pair: str) -> str:
-        """
-        Return a pair's base currency
-        """
+        """ Return a pair's base currency (base/quote:settlement) """
         return self.markets.get(pair, {}).get('base', '')

     def market_is_future(self, market: Dict[str, Any]) -> bool:

@@ -372,6 +373,9 @@ class Exchange:
         return (
             market.get('quote', None) is not None
             and market.get('base', None) is not None
+            and (self.precisionMode != TICK_SIZE
+                 # Too low precision will falsify calculations
+                 or market.get('precision', {}).get('price', None) > 1e-11)
             and ((self.trading_mode == TradingMode.SPOT and self.market_is_spot(market))
                  or (self.trading_mode == TradingMode.MARGIN and self.market_is_margin(market))
                  or (self.trading_mode == TradingMode.FUTURES and self.market_is_future(market)))

@@ -555,7 +559,7 @@ class Exchange:
             # Therefore we also show that.
             raise OperationalException(
                 f"The ccxt library does not provide the list of timeframes "
-                f"for the exchange \"{self.name}\" and this exchange "
+                f"for the exchange {self.name} and this exchange "
                 f"is therefore not supported. ccxt fetchOHLCV: {self.exchange_has('fetchOHLCV')}")

         if timeframe and (timeframe not in self.timeframes):

@@ -655,7 +659,7 @@ class Exchange:
         Re-implementation of ccxt internal methods - ensuring we can test the result is correct
         based on our definitions.
         """
-        if self.markets[pair]['precision']['amount']:
+        if self.markets[pair]['precision']['amount'] is not None:
             amount = float(decimal_to_precision(amount, rounding_mode=TRUNCATE,
                                                 precision=self.markets[pair]['precision']['amount'],
                                                 counting_mode=self.precisionMode,

@@ -785,7 +789,9 @@ class Exchange:
                           rate: float, leverage: float, params: Dict = {},
                           stop_loss: bool = False) -> Dict[str, Any]:
         order_id = f'dry_run_{side}_{datetime.now().timestamp()}'
-        _amount = self.amount_to_precision(pair, amount)
+        # Rounding here must respect contract sizes
+        _amount = self._contracts_to_amount(
+            pair, self.amount_to_precision(pair, self._amount_to_contracts(pair, amount)))
         dry_order: Dict[str, Any] = {
             'id': order_id,
             'symbol': pair,

@@ -1671,7 +1677,8 @@ class Exchange:

     def get_historic_ohlcv(self, pair: str, timeframe: str,
                            since_ms: int, candle_type: CandleType,
-                           is_new_pair: bool = False) -> List:
+                           is_new_pair: bool = False,
+                           until_ms: int = None) -> List:
         """
         Get candle history using asyncio and returns the list of candles.
         Handles all async work for this.

@@ -1679,13 +1686,14 @@ class Exchange:
         :param pair: Pair to download
         :param timeframe: Timeframe to get data for
         :param since_ms: Timestamp in milliseconds to get history from
+        :param until_ms: Timestamp in milliseconds to get history up to
         :param candle_type: '', mark, index, premiumIndex, or funding_rate
         :return: List with candle (OHLCV) data
         """
         pair, _, _, data = self.loop.run_until_complete(
             self._async_get_historic_ohlcv(pair=pair, timeframe=timeframe,
-                                           since_ms=since_ms, is_new_pair=is_new_pair,
-                                           candle_type=candle_type))
+                                           since_ms=since_ms, until_ms=until_ms,
+                                           is_new_pair=is_new_pair, candle_type=candle_type))
         logger.info(f"Downloaded data for {pair} with length {len(data)}.")
         return data

@@ -1706,6 +1714,7 @@ class Exchange:
     async def _async_get_historic_ohlcv(self, pair: str, timeframe: str,
                                         since_ms: int, candle_type: CandleType,
                                         is_new_pair: bool = False, raise_: bool = False,
+                                        until_ms: int = None
                                         ) -> Tuple[str, str, str, List]:
         """
         Download historic ohlcv

@@ -1721,7 +1730,7 @@ class Exchange:
         )
         input_coroutines = [self._async_get_candle_history(
             pair, timeframe, candle_type, since) for since in
-            range(since_ms, arrow.utcnow().int_timestamp * 1000, one_call)]
+            range(since_ms, until_ms or (arrow.utcnow().int_timestamp * 1000), one_call)]

         data: List = []
         # Chunk requests into batches of 100 to avoid overwhelming ccxt Throttling

@@ -1746,7 +1755,8 @@ class Exchange:
     def _build_coroutine(self, pair: str, timeframe: str, candle_type: CandleType,
                          since_ms: Optional[int]) -> Coroutine:

-        if not since_ms and self.required_candle_call_count > 1:
+        if (not since_ms
+                and (self._ft_has["ohlcv_require_since"] or self.required_candle_call_count > 1)):
             # Multiple calls for one pair - to get more history
             one_call = timeframe_to_msecs(timeframe) * self.ohlcv_candle_limit(timeframe)
             move_to = one_call * self.required_candle_call_count

@@ -1806,7 +1816,8 @@ class Exchange:
         async def gather_stuff():
             return await asyncio.gather(*input_coro, return_exceptions=True)

-        results = self.loop.run_until_complete(gather_stuff())
+        with self._loop_lock:
+            results = self.loop.run_until_complete(gather_stuff())

         for res in results:
             if isinstance(res, Exception):

@@ -1865,17 +1876,18 @@ class Exchange:
                 pair, timeframe, since_ms, s
             )
             params = deepcopy(self._ft_has.get('ohlcv_params', {}))
+            candle_limit = self.ohlcv_candle_limit(timeframe)
             if candle_type != CandleType.SPOT:
                 params.update({'price': candle_type})
             if candle_type != CandleType.FUNDING_RATE:
                 data = await self._api_async.fetch_ohlcv(
                     pair, timeframe=timeframe, since=since_ms,
-                    limit=self.ohlcv_candle_limit(timeframe), params=params)
+                    limit=candle_limit, params=params)
             else:
                 # Funding rate
                 data = await self._api_async.fetch_funding_rate_history(
                     pair, since=since_ms,
-                    limit=self.ohlcv_candle_limit(timeframe))
+                    limit=candle_limit)
                 # Convert funding rate to candle pattern
                 data = [[x['timestamp'], x['fundingRate'], 0, 0, 0, 0] for x in data]
             # Some exchanges sort OHLCV in ASC order and others in DESC.

@@ -2062,9 +2074,10 @@ class Exchange:
         if not self.exchange_has("fetchTrades"):
             raise OperationalException("This exchange does not support downloading Trades.")

-        return self.loop.run_until_complete(
-            self._async_get_trade_history(pair=pair, since=since,
-                                          until=until, from_id=from_id))
+        with self._loop_lock:
+            return self.loop.run_until_complete(
+                self._async_get_trade_history(pair=pair, since=since,
+                                              until=until, from_id=from_id))

     @retrier
     def _get_funding_fees_from_exchange(self, pair: str, since: Union[datetime, int]) -> float:

@@ -2173,8 +2186,8 @@ class Exchange:
     def parse_leverage_tier(self, tier) -> Dict:
         info = tier.get('info', {})
         return {
-            'min': tier['notionalFloor'],
-            'max': tier['notionalCap'],
+            'min': tier['minNotional'],
+            'max': tier['maxNotional'],
             'mmr': tier['maintenanceMarginRate'],
             'lev': tier['maxLeverage'],
             'maintAmt': float(info['cum']) if 'cum' in info else None,

@@ -2213,7 +2226,7 @@
             lev = tier['lev']

             if tier_index < len(pair_tiers) - 1:
-                next_tier = pair_tiers[tier_index+1]
+                next_tier = pair_tiers[tier_index + 1]
                 next_floor = next_tier['min'] / next_tier['lev']
                 if next_floor > stake_amount:  # Next tier min too high for stake amount
                     return min((tier['max'] / stake_amount), lev)

@@ -20,6 +20,7 @@ class Ftx(Exchange):
     _ft_has: Dict = {
         "stoploss_on_exchange": True,
         "ohlcv_candle_limit": 1500,
+        "ohlcv_require_since": True,
         "ohlcv_volume_currency": "quote",
         "mark_ohlcv_price": "index",
         "mark_ohlcv_timeframe": "1h",

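Read together with the _build_coroutine hunk above: because FTX's candle endpoint needs an explicit since, the backfill path is now taken even when required_candle_call_count is 1. Condensed restatement (function name is illustrative only):

    def needs_backfill(since_ms, ft_has, required_candle_call_count) -> bool:
        return (not since_ms
                and (ft_has["ohlcv_require_since"] or required_candle_call_count > 1))

    assert needs_backfill(None, {"ohlcv_require_since": True}, 1)       # FTX-style
    assert not needs_backfill(None, {"ohlcv_require_since": False}, 1)  # default
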
@@ -123,6 +123,8 @@ class FreqtradeBot(LoggingMixin):
             self._schedule.every().day.at(t).do(update)
         self.last_process = datetime(1970, 1, 1, tzinfo=timezone.utc)

+        self.strategy.bot_start()
+
     def notify_status(self, msg: str) -> None:
         """
         Public method for users of this class (worker, etc.) to send notifications

@@ -400,7 +402,10 @@ class FreqtradeBot(LoggingMixin):
             logger.info("No currency pair in active pair whitelist, "
                         "but checking to exit open trades.")
             return trades_created
-        if PairLocks.is_global_lock():
+        if PairLocks.is_global_lock(side='*'):
+            # This only checks for total locks (both sides).
+            # Per-side locks will be evaluated by `is_pair_locked` within create_trade,
+            # once the direction for the trade is clear.
             lock = PairLocks.get_pair_longest_lock('*')
             if lock:
                 self.log_once(f"Global pairlock active until "

@@ -434,16 +439,6 @@ class FreqtradeBot(LoggingMixin):

         analyzed_df, _ = self.dataprovider.get_analyzed_dataframe(pair, self.strategy.timeframe)
         nowtime = analyzed_df.iloc[-1]['date'] if len(analyzed_df) > 0 else None
-        if self.strategy.is_pair_locked(pair, nowtime):
-            lock = PairLocks.get_pair_longest_lock(pair, nowtime)
-            if lock:
-                self.log_once(f"Pair {pair} is still locked until "
-                              f"{lock.lock_end_time.strftime(constants.DATETIME_PRINT_FORMAT)} "
-                              f"due to {lock.reason}.",
-                              logger.info)
-            else:
-                self.log_once(f"Pair {pair} is still locked.", logger.info)
-            return False

         # get_free_open_trades is checked before create_trade is called
         # but it is still used here to prevent opening too many trades within one iteration

@@ -459,7 +454,18 @@ class FreqtradeBot(LoggingMixin):
         )

         if signal:
+            if self.strategy.is_pair_locked(pair, candle_date=nowtime, side=signal):
+                lock = PairLocks.get_pair_longest_lock(pair, nowtime, signal)
+                if lock:
+                    self.log_once(f"Pair {pair} {lock.side} is locked until "
+                                  f"{lock.lock_end_time.strftime(constants.DATETIME_PRINT_FORMAT)} "
+                                  f"due to {lock.reason}.",
+                                  logger.info)
+                else:
+                    self.log_once(f"Pair {pair} is currently locked.", logger.info)
+                return False
             stake_amount = self.wallets.get_trade_stake_amount(pair, self.edge)

             bid_check_dom = self.config.get('entry_pricing', {}).get('check_depth_of_market', {})
             if ((bid_check_dom.get('enabled', False)) and
                     (bid_check_dom.get('bids_to_ask_delta', 0) > 0)):

@@ -606,7 +612,6 @@ class FreqtradeBot(LoggingMixin):
         Executes a limit buy for the given pair
         :param pair: pair for which we want to create a LIMIT_BUY
         :param stake_amount: amount of stake-currency for the pair
-        :param leverage: amount of leverage applied to this trade
         :return: True if a buy order is created, false if it fails.
         """
         time_in_force = self.strategy.order_time_in_force['entry']

@@ -632,6 +637,7 @@ class FreqtradeBot(LoggingMixin):

         amount = (stake_amount / enter_limit_requested) * leverage
         order_type = ordertype or self.strategy.order_types['entry']
+
         if not pos_adjust and not strategy_safe_wrapper(
                 self.strategy.confirm_trade_entry, default_retval=True)(
                 pair=pair, order_type=order_type, amount=amount, rate=enter_limit_requested,

@@ -684,18 +690,9 @@ class FreqtradeBot(LoggingMixin):
         amount = safe_value_fallback(order, 'filled', 'amount')
         enter_limit_filled_price = safe_value_fallback(order, 'average', 'price')

-        # TODO: this might be unnecessary, as we're calling it in update_trade_state.
-        isolated_liq = self.exchange.get_liquidation_price(
-            leverage=leverage,
-            pair=pair,
-            amount=amount,
-            open_rate=enter_limit_filled_price,
-            is_short=is_short
-        )
-        interest_rate = self.exchange.get_interest_rate()
-
         # Fee is applied twice because we make a LIMIT_BUY and LIMIT_SELL
         fee = self.exchange.get_fee(symbol=pair, taker_or_maker='maker')
+        base_currency = self.exchange.get_pair_base_currency(pair)
         open_date = datetime.now(timezone.utc)
         funding_fees = self.exchange.get_funding_fees(
             pair=pair, amount=amount, is_short=is_short, open_date=open_date)

@@ -703,6 +700,8 @@ class FreqtradeBot(LoggingMixin):
         if trade is None:
             trade = Trade(
                 pair=pair,
+                base_currency=base_currency,
+                stake_currency=self.config['stake_currency'],
                 stake_amount=stake_amount,
                 amount=amount,
                 is_open=True,

@@ -719,8 +718,6 @@ class FreqtradeBot(LoggingMixin):
|
||||
timeframe=timeframe_to_minutes(self.config['timeframe']),
|
||||
leverage=leverage,
|
||||
is_short=is_short,
|
||||
interest_rate=interest_rate,
|
||||
liquidation_price=isolated_liq,
|
||||
trading_mode=self.trading_mode,
|
||||
funding_fees=funding_fees
|
||||
)
|
||||
@@ -732,6 +729,7 @@ class FreqtradeBot(LoggingMixin):
|
||||
trade.open_order_id = order_id
|
||||
|
||||
trade.orders.append(order_obj)
|
||||
trade.recalc_trade_from_orders()
|
||||
Trade.query.session.add(trade)
|
||||
Trade.commit()
|
||||
|
||||
@@ -747,8 +745,8 @@ class FreqtradeBot(LoggingMixin):
|
||||
else:
|
||||
logger.info(f"DCA order {order_status}, will wait for resolution: {trade}")
|
||||
|
||||
# Update fees if order is non-opened
|
||||
if order_status in constants.NON_OPEN_EXCHANGE_STATES:
|
||||
# Update fees if order is closed
|
||||
if order_status == 'closed':
|
||||
self.update_trade_state(trade, order_id, order)
|
||||
|
||||
return True
|
||||
@@ -1396,7 +1394,8 @@ class FreqtradeBot(LoggingMixin):
|
||||
default_retval=proposed_limit_rate)(
|
||||
pair=trade.pair, trade=trade,
|
||||
current_time=datetime.now(timezone.utc),
|
||||
proposed_rate=proposed_limit_rate, current_profit=current_profit)
|
||||
proposed_rate=proposed_limit_rate, current_profit=current_profit,
|
||||
exit_tag=exit_check.exit_reason)
|
||||
|
||||
limit = self.get_valid_price(custom_exit_price, proposed_limit_rate)
|
||||
|
||||
@@ -1644,21 +1643,21 @@ class FreqtradeBot(LoggingMixin):
|
||||
if not trade.is_open:
|
||||
if send_msg and not stoploss_order and not trade.open_order_id:
|
||||
self._notify_exit(trade, '', True, sub_trade=sub_trade, order=order_obj)
|
||||
self.handle_protections(trade.pair)
|
||||
self.handle_protections(trade.pair, trade.trade_direction)
|
||||
elif send_msg and not trade.open_order_id:
|
||||
# Enter fill
|
||||
self._notify_enter(trade, order_obj, fill=True, sub_trade=sub_trade)
|
||||
|
||||
return False
|
||||
|
||||
def handle_protections(self, pair: str) -> None:
|
||||
prot_trig = self.protections.stop_per_pair(pair)
|
||||
def handle_protections(self, pair: str, side: LongShort) -> None:
|
||||
prot_trig = self.protections.stop_per_pair(pair, side=side)
|
||||
if prot_trig:
|
||||
msg = {'type': RPCMessageType.PROTECTION_TRIGGER, }
|
||||
msg.update(prot_trig.to_json())
|
||||
self.rpc.send_msg(msg)
|
||||
|
||||
prot_trig_glb = self.protections.global_stop()
|
||||
prot_trig_glb = self.protections.global_stop(side=side)
|
||||
if prot_trig_glb:
|
||||
msg = {'type': RPCMessageType.PROTECTION_TRIGGER_GLOBAL, }
|
||||
msg.update(prot_trig_glb.to_json())
|
||||
|
||||
@@ -31,13 +31,13 @@ def interest(
|
||||
"""
|
||||
exchange_name = exchange_name.lower()
|
||||
if exchange_name == "binance":
|
||||
return borrowed * rate * ceil(hours)/twenty_four
|
||||
return borrowed * rate * ceil(hours) / twenty_four
|
||||
elif exchange_name == "kraken":
|
||||
# Rounded based on https://kraken-fees-calculator.github.io/
|
||||
return borrowed * rate * (one+ceil(hours/four))
|
||||
return borrowed * rate * (one + ceil(hours / four))
|
||||
elif exchange_name == "ftx":
|
||||
# As Explained under #Interest rates section in
|
||||
# https://help.ftx.com/hc/en-us/articles/360053007671-Spot-Margin-Trading-Explainer
|
||||
return borrowed * rate * ceil(hours)/twenty_four
|
||||
return borrowed * rate * ceil(hours) / twenty_four
|
||||
else:
|
||||
raise OperationalException(f"Leverage not available on {exchange_name} with freqtrade")
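A quick worked example of the rounding above (a sketch only; it assumes, as the surrounding code suggests, that `rate` is a per-day rate on Binance and a per-4-hour rate on Kraken, with `one`/`four`/`twenty_four` being the module's Decimal constants for 1, 4 and 24):

```python
from decimal import Decimal
from math import ceil

borrowed, rate = Decimal('1000'), Decimal('0.0005')
hours = 2.5

# Binance bills every started hour of the daily rate: ceil(2.5) = 3 hours
binance = borrowed * rate * ceil(hours) / Decimal(24)      # 0.0625

# Kraken bills one opening period plus one per started 4-hour block:
# 1 + ceil(2.5 / 4) = 2 periods
kraken = borrowed * rate * (Decimal(1) + ceil(hours / 4))  # 1.0
```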

@@ -2,13 +2,11 @@
Various tool function for Freqtrade and scripts
"""
import gzip
import hashlib
import logging
import re
from copy import deepcopy
from datetime import datetime
from pathlib import Path
from typing import Any, Iterator, List, Union
from typing import Any, Iterator, List
from typing.io import IO
from urllib.parse import urlparse

@@ -86,6 +84,22 @@ def file_dump_json(filename: Path, data: Any, is_zip: bool = False, log: bool =
    logger.debug(f'done json to "{filename}"')


def file_dump_joblib(filename: Path, data: Any, log: bool = True) -> None:
    """
    Dump object data into a file
    :param filename: file to create
    :param data: Object data to save
    :return:
    """
    import joblib

    if log:
        logger.info(f'dumping joblib to "{filename}"')
    with open(filename, 'wb') as fp:
        joblib.dump(data, fp)
    logger.debug(f'done joblib dump to "{filename}"')
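A minimal round-trip sketch for the new helper (the target path is illustrative; `joblib.load` is the matching reader):

```python
from pathlib import Path

import joblib

from freqtrade.misc import file_dump_joblib

data = {'BTC/USDT': [1, 2, 3]}
target = Path('/tmp/example_signals.pkl')  # illustrative path

file_dump_joblib(target, data)
assert joblib.load(target) == data  # joblib restores the original object
```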


def json_load(datafile: IO) -> Any:
    """
    load data with rapidjson

@@ -126,7 +140,7 @@ def format_ms_time(date: int) -> str:
    convert MS date to readable format.
    : epoch-string in ms
    """
    return datetime.fromtimestamp(date/1000.0).strftime('%Y-%m-%dT%H:%M:%S')
    return datetime.fromtimestamp(date / 1000.0).strftime('%Y-%m-%dT%H:%M:%S')
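For reference, a sample call (the exact output string depends on the local timezone, since `datetime.fromtimestamp` converts to local time):

```python
from freqtrade.misc import format_ms_time

# 1_650_000_000_000 ms since epoch -> a timestamp in April 2022,
# formatted as 'YYYY-MM-DDTHH:MM:SS' in the local timezone
print(format_ms_time(1_650_000_000_000))
```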


def deep_merge_dicts(source, destination, allow_null_overrides: bool = True):

@@ -235,34 +249,3 @@ def parse_db_uri_for_logging(uri: str):
        return uri
    pwd = parsed_db_uri.netloc.split(':')[1].split('@')[0]
    return parsed_db_uri.geturl().replace(f':{pwd}@', ':*****@')


def get_strategy_run_id(strategy) -> str:
    """
    Generate unique identification hash for a backtest run. Identical config and strategy file will
    always return an identical hash.
    :param strategy: strategy object.
    :return: hex string id.
    """
    digest = hashlib.sha1()
    config = deepcopy(strategy.config)

    # Options that have no impact on results of individual backtest.
    not_important_keys = ('strategy_list', 'original_config', 'telegram', 'api_server')
    for k in not_important_keys:
        if k in config:
            del config[k]

    # Explicitly allow NaN values (e.g. max_open_trades).
    # as it does not matter for getting the hash.
    digest.update(rapidjson.dumps(config, default=str,
                                  number_mode=rapidjson.NM_NAN).encode('utf-8'))
    with open(strategy.__file__, 'rb') as fp:
        digest.update(fp.read())
    return digest.hexdigest().lower()


def get_backtest_metadata_filename(filename: Union[Path, str]) -> Path:
    """Return metadata filename for specified backtest results file."""
    filename = Path(filename)
    return filename.parent / Path(f'{filename.stem}.meta{filename.suffix}')

freqtrade/optimize/backtest_caching.py (new file)
@@ -0,0 +1,40 @@
import hashlib
from copy import deepcopy
from pathlib import Path
from typing import Union

import rapidjson


def get_strategy_run_id(strategy) -> str:
    """
    Generate unique identification hash for a backtest run. Identical config and strategy file will
    always return an identical hash.
    :param strategy: strategy object.
    :return: hex string id.
    """
    digest = hashlib.sha1()
    config = deepcopy(strategy.config)

    # Options that have no impact on results of individual backtest.
    not_important_keys = ('strategy_list', 'original_config', 'telegram', 'api_server')
    for k in not_important_keys:
        if k in config:
            del config[k]

    # Explicitly allow NaN values (e.g. max_open_trades).
    # as it does not matter for getting the hash.
    digest.update(rapidjson.dumps(config, default=str,
                                  number_mode=rapidjson.NM_NAN).encode('utf-8'))
    # Include _ft_params_from_file - so changing parameter files cause cache eviction
    digest.update(rapidjson.dumps(
        strategy._ft_params_from_file, default=str, number_mode=rapidjson.NM_NAN).encode('utf-8'))
    with open(strategy.__file__, 'rb') as fp:
        digest.update(fp.read())
    return digest.hexdigest().lower()
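The run id keys the backtest cache, so everything that can change results feeds the digest. A minimal sketch of the cache-eviction behaviour (DummyStrategy is purely illustrative; run it as a script so `__file__` is defined):

```python
from freqtrade.optimize.backtest_caching import get_strategy_run_id


class DummyStrategy:
    # Illustrative stand-in for a loaded strategy instance
    config = {'max_open_trades': 3, 'telegram': {'enabled': False}}
    _ft_params_from_file = {'params': {'buy': {'rsi_value': 30}}}
    __file__ = __file__  # hash this script itself as the "strategy file"


run_id = get_strategy_run_id(DummyStrategy())
DummyStrategy._ft_params_from_file = {'params': {'buy': {'rsi_value': 35}}}
# A changed parameter file yields a different id, evicting the cached result
assert run_id != get_strategy_run_id(DummyStrategy())
```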


def get_backtest_metadata_filename(filename: Union[Path, str]) -> Path:
    """Return metadata filename for specified backtest results file."""
    filename = Path(filename)
    return filename.parent / Path(f'{filename.stem}.meta{filename.suffix}')
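The metadata file simply lives next to the results file, with a `.meta` infix before the suffix:

```python
from freqtrade.optimize.backtest_caching import get_backtest_metadata_filename

print(get_backtest_metadata_filename('backtest-result-2022-04-01.json'))
# -> backtest-result-2022-04-01.meta.json
```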

@@ -9,6 +9,7 @@ from copy import deepcopy
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Optional, Tuple

import pandas as pd
from numpy import nan
from pandas import DataFrame

@@ -19,13 +20,15 @@ from freqtrade.data import history
from freqtrade.data.btanalysis import find_existing_backtest_stats, trade_list_to_dataframe
from freqtrade.data.converter import trim_dataframe, trim_dataframes
from freqtrade.data.dataprovider import DataProvider
from freqtrade.enums import BacktestState, CandleType, ExitCheckTuple, ExitType, TradingMode
from freqtrade.enums import (BacktestState, CandleType, ExitCheckTuple, ExitType, RunMode,
                             TradingMode)
from freqtrade.exceptions import DependencyException, OperationalException
from freqtrade.exchange import timeframe_to_minutes, timeframe_to_seconds
from freqtrade.misc import get_strategy_run_id
from freqtrade.mixins import LoggingMixin
from freqtrade.optimize.backtest_caching import get_strategy_run_id
from freqtrade.optimize.bt_progress import BTProgress
from freqtrade.optimize.optimize_reports import (generate_backtest_stats, show_backtest_results,
                                                 store_backtest_signal_candles,
                                                 store_backtest_stats)
from freqtrade.persistence import LocalTrade, Order, PairLocks, Trade
from freqtrade.plugins.pairlistmanager import PairListManager

@@ -51,6 +54,11 @@ ESHORT_IDX = 8  # Exit short
ENTER_TAG_IDX = 9
EXIT_TAG_IDX = 10

# Every change to this headers list must evaluate further usages of the resulting tuple
# and eventually change the constants for indexes at the top
HEADERS = ['date', 'open', 'high', 'low', 'close', 'enter_long', 'exit_long',
           'enter_short', 'exit_short', 'enter_tag', 'exit_tag']


class Backtesting:
    """

@@ -73,6 +81,8 @@ class Backtesting:
        self.run_ids: Dict[str, str] = {}
        self.strategylist: List[IStrategy] = []
        self.all_results: Dict[str, Dict] = {}
        self.processed_dfs: Dict[str, Dict] = {}

        self._exchange_name = self.config['exchange']['name']
        self.exchange = ExchangeResolver.load_exchange(self._exchange_name, self.config)
        self.dataprovider = DataProvider(self.config, self.exchange)

@@ -174,9 +184,10 @@ class Backtesting:
        # Attach Wallets to Strategy baseclass
        strategy.wallets = self.wallets
        # Set stoploss_on_exchange to false for backtesting,
        # since a "perfect" stoploss-sell is assumed anyway
        # since a "perfect" stoploss-exit is assumed anyway
        # And the regular "stoploss" function would not apply to that case
        self.strategy.order_types['stoploss_on_exchange'] = False
        self.strategy.bot_start()

    def _load_protections(self, strategy: IStrategy):
        if self.config.get('enable_protections', False):

@@ -259,10 +270,18 @@ class Backtesting:
                candle_type=CandleType.from_string(self.exchange._ft_has["mark_ohlcv_price"])
            )
            # Combine data to avoid combining the data per trade.
            unavailable_pairs = []
            for pair in self.pairlists.whitelist:
                if pair not in self.exchange._leverage_tiers:
                    unavailable_pairs.append(pair)
                    continue
                self.futures_data[pair] = funding_rates_dict[pair].merge(
                    mark_rates_dict[pair], on='date', how="inner", suffixes=["_fund", "_mark"])

            if unavailable_pairs:
                raise OperationalException(
                    f"Pairs {', '.join(unavailable_pairs)} got no leverage tiers available. "
                    "It is therefore impossible to backtest with this pair at the moment.")
        else:
            self.futures_data = {}

@@ -300,10 +319,7 @@ class Backtesting:
        :param processed: a processed dictionary with format {pair, data}, which gets cleared to
            optimize memory usage!
        """
        # Every change to this headers list must evaluate further usages of the resulting tuple
        # and eventually change the constants for indexes at the top
        headers = ['date', 'open', 'high', 'low', 'close', 'enter_long', 'exit_long',
                   'enter_short', 'exit_short', 'enter_tag', 'exit_tag']

        data: Dict = {}
        self.progress.init_step(BacktestState.CONVERT, len(processed))

@@ -315,7 +331,7 @@ class Backtesting:

            if not pair_data.empty:
                # Cleanup from prior runs
                pair_data.drop(headers[5:] + ['buy', 'sell'], axis=1, errors='ignore')
                pair_data.drop(HEADERS[5:] + ['buy', 'sell'], axis=1, errors='ignore')

            df_analyzed = self.strategy.advise_exit(
                self.strategy.advise_entry(pair_data, {'pair': pair}),

@@ -328,13 +344,13 @@ class Backtesting:
            self.dataprovider._set_cached_df(
                pair, self.timeframe, df_analyzed, self.config['candle_type_def'])

            # Create a copy of the dataframe before shifting, that way the buy signal/tag
            # Create a copy of the dataframe before shifting, that way the entry signal/tag
            # remains on the correct candle for callbacks.
            df_analyzed = df_analyzed.copy()

            # To avoid using data from future, we use buy/sell signals shifted
            # To avoid using data from future, we use entry/exit signals shifted
            # from the previous candle
            for col in headers[5:]:
            for col in HEADERS[5:]:
                tag_col = col in ('enter_tag', 'exit_tag')
                if col in df_analyzed.columns:
                    df_analyzed.loc[:, col] = df_analyzed.loc[:, col].replace(

@@ -346,27 +362,27 @@ class Backtesting:

            # Convert from Pandas to list for performance reasons
            # (Looping Pandas is slow.)
            data[pair] = df_analyzed[headers].values.tolist() if not df_analyzed.empty else []
            data[pair] = df_analyzed[HEADERS].values.tolist() if not df_analyzed.empty else []
        return data

    def _get_close_rate(self, row: Tuple, trade: LocalTrade, sell: ExitCheckTuple,
    def _get_close_rate(self, row: Tuple, trade: LocalTrade, exit: ExitCheckTuple,
                        trade_dur: int) -> float:
        """
        Get close rate for backtesting result
        """
        # Special handling if high or low hit STOP_LOSS or ROI
        if sell.exit_type in (ExitType.STOP_LOSS, ExitType.TRAILING_STOP_LOSS):
            return self._get_close_rate_for_stoploss(row, trade, sell, trade_dur)
        elif sell.exit_type == (ExitType.ROI):
            return self._get_close_rate_for_roi(row, trade, sell, trade_dur)
        if exit.exit_type in (ExitType.STOP_LOSS, ExitType.TRAILING_STOP_LOSS):
            return self._get_close_rate_for_stoploss(row, trade, exit, trade_dur)
        elif exit.exit_type == (ExitType.ROI):
            return self._get_close_rate_for_roi(row, trade, exit, trade_dur)
        else:
            return row[OPEN_IDX]

    def _get_close_rate_for_stoploss(self, row: Tuple, trade: LocalTrade, sell: ExitCheckTuple,
    def _get_close_rate_for_stoploss(self, row: Tuple, trade: LocalTrade, exit: ExitCheckTuple,
                                     trade_dur: int) -> float:
        # our stoploss was already lower than candle high,
        # possibly due to a cancelled trade exit.
        # sell at open price.
        # exit at open price.
        is_short = trade.is_short or False
        leverage = trade.leverage or 1.0
        side_1 = -1 if is_short else 1

@@ -380,7 +396,7 @@ class Backtesting:
        # Special case: trailing triggers within same candle as trade opened. Assume most
        # pessimistic price movement, which is moving just enough to arm stoploss and
        # immediately going down to stop price.
        if sell.exit_type == ExitType.TRAILING_STOP_LOSS and trade_dur == 0:
        if exit.exit_type == ExitType.TRAILING_STOP_LOSS and trade_dur == 0:
            if (
                not self.strategy.use_custom_stoploss and self.strategy.trailing_stop
                and self.strategy.trailing_only_offset_is_reached

@@ -399,7 +415,7 @@ class Backtesting:
            else:
                assert stop_rate < row[HIGH_IDX]

            # Limit lower-end to candle low to avoid sells below the low.
            # Limit lower-end to candle low to avoid exits below the low.
            # This still remains "worst case" - but "worst realistic case".
            if is_short:
                return min(row[HIGH_IDX], stop_rate)

@@ -409,7 +425,7 @@ class Backtesting:
        # Set close_rate to stoploss
        return trade.stop_loss

    def _get_close_rate_for_roi(self, row: Tuple, trade: LocalTrade, sell: ExitCheckTuple,
    def _get_close_rate_for_roi(self, row: Tuple, trade: LocalTrade, exit: ExitCheckTuple,
                                trade_dur: int) -> float:
        is_short = trade.is_short or False
        leverage = trade.leverage or 1.0

@@ -417,7 +433,7 @@ class Backtesting:
        roi_entry, roi = self.strategy.min_roi_reached_entry(trade_dur)
        if roi is not None and roi_entry is not None:
            if roi == -1 and roi_entry % self.timeframe_min == 0:
                # When forceselling with ROI=-1, the roi time will always be equal to trade_dur.
                # When force_exiting with ROI=-1, the roi time will always be equal to trade_dur.
                # If that entry is a multiple of the timeframe (so on candle open)
                # - we'll use open instead of close
                return row[OPEN_IDX]

@@ -434,7 +450,7 @@ class Backtesting:
                    and roi_entry % self.timeframe_min == 0
                    and is_new_roi):
                # new ROI entry came into effect.
                # use Open rate if open_rate > calculated sell rate
                # use Open rate if open_rate > calculated exit rate
                return row[OPEN_IDX]

            if (trade_dur == 0 and (

@@ -457,11 +473,11 @@ class Backtesting:
                # ROI on opening candles with custom pricing can only
                # trigger if the entry was at Open or lower wick.
                # details: https: // github.com/freqtrade/freqtrade/issues/6261
                # If open_rate is < open, only allow sells below the close on red candles.
                # If open_rate is < open, only allow exits below the close on red candles.
                raise ValueError("Opening candle ROI on red candles.")

            # Use the maximum between close_rate and low as we
            # cannot sell outside of a candle.
            # cannot exit outside of a candle.
            # Applies when a new ROI setting comes in place and the whole candle is above that.
            return min(max(close_rate, row[LOW_IDX]), row[HIGH_IDX])
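The final clamp is a plain two-sided bound of the computed exit rate into the candle's range; a quick numeric sketch:

```python
low, high = 100.0, 110.0  # candle low/high
for close_rate in (95.0, 105.0, 120.0):
    # min(max(...)) bounds the rate to [low, high]
    print(min(max(close_rate, low), high))
# -> 100.0, 105.0, 110.0
```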

@@ -509,7 +525,7 @@ class Backtesting:
        """ Rate is within candle, therefore filled"""
        return row[LOW_IDX] <= rate <= row[HIGH_IDX]

    def _get_sell_trade_entry_for_candle(self, trade: LocalTrade,
    def _get_exit_trade_entry_for_candle(self, trade: LocalTrade,
                                         row: Tuple) -> Optional[LocalTrade]:

        # Check if we need to adjust our current positions

@@ -521,33 +537,33 @@ class Backtesting:
        if check_adjust_entry:
            trade = self._get_adjust_trade_entry_for_candle(trade, row)

        sell_candle_time: datetime = row[DATE_IDX].to_pydatetime()
        exit_candle_time: datetime = row[DATE_IDX].to_pydatetime()
        enter = row[SHORT_IDX] if trade.is_short else row[LONG_IDX]
        exit_ = row[ESHORT_IDX] if trade.is_short else row[ELONG_IDX]
        sell = self.strategy.should_exit(
            trade, row[OPEN_IDX], sell_candle_time,  # type: ignore
            enter=enter, exit_=exit_,
        exit_sig = row[ESHORT_IDX] if trade.is_short else row[ELONG_IDX]
        exit_ = self.strategy.should_exit(
            trade, row[OPEN_IDX], exit_candle_time,  # type: ignore
            enter=enter, exit_=exit_sig,
            low=row[LOW_IDX], high=row[HIGH_IDX]
        )

        if sell.exit_flag:
            trade.close_date = sell_candle_time
        if exit_.exit_flag:
            trade.close_date = exit_candle_time

            trade_dur = int((trade.close_date_utc - trade.open_date_utc).total_seconds() // 60)
            try:
                close_rate = self._get_close_rate(row, trade, sell, trade_dur)
                close_rate = self._get_close_rate(row, trade, exit_, trade_dur)
            except ValueError:
                return None
            # call the custom exit price, with default value as previous close_rate
            current_profit = trade.calc_profit_ratio(close_rate)
            order_type = self.strategy.order_types['exit']
            if sell.exit_type in (ExitType.EXIT_SIGNAL, ExitType.CUSTOM_EXIT):
                # Custom exit pricing only for sell-signals
            if exit_.exit_type in (ExitType.EXIT_SIGNAL, ExitType.CUSTOM_EXIT):
                # Custom exit pricing only for exit-signals
                if order_type == 'limit':
                    close_rate = strategy_safe_wrapper(self.strategy.custom_exit_price,
                                                       default_retval=close_rate)(
                        pair=trade.pair, trade=trade,
                        current_time=sell_candle_time,
                        current_time=exit_candle_time,
                        proposed_rate=close_rate, current_profit=current_profit)
                    # We can't place orders lower than current low.
                    # freqtrade does not support this in live, and the order would fill immediately

@@ -562,12 +578,12 @@ class Backtesting:
                    pair=trade.pair, trade=trade, order_type='limit', amount=trade.amount,
                    rate=close_rate,
                    time_in_force=time_in_force,
                    sell_reason=sell.exit_reason,  # deprecated
                    exit_reason=sell.exit_reason,
                    current_time=sell_candle_time):
                    sell_reason=exit_.exit_reason,  # deprecated
                    exit_reason=exit_.exit_reason,
                    current_time=exit_candle_time):
                return None

            trade.exit_reason = sell.exit_reason
            trade.exit_reason = exit_.exit_reason

            # Checks and adds an exit tag, after checking that the length of the
            # row has the length for an exit tag column

@@ -575,6 +591,7 @@ class Backtesting:
                len(row) > EXIT_TAG_IDX
                and row[EXIT_TAG_IDX] is not None
                and len(row[EXIT_TAG_IDX]) > 0
                and exit_.exit_type in (ExitType.EXIT_SIGNAL,)
            ):
                trade.exit_reason = row[EXIT_TAG_IDX]

@@ -585,14 +602,14 @@ class Backtesting:
    def _exit_trade(self, trade: LocalTrade, sell_row: Tuple,
                    close_rate: float, amount: float = None) -> Optional[LocalTrade]:
        self.order_id_counter += 1
        sell_candle_time = sell_row[DATE_IDX].to_pydatetime()
        exit_candle_time = sell_row[DATE_IDX].to_pydatetime()
        order_type = self.strategy.order_types['exit']
        amount = amount or trade.amount
        order = Order(
            id=self.order_id_counter,
            ft_trade_id=trade.id,
            order_date=sell_candle_time,
            order_update_date=sell_candle_time,
            order_date=exit_candle_time,
            order_update_date=exit_candle_time,
            ft_is_open=True,
            ft_pair=trade.pair,
            order_id=str(self.order_id_counter),

@@ -611,8 +628,8 @@ class Backtesting:
        trade.orders.append(order)
        return trade

    def _get_sell_trade_entry(self, trade: LocalTrade, row: Tuple) -> Optional[LocalTrade]:
        sell_candle_time: datetime = row[DATE_IDX].to_pydatetime()
    def _get_exit_trade_entry(self, trade: LocalTrade, row: Tuple) -> Optional[LocalTrade]:
        exit_candle_time: datetime = row[DATE_IDX].to_pydatetime()

        if self.trading_mode == TradingMode.FUTURES:
            trade.funding_fees = self.exchange.calculate_funding_fees(

@@ -620,37 +637,35 @@ class Backtesting:
                amount=trade.amount,
                is_short=trade.is_short,
                open_date=trade.open_date_utc,
                close_date=sell_candle_time,
                close_date=exit_candle_time,
            )

        if self.timeframe_detail and trade.pair in self.detail_data:
            sell_candle_end = sell_candle_time + timedelta(minutes=self.timeframe_min)
            exit_candle_end = exit_candle_time + timedelta(minutes=self.timeframe_min)

            detail_data = self.detail_data[trade.pair]
            detail_data = detail_data.loc[
                (detail_data['date'] >= sell_candle_time) &
                (detail_data['date'] < sell_candle_end)
                (detail_data['date'] >= exit_candle_time) &
                (detail_data['date'] < exit_candle_end)
            ].copy()
            if len(detail_data) == 0:
                # Fall back to "regular" data if no detail data was found for this candle
                return self._get_sell_trade_entry_for_candle(trade, row)
                return self._get_exit_trade_entry_for_candle(trade, row)
            detail_data.loc[:, 'enter_long'] = row[LONG_IDX]
            detail_data.loc[:, 'exit_long'] = row[ELONG_IDX]
            detail_data.loc[:, 'enter_short'] = row[SHORT_IDX]
            detail_data.loc[:, 'exit_short'] = row[ESHORT_IDX]
            detail_data.loc[:, 'enter_tag'] = row[ENTER_TAG_IDX]
            detail_data.loc[:, 'exit_tag'] = row[EXIT_TAG_IDX]
            headers = ['date', 'open', 'high', 'low', 'close', 'enter_long', 'exit_long',
                       'enter_short', 'exit_short', 'enter_tag', 'exit_tag']
            for det_row in detail_data[headers].values.tolist():
                res = self._get_sell_trade_entry_for_candle(trade, det_row)
            for det_row in detail_data[HEADERS].values.tolist():
                res = self._get_exit_trade_entry_for_candle(trade, det_row)
                if res:
                    return res

            return None

        else:
            return self._get_sell_trade_entry_for_candle(trade, row)
            return self._get_exit_trade_entry_for_candle(trade, row)

    def get_valid_price_and_stake(
        self, pair: str, row: Tuple, propose_rate: float, stake_amount: Optional[float],

@@ -665,7 +680,7 @@ class Backtesting:
            proposed_rate=propose_rate, entry_tag=entry_tag,
            side=direction,
        )  # default value is the open rate
        # We can't place orders higher than current high (otherwise it'd be a stop limit buy)
        # We can't place orders higher than current high (otherwise it'd be a stop limit entry)
        # which freqtrade does not support in live.
        if direction == "short":
            propose_rate = max(propose_rate, row[LOW_IDX])

@@ -746,6 +761,7 @@ class Backtesting:

        if stake_amount and (not min_stake_amount or stake_amount > min_stake_amount):
            self.order_id_counter += 1
            base_currency = self.exchange.get_pair_base_currency(pair)
            amount = round((stake_amount / propose_rate) * leverage, 8)
            is_short = (direction == 'short')
            # Necessary for Margin trading. Disabled until support is enabled.

@@ -758,6 +774,8 @@ class Backtesting:
                id=self.trade_id_counter,
                open_order_id=self.order_id_counter,
                pair=pair,
                base_currency=base_currency,
                stake_currency=self.config['stake_currency'],
                open_rate=propose_rate,
                open_rate_requested=propose_rate,
                open_date=current_time,

@@ -826,13 +844,13 @@ class Backtesting:
        if len(open_trades[pair]) > 0:
            for trade in open_trades[pair]:
                if trade.open_order_id and trade.nr_of_successful_entries == 0:
                    # Ignore trade if buy-order did not fill yet
                    # Ignore trade if entry-order did not fill yet
                    continue
                sell_row = data[pair][-1]
                exit_row = data[pair][-1]

                trade.close_date = sell_row[DATE_IDX].to_pydatetime()
                trade.close_date = exit_row[DATE_IDX].to_pydatetime()
                trade.exit_reason = ExitType.FORCE_EXIT.value
                trade.close(sell_row[OPEN_IDX], show_msg=False)
                trade.close(exit_row[OPEN_IDX], show_msg=False)
                LocalTrade.close_bt_trade(trade)
                # Deepcopy object to have wallets update correctly
                trade1 = deepcopy(trade)

@@ -862,10 +880,11 @@ class Backtesting:
            return 'short'
        return None

    def run_protections(self, enable_protections, pair: str, current_time: datetime):
    def run_protections(
            self, enable_protections, pair: str, current_time: datetime, side: LongShort):
        if enable_protections:
            self.protections.stop_per_pair(pair, current_time)
            self.protections.global_stop(current_time)
            self.protections.stop_per_pair(pair, current_time, side)
            self.protections.global_stop(current_time, side)

    def check_order_cancel(self, trade: LocalTrade, current_time) -> bool:
        """

@@ -882,7 +901,7 @@ class Backtesting:
            # Remove trade due to entry timeout expiration.
            return True
        else:
            # Close additional buy order
            # Close additional entry order
            del trade.orders[trade.orders.index(order)]
        if order.side == trade.exit_side:
            self.timedout_exit_orders += 1

@@ -895,7 +914,7 @@ class Backtesting:
            self, data: Dict, pair: str, row_index: int, current_time: datetime) -> Optional[Tuple]:
        try:
            # Row is treated as "current incomplete candle".
            # Buy / sell signals are shifted by 1 to compensate for this.
            # entry / exit signals are shifted by 1 to compensate for this.
            row = data[pair][row_index]
        except IndexError:
            # missing Data for one pair at the end.

@@ -960,14 +979,14 @@ class Backtesting:
                self.dataprovider._set_dataframe_max_index(row_index)

                for t in list(open_trades[pair]):
                    # 1. Cancel expired buy/sell orders.
                    # 1. Cancel expired entry/exit orders.
                    if self.check_order_cancel(t, current_time):
                        # Close trade due to buy timeout expiration.
                        # Close trade due to entry timeout expiration.
                        open_trade_count -= 1
                        open_trades[pair].remove(t)
                        self.wallets.update()

                # 2. Process buys.
                # 2. Process entries.
                # without positionstacking, we can only have one open trade per pair.
                # max_open_trades must be respected
                # don't open on the last row

@@ -977,13 +996,13 @@ class Backtesting:
                    and self.trade_slot_available(max_open_trades, open_trade_count_start)
                    and current_time != end_date
                    and trade_dir is not None
                    and not PairLocks.is_pair_locked(pair, row[DATE_IDX])
                    and not PairLocks.is_pair_locked(pair, row[DATE_IDX], trade_dir)
                ):
                    trade = self._enter_trade(pair, row, trade_dir)
                    if trade:
                        # TODO: hacky workaround to avoid opening > max_open_trades
                        # This emulates previous behavior - not sure if this is correct
                        # Prevents buying if the trade-slot was freed in this candle
                        # Prevents entering if the trade-slot was freed in this candle
                        open_trade_count_start += 1
                        open_trade_count += 1
                        # logger.debug(f"{pair} - Emulate creation of new trade: {trade}.")

@@ -998,11 +1017,11 @@ class Backtesting:
                    LocalTrade.add_bt_trade(trade)
                    self.wallets.update()

                    # 4. Create sell orders (if any)
                    # 4. Create exit orders (if any)
                    if not trade.open_order_id:
                        self._get_sell_trade_entry(trade, row)  # Place sell order if necessary
                        self._get_exit_trade_entry(trade, row)  # Place exit order if necessary

                    # 5. Process sell orders.
                    # 5. Process exit orders.
                    order = trade.select_order(trade.exit_side, is_open=True)
                    if order and self._get_order_filled(order.price, row):
                        trade.open_order_id = None

@@ -1015,13 +1034,14 @@ class Backtesting:
                        trade.close_date = current_time
                        trade.close(order.price, show_msg=False)

                        # logger.debug(f"{pair} - Backtesting sell {trade}")
                        # logger.debug(f"{pair} - Backtesting exit {trade}")
                        open_trade_count -= 1
                        open_trades[pair].remove(trade)
                        LocalTrade.close_bt_trade(trade)
                        trades.append(trade)
                        self.run_protections(enable_protections, pair, current_time)
                        self.wallets.update()
                        self.run_protections(
                            enable_protections, pair, current_time, trade.trade_direction)

            # Move time one configured time_interval ahead.
            self.progress.increment()

@@ -1045,7 +1065,7 @@ class Backtesting:
                      timerange: TimeRange):
        self.progress.init_step(BacktestState.ANALYZE, 0)

        logger.info("Running backtesting for Strategy %s", strat.get_strategy_name())
        logger.info(f"Running backtesting for Strategy {strat.get_strategy_name()}")
        backtest_start_time = datetime.now(timezone.utc)
        self._set_strategy(strat)

@@ -1071,7 +1091,7 @@ class Backtesting:
                "No data left after adjusting for startup candles.")

        # Use preprocessed_tmp for date generation (the trimmed dataframe).
        # Backtesting will re-trim the dataframes after buy/sell signal generation.
        # Backtesting will re-trim the dataframes after entry/exit signal generation.
        min_date, max_date = history.get_timerange(preprocessed_tmp)
        logger.info(f'Backtesting with data from {min_date.strftime(DATETIME_PRINT_FORMAT)} '
                    f'up to {max_date.strftime(DATETIME_PRINT_FORMAT)} '

@@ -1093,8 +1113,31 @@ class Backtesting:
        })
        self.all_results[self.strategy.get_strategy_name()] = results

        if (self.config.get('export', 'none') == 'signals' and
                self.dataprovider.runmode == RunMode.BACKTEST):
            self._generate_trade_signal_candles(preprocessed_tmp, results)

        return min_date, max_date

    def _generate_trade_signal_candles(self, preprocessed_df, bt_results):
        signal_candles_only = {}
        for pair in preprocessed_df.keys():
            signal_candles_only_df = DataFrame()

            pairdf = preprocessed_df[pair]
            resdf = bt_results['results']
            pairresults = resdf.loc[(resdf["pair"] == pair)]

            if pairdf.shape[0] > 0:
                for t, v in pairresults.open_date.items():
                    allinds = pairdf.loc[(pairdf['date'] < v)]
                    signal_inds = allinds.iloc[[-1]]
                    signal_candles_only_df = pd.concat([signal_candles_only_df, signal_inds])

                signal_candles_only[pair] = signal_candles_only_df

        self.processed_dfs[self.strategy.get_strategy_name()] = signal_candles_only
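The per-trade lookup above boils down to "take the last candle strictly before the trade's open date"; a standalone pandas sketch of that selection:

```python
import pandas as pd

candles = pd.DataFrame({
    'date': pd.date_range('2022-01-01', periods=5, freq='5min'),
    'rsi': [28, 31, 35, 40, 44],
})
open_date = pd.Timestamp('2022-01-01 00:12')  # trade opened mid-candle

# Last candle dated before the trade opened - the candle the signal fired on
signal_candle = candles.loc[candles['date'] < open_date].iloc[[-1]]
print(signal_candle)  # the 00:10 row
```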

    def _get_min_cached_backtest_date(self):
        min_backtest_date = None
        backtest_cache_age = self.config.get('backtest_cache', constants.BACKTEST_CACHE_DEFAULT)

@@ -1153,9 +1196,13 @@ class Backtesting:
        else:
            self.results = results

        if self.config.get('export', 'none') == 'trades':
        if self.config.get('export', 'none') in ('trades', 'signals'):
            store_backtest_stats(self.config['exportfilename'], self.results)

        if (self.config.get('export', 'none') == 'signals' and
                self.dataprovider.runmode == RunMode.BACKTEST):
            store_backtest_signal_candles(self.config['exportfilename'], self.processed_dfs)

        # Results may be mixed up now. Sort them so they follow --strategy-list order.
        if 'strategy_list' in self.config and len(self.results) > 0:
            self.results['strategy_comparison'] = sorted(

@@ -44,6 +44,7 @@ class EdgeCli:

        self.edge._timerange = TimeRange.parse_timerange(None if self.config.get(
            'timerange') is None else str(self.config.get('timerange')))
        self.strategy.bot_start()

    def start(self) -> None:
        result = self.edge.calculate(self.config['exchange']['pair_whitelist'])

@@ -10,7 +10,7 @@ import warnings
from datetime import datetime, timezone
from math import ceil
from pathlib import Path
from typing import Any, Dict, List, Optional
from typing import Any, Dict, List, Optional, Tuple

import progressbar
import rapidjson

@@ -290,7 +290,7 @@ class Hyperopt:
            self.assign_params(params_dict, 'protection')

        if HyperoptTools.has_space(self.config, 'roi'):
            self.backtesting.strategy.minimal_roi = (  # type: ignore
            self.backtesting.strategy.minimal_roi = (
                self.custom_hyperopt.generate_roi_table(params_dict))

        if HyperoptTools.has_space(self.config, 'stoploss'):

@@ -409,6 +409,51 @@ class Hyperopt:
        # Store non-trimmed data - will be trimmed after signal generation.
        dump(preprocessed, self.data_pickle_file)

    def get_asked_points(self, n_points: int) -> Tuple[List[List[Any]], List[bool]]:
        """
        Enforce points returned from `self.opt.ask` have not been already evaluated

        Steps:
        1. Try to get points using `self.opt.ask` first
        2. Discard the points that have already been evaluated
        3. Retry using `self.opt.ask` up to 3 times
        4. If still some points are missing in respect to `n_points`, random sample some points
        5. Repeat until at least `n_points` points in the `asked_non_tried` list
        6. Return a list with length truncated at `n_points`
        """
        def unique_list(a_list):
            new_list = []
            for item in a_list:
                if item not in new_list:
                    new_list.append(item)
            return new_list
        i = 0
        asked_non_tried: List[List[Any]] = []
        is_random: List[bool] = []
        while i < 5 and len(asked_non_tried) < n_points:
            if i < 3:
                self.opt.cache_ = {}
                asked = unique_list(self.opt.ask(n_points=n_points * 5))
                is_random = [False for _ in range(len(asked))]
            else:
                asked = unique_list(self.opt.space.rvs(n_samples=n_points * 5))
                is_random = [True for _ in range(len(asked))]
            is_random += [rand for x, rand in zip(asked, is_random)
                          if x not in self.opt.Xi
                          and x not in asked_non_tried]
            asked_non_tried += [x for x in asked
                                if x not in self.opt.Xi
                                and x not in asked_non_tried]
            i += 1

        if asked_non_tried:
            return (
                asked_non_tried[:min(len(asked_non_tried), n_points)],
                is_random[:min(len(asked_non_tried), n_points)]
            )
        else:
            return self.opt.ask(n_points=n_points), [False for _ in range(n_points)]
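Stripped of the optimizer plumbing, the core of `get_asked_points` is an order-preserving de-duplication of candidate points against everything already evaluated; a minimal standalone sketch:

```python
def dedupe_candidates(candidates, already_tried, n_points):
    """Keep candidates not yet evaluated, preserving order, up to n_points."""
    fresh = []
    for point in candidates:
        if point not in already_tried and point not in fresh:
            fresh.append(point)
    return fresh[:n_points]


tried = [[1, 'a'], [2, 'b']]
asked = [[2, 'b'], [3, 'c'], [3, 'c'], [4, 'd']]
print(dedupe_candidates(asked, tried, 2))  # [[3, 'c'], [4, 'd']]
```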

    def start(self) -> None:
        self.random_state = self._set_random_state(self.config.get('hyperopt_random_state', None))
        logger.info(f"Using optimizer random state: {self.random_state}")

@@ -420,9 +465,10 @@ class Hyperopt:

        # We don't need exchange instance anymore while running hyperopt
        self.backtesting.exchange.close()
        self.backtesting.exchange._api = None  # type: ignore
        self.backtesting.exchange._api_async = None  # type: ignore
        self.backtesting.exchange._api = None
        self.backtesting.exchange._api_async = None
        self.backtesting.exchange.loop = None  # type: ignore
        self.backtesting.exchange._loop_lock = None  # type: ignore
        # self.backtesting.exchange = None  # type: ignore
        self.backtesting.pairlists = None  # type: ignore

@@ -473,7 +519,7 @@ class Hyperopt:
                n_rest = (i + 1) * jobs - self.total_epochs
                current_jobs = jobs - n_rest if n_rest > 0 else jobs

                asked = self.opt.ask(n_points=current_jobs)
                asked, is_random = self.get_asked_points(n_points=current_jobs)
                f_val = self.run_optimizer_parallel(parallel, asked, i)
                self.opt.tell(asked, [v['loss'] for v in f_val])

@@ -492,6 +538,7 @@ class Hyperopt:
                    # evaluations can take different time. Here they are aligned in the
                    # order they will be shown to the user.
                    val['is_best'] = is_best
                    val['is_random'] = is_random[j]
                    self.print_results(val)

                    if is_best:

@@ -10,7 +10,7 @@ from typing import Any, Dict

from pandas import DataFrame

from freqtrade.data.btanalysis import calculate_max_drawdown
from freqtrade.data.metrics import calculate_max_drawdown
from freqtrade.optimize.hyperopt import IHyperOptLoss


@@ -8,7 +8,7 @@ from datetime import datetime

from pandas import DataFrame

from freqtrade.data.btanalysis import calculate_max_drawdown
from freqtrade.data.metrics import calculate_max_drawdown
from freqtrade.optimize.hyperopt import IHyperOptLoss


@@ -0,0 +1,47 @@
"""
MaxDrawDownRelativeHyperOptLoss

This module defines the alternative HyperOptLoss class which can be used for
Hyperoptimization.
"""
from typing import Dict

from pandas import DataFrame

from freqtrade.data.metrics import calculate_underwater
from freqtrade.optimize.hyperopt import IHyperOptLoss


class MaxDrawDownRelativeHyperOptLoss(IHyperOptLoss):

    """
    Defines the loss function for hyperopt.

    This implementation optimizes for max draw down and profit
    Less max drawdown more profit -> Lower return value
    """

    @staticmethod
    def hyperopt_loss_function(results: DataFrame, config: Dict,
                               *args, **kwargs) -> float:

        """
        Objective function.

        Uses profit ratio weighted max_drawdown when drawdown is available.
        Otherwise directly optimizes profit ratio.
        """
        total_profit = results['profit_abs'].sum()
        try:
            drawdown_df = calculate_underwater(
                results,
                value_col='profit_abs',
                starting_balance=config['dry_run_wallet']
            )
            max_drawdown = abs(min(drawdown_df['drawdown']))
            relative_drawdown = max(drawdown_df['drawdown_relative'])
            if max_drawdown == 0:
                return -total_profit
            return -total_profit / max_drawdown / relative_drawdown
        except (Exception, ValueError):
            return -total_profit
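To see how this loss trades profit against drawdown, a quick numeric comparison (values are made up; lower is better for hyperopt):

```python
def relative_drawdown_loss(total_profit, max_drawdown, relative_drawdown):
    # Mirrors the return expression of the loss function above
    if max_drawdown == 0:
        return -total_profit
    return -total_profit / max_drawdown / relative_drawdown


# Same profit, but a deeper relative drawdown produces a worse (higher) loss
print(relative_drawdown_loss(100.0, 20.0, 0.05))  # -100.0
print(relative_drawdown_loss(100.0, 20.0, 0.20))  # -25.0
```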

@@ -9,7 +9,7 @@ individual needs.
"""
from pandas import DataFrame

from freqtrade.data.btanalysis import calculate_max_drawdown
from freqtrade.data.metrics import calculate_max_drawdown
from freqtrade.optimize.hyperopt import IHyperOptLoss


@@ -19,11 +19,11 @@ class IHyperOptLoss(ABC):

    @staticmethod
    @abstractmethod
    def hyperopt_loss_function(results: DataFrame, trade_count: int,
    def hyperopt_loss_function(*, results: DataFrame, trade_count: int,
                               min_date: datetime, max_date: datetime,
                               config: Dict, processed: Dict[str, DataFrame],
                               backtest_stats: Dict[str, Any],
                               *args, **kwargs) -> float:
                               **kwargs) -> float:
        """
        Objective function, returns smaller number for better results
        """

@@ -41,7 +41,8 @@ class HyperoptTools():
        """
        from freqtrade.resolvers.strategy_resolver import StrategyResolver
        directory = Path(config.get('strategy_path', config['user_data_dir'] / USERPATH_STRATEGIES))
        strategy_objs = StrategyResolver.search_all_objects(directory, False)
        strategy_objs = StrategyResolver.search_all_objects(
            directory, False, config.get('recursive_strategy_search', False))
        strategies = [s for s in strategy_objs if s['name'] == strategy_name]
        if strategies:
            strategy = strategies[0]

@@ -310,6 +311,8 @@ class HyperoptTools():
        if not has_drawdown:
            # Ensure compatibility with older versions of hyperopt results
            trials['results_metrics.max_drawdown_account'] = None
        if 'is_random' not in trials.columns:
            trials['is_random'] = False

        # New mode, using backtest result for metrics
        trials['results_metrics.winsdrawslosses'] = trials.apply(

@@ -322,12 +325,12 @@ class HyperoptTools():
            'results_metrics.profit_total', 'results_metrics.holding_avg',
            'results_metrics.max_drawdown',
            'results_metrics.max_drawdown_account', 'results_metrics.max_drawdown_abs',
            'loss', 'is_initial_point', 'is_best']]
            'loss', 'is_initial_point', 'is_random', 'is_best']]

        trials.columns = [
            'Best', 'Epoch', 'Trades', ' Win Draw Loss', 'Avg profit',
            'Total profit', 'Profit', 'Avg duration', 'max_drawdown', 'max_drawdown_account',
            'max_drawdown_abs', 'Objective', 'is_initial_point', 'is_best'
            'max_drawdown_abs', 'Objective', 'is_initial_point', 'is_random', 'is_best'
        ]

        return trials

@@ -349,9 +352,11 @@ class HyperoptTools():
        trials = HyperoptTools.prepare_trials_columns(trials, has_account_drawdown)

        trials['is_profit'] = False
        trials.loc[trials['is_initial_point'], 'Best'] = '* '
        trials.loc[trials['is_initial_point'] | trials['is_random'], 'Best'] = '* '
        trials.loc[trials['is_best'], 'Best'] = 'Best'
        trials.loc[trials['is_initial_point'] & trials['is_best'], 'Best'] = '* Best'
        trials.loc[
            (trials['is_initial_point'] | trials['is_random']) & trials['is_best'],
            'Best'] = '* Best'
        trials.loc[trials['Total profit'] > 0, 'is_profit'] = True
        trials['Trades'] = trials['Trades'].astype(str)
        # perc_multi = 1 if legacy_mode else 100

@@ -390,8 +395,8 @@ class HyperoptTools():
            lambda x: '{} {}'.format(
                round_coin_value(x['Total profit'], stake_currency, keep_trailing_zeros=True),
                f"({x['Profit']:,.2%})".rjust(10, ' ')
            ).rjust(25+len(stake_currency))
            if x['Total profit'] != 0.0 else '--'.rjust(25+len(stake_currency)),
            ).rjust(25 + len(stake_currency))
            if x['Total profit'] != 0.0 else '--'.rjust(25 + len(stake_currency)),
            axis=1
        )
        trials = trials.drop(columns=['Total profit'])

@@ -399,15 +404,15 @@ class HyperoptTools():
        if print_colorized:
            for i in range(len(trials)):
                if trials.loc[i]['is_profit']:
                    for j in range(len(trials.loc[i])-3):
                    for j in range(len(trials.loc[i]) - 3):
                        trials.iat[i, j] = "{}{}{}".format(Fore.GREEN,
                                                           str(trials.loc[i][j]), Fore.RESET)
                if trials.loc[i]['is_best'] and highlight_best:
                    for j in range(len(trials.loc[i])-3):
                    for j in range(len(trials.loc[i]) - 3):
                        trials.iat[i, j] = "{}{}{}".format(Style.BRIGHT,
                                                           str(trials.loc[i][j]), Style.RESET_ALL)

        trials = trials.drop(columns=['is_initial_point', 'is_best', 'is_profit'])
        trials = trials.drop(columns=['is_initial_point', 'is_best', 'is_profit', 'is_random'])
        if remove_header > 0:
            table = tabulate.tabulate(
                trials.to_dict(orient='list'), tablefmt='orgtbl',

@@ -459,7 +464,7 @@ class HyperoptTools():
                        'loss', 'is_initial_point', 'is_best']
        perc_multi = 100

        param_metrics = [("params_dict."+param) for param in results[0]['params_dict'].keys()]
        param_metrics = [("params_dict." + param) for param in results[0]['params_dict'].keys()]
        trials = trials[base_metrics + param_metrics]

        base_columns = ['Best', 'Epoch', 'Trades', 'Avg profit', 'Median profit', 'Total profit',

@@ -9,10 +9,10 @@ from pandas import DataFrame, to_datetime
from tabulate import tabulate

from freqtrade.constants import DATETIME_PRINT_FORMAT, LAST_BT_RESULT_FN, UNLIMITED_STAKE_AMOUNT
from freqtrade.data.btanalysis import (calculate_csum, calculate_market_change,
                                       calculate_max_drawdown)
from freqtrade.misc import (decimals_per_coin, file_dump_json, get_backtest_metadata_filename,
                            round_coin_value)
from freqtrade.data.metrics import (calculate_cagr, calculate_csum, calculate_market_change,
                                    calculate_max_drawdown)
from freqtrade.misc import decimals_per_coin, file_dump_joblib, file_dump_json, round_coin_value
from freqtrade.optimize.backtest_caching import get_backtest_metadata_filename


logger = logging.getLogger(__name__)

@@ -45,6 +45,29 @@ def store_backtest_stats(recordfilename: Path, stats: Dict[str, DataFrame]) -> N
    file_dump_json(latest_filename, {'latest_backtest': str(filename.name)})


def store_backtest_signal_candles(recordfilename: Path, candles: Dict[str, Dict]) -> Path:
    """
    Stores backtest trade signal candles
    :param recordfilename: Path object, which can either be a filename or a directory.
        Filenames will be appended with a timestamp right before the suffix
        while for directories, <directory>/backtest-result-<datetime>_signals.pkl will be used
        as filename
    :param candles: Dict containing the backtesting signal candles
    """
    if recordfilename.is_dir():
        filename = (recordfilename /
                    f'backtest-result-{datetime.now().strftime("%Y-%m-%d_%H-%M-%S")}_signals.pkl')
    else:
        filename = Path.joinpath(
            recordfilename.parent,
            f'{recordfilename.stem}-{datetime.now().strftime("%Y-%m-%d_%H-%M-%S")}_signals.pkl'
        )

    file_dump_joblib(filename, candles)

    return filename
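Since the signal candles are written with joblib (see `file_dump_joblib` above), reading an exported file back is a one-liner; the path below is illustrative:

```python
from pathlib import Path

import joblib

# Illustrative export path produced by `--export signals`
signals_file = Path('user_data/backtest_results/backtest-result-2022-04-01_12-00-00_signals.pkl')
signal_candles = joblib.load(signals_file)

# One DataFrame of signal candles per strategy and pair
for strategy, pairs in signal_candles.items():
    for pair, df in pairs.items():
        print(strategy, pair, len(df))
```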


def _get_line_floatfmt(stake_currency: str) -> List[str]:
    """
    Generate floatformat (goes in line with _generate_result_line())

@@ -241,7 +264,7 @@ def generate_edge_table(results: dict) -> str:

    # Ignore type as floatfmt does allow tuples but mypy does not know that
    return tabulate(tabular_data, headers=headers,
                    floatfmt=floatfmt, tablefmt="orgtbl", stralign="right")  # type: ignore
                    floatfmt=floatfmt, tablefmt="orgtbl", stralign="right")


def _get_resample_from_period(period: str) -> str:

@@ -423,6 +446,7 @@ def generate_strategy_stats(pairlist: List[str],
        'profit_total_abs': results['profit_abs'].sum(),
        'profit_total_long_abs': results.loc[~results['is_short'], 'profit_abs'].sum(),
        'profit_total_short_abs': results.loc[results['is_short'], 'profit_abs'].sum(),
        'cagr': calculate_cagr(backtest_days, start_balance, content['final_balance']),
        'backtest_start': min_date.strftime(DATETIME_PRINT_FORMAT),
        'backtest_start_ts': int(min_date.timestamp() * 1000),
        'backtest_end': max_date.strftime(DATETIME_PRINT_FORMAT),

@@ -474,9 +498,12 @@ def generate_strategy_stats(pairlist: List[str],
        (drawdown_abs, drawdown_start, drawdown_end, high_val, low_val,
         max_drawdown) = calculate_max_drawdown(
            results, value_col='profit_abs', starting_balance=start_balance)
        (_, _, _, _, _, max_relative_drawdown) = calculate_max_drawdown(
            results, value_col='profit_abs', starting_balance=start_balance, relative=True)
        strat_stats.update({
            'max_drawdown': max_drawdown_legacy,  # Deprecated - do not use
            'max_drawdown_account': max_drawdown,
            'max_relative_drawdown': max_relative_drawdown,
            'max_drawdown_abs': drawdown_abs,
            'drawdown_start': drawdown_start.strftime(DATETIME_PRINT_FORMAT),
            'drawdown_start_ts': drawdown_start.timestamp() * 1000,

@@ -497,6 +524,7 @@ def generate_strategy_stats(pairlist: List[str],
        strat_stats.update({
            'max_drawdown': 0.0,
            'max_drawdown_account': 0.0,
            'max_relative_drawdown': 0.0,
            'max_drawdown_abs': 0.0,
            'max_drawdown_low': 0.0,
            'max_drawdown_high': 0.0,

@@ -705,6 +733,26 @@ def text_table_add_metrics(strat_results: Dict) -> str:
                                         strat_results['stake_currency'])),
    ] if strat_results.get('trade_count_short', 0) > 0 else []

    drawdown_metrics = []
    if 'max_relative_drawdown' in strat_results:
        # Compatibility to show old hyperopt results
        drawdown_metrics.append(
            ('Max % of account underwater', f"{strat_results['max_relative_drawdown']:.2%}")
        )
    drawdown_metrics.extend([
        ('Absolute Drawdown (Account)', f"{strat_results['max_drawdown_account']:.2%}")
        if 'max_drawdown_account' in strat_results else (
            'Drawdown', f"{strat_results['max_drawdown']:.2%}"),
        ('Absolute Drawdown', round_coin_value(strat_results['max_drawdown_abs'],
                                               strat_results['stake_currency'])),
        ('Drawdown high', round_coin_value(strat_results['max_drawdown_high'],
                                           strat_results['stake_currency'])),
        ('Drawdown low', round_coin_value(strat_results['max_drawdown_low'],
                                          strat_results['stake_currency'])),
        ('Drawdown Start', strat_results['drawdown_start']),
        ('Drawdown End', strat_results['drawdown_end']),
    ])

    # Newly added fields should be ignored if they are missing in strat_results. hyperopt-show
    # command stores these results and newer version of freqtrade must be able to handle old
    # results with missing new fields.

@@ -723,6 +771,7 @@ def text_table_add_metrics(strat_results: Dict) -> str:
        ('Absolute profit ', round_coin_value(strat_results['profit_total_abs'],
                                              strat_results['stake_currency'])),
        ('Total profit %', f"{strat_results['profit_total']:.2%}"),
        ('CAGR %', f"{strat_results['cagr']:.2%}" if 'cagr' in strat_results else 'N/A'),
        ('Trades per day', strat_results['trades_per_day']),
        ('Avg. daily profit %',
         f"{(strat_results['profit_total'] / strat_results['backtest_days']):.2%}"),

@@ -759,18 +808,7 @@ def text_table_add_metrics(strat_results: Dict) -> str:
        ('Max balance', round_coin_value(strat_results['csum_max'],
                                         strat_results['stake_currency'])),

        # Compatibility to show old hyperopt results
        ('Drawdown (Account)', f"{strat_results['max_drawdown_account']:.2%}")
        if 'max_drawdown_account' in strat_results else (
            'Drawdown', f"{strat_results['max_drawdown']:.2%}"),
        ('Drawdown', round_coin_value(strat_results['max_drawdown_abs'],
                                      strat_results['stake_currency'])),
        ('Drawdown high', round_coin_value(strat_results['max_drawdown_high'],
                                           strat_results['stake_currency'])),
        ('Drawdown low', round_coin_value(strat_results['max_drawdown_low'],
                                          strat_results['stake_currency'])),
        ('Drawdown Start', strat_results['drawdown_start']),
        ('Drawdown End', strat_results['drawdown_end']),
        *drawdown_metrics,
        ('Market change', f"{strat_results['market_change']:.2%}"),
    ]


@@ -3,11 +3,13 @@ from typing import List

from sqlalchemy import inspect, text

from freqtrade.exceptions import OperationalException


logger = logging.getLogger(__name__)


def get_table_names_for_table(inspector, tabletype):
def get_table_names_for_table(inspector, tabletype) -> List[str]:
    return [t for t in inspector.get_table_names() if t.startswith(tabletype)]


@@ -19,7 +21,7 @@ def get_column_def(columns: List, column: str, default: str) -> str:
    return default if not has_column(columns, column) else column


def get_backup_name(tabs, backup_prefix: str):
def get_backup_name(tabs: List[str], backup_prefix: str):
    table_back_name = backup_prefix
    for i, table_back_name in enumerate(tabs):
        table_back_name = f'{backup_prefix}{i}'

@@ -54,10 +56,22 @@ def set_sequence_ids(engine, order_id, trade_id):
            connection.execute(text(f"ALTER SEQUENCE trades_id_seq RESTART WITH {trade_id}"))


def drop_index_on_table(engine, inspector, table_bak_name):
    with engine.begin() as connection:
        # drop indexes on backup table in new session
        for index in inspector.get_indexes(table_bak_name):
            if engine.name == 'mysql':
                connection.execute(text(f"drop index {index['name']} on {table_bak_name}"))
            else:
                connection.execute(text(f"drop index {index['name']}"))


def migrate_trades_and_orders_table(
        decl_base, inspector, engine,
        trade_back_name: str, cols: List,
        order_back_name: str, cols_order: List):
    base_currency = get_column_def(cols, 'base_currency', 'null')
    stake_currency = get_column_def(cols, 'stake_currency', 'null')
    fee_open = get_column_def(cols, 'fee_open', 'fee')
    fee_open_cost = get_column_def(cols, 'fee_open_cost', 'null')
    fee_open_currency = get_column_def(cols, 'fee_open_currency', 'null')

@@ -112,13 +126,7 @@ def migrate_trades_and_orders_table(
    with engine.begin() as connection:
        connection.execute(text(f"alter table trades rename to {trade_back_name}"))

    with engine.begin() as connection:
        # drop indexes on backup table in new session
        for index in inspector.get_indexes(trade_back_name):
            if engine.name == 'mysql':
                connection.execute(text(f"drop index {index['name']} on {trade_back_name}"))
            else:
                connection.execute(text(f"drop index {index['name']}"))
    drop_index_on_table(engine, inspector, trade_back_name)

    order_id, trade_id = get_last_sequence_ids(engine, trade_back_name, order_back_name)

@@ -130,7 +138,7 @@ def migrate_trades_and_orders_table(
    # Copy data back - following the correct schema
    with engine.begin() as connection:
        connection.execute(text(f"""insert into trades
            (id, exchange, pair, is_open,
            (id, exchange, pair, base_currency, stake_currency, is_open,
            fee_open, fee_open_cost, fee_open_currency,
            fee_close, fee_close_cost, fee_close_currency, open_rate,
            open_rate_requested, close_rate, close_rate_requested, close_profit,

@@ -142,7 +150,8 @@ def migrate_trades_and_orders_table(
            trading_mode, leverage, liquidation_price, is_short,
            interest_rate, funding_fees
            )
        select id, lower(exchange), pair,
        select id, lower(exchange), pair, {base_currency} base_currency,
            {stake_currency} stake_currency,
            is_open, {fee_open} fee_open, {fee_open_cost} fee_open_cost,
            {fee_open_currency} fee_open_currency, {fee_close} fee_close,
            {fee_close_cost} fee_close_cost, {fee_close_currency} fee_close_currency,

@@ -154,10 +163,10 @@ def migrate_trades_and_orders_table(
            {initial_stop_loss_pct} initial_stop_loss_pct,
            {stoploss_order_id} stoploss_order_id, {stoploss_last_update} stoploss_last_update,
            {max_rate} max_rate, {min_rate} min_rate,
            case when {exit_reason} == 'sell_signal' then 'exit_signal'
                 when {exit_reason} == 'custom_sell' then 'custom_exit'
                 when {exit_reason} == 'force_sell' then 'force_exit'
                 when {exit_reason} == 'emergency_sell' then 'emergency_exit'
            case when {exit_reason} = 'sell_signal' then 'exit_signal'
                 when {exit_reason} = 'custom_sell' then 'custom_exit'
                 when {exit_reason} = 'force_sell' then 'force_exit'
                 when {exit_reason} = 'emergency_sell' then 'emergency_exit'
            else {exit_reason}
            end exit_reason,
            {exit_order_status} exit_order_status,

@@ -173,23 +182,6 @@ def migrate_trades_and_orders_table(
    set_sequence_ids(engine, order_id, trade_id)


def migrate_open_orders_to_trades(engine):
    with engine.begin() as connection:
        connection.execute(text("""
        insert into orders (ft_trade_id, ft_pair, order_id, ft_order_side, ft_is_open)
        select id ft_trade_id, pair ft_pair, open_order_id,
            case when close_rate_requested is null then 'buy'
            else 'sell' end ft_order_side, 1 ft_is_open
        from trades
        where open_order_id is not null
        union all
        select id ft_trade_id, pair ft_pair, stoploss_order_id order_id,
            'stoploss' ft_order_side, 1 ft_is_open
        from trades
        where stoploss_order_id is not null
        """))
|
||||
|
||||
|
||||
def drop_orders_table(engine, table_back_name: str):
|
||||
# Drop and recreate orders table as backup
|
||||
# This drops foreign keys, too.
|
||||
@@ -207,7 +199,7 @@ def migrate_orders_table(engine, table_back_name: str, cols_order: List):
|
||||
# sqlite does not support literals for booleans
|
||||
with engine.begin() as connection:
|
||||
connection.execute(text(f"""
|
||||
insert into orders ( id, ft_trade_id, ft_order_side, ft_pair, ft_is_open, order_id,
|
||||
insert into orders (id, ft_trade_id, ft_order_side, ft_pair, ft_is_open, order_id,
|
||||
status, symbol, order_type, side, price, amount, filled, average, remaining, cost,
|
||||
order_date, order_filled_date, order_update_date, ft_fee_base)
|
||||
select id, ft_trade_id, ft_order_side, ft_pair, ft_is_open, order_id,
|
||||
@@ -217,6 +209,31 @@ def migrate_orders_table(engine, table_back_name: str, cols_order: List):
|
||||
"""))
|
||||
|
||||
|
||||
def migrate_pairlocks_table(
|
||||
decl_base, inspector, engine,
|
||||
pairlock_back_name: str, cols: List):
|
||||
|
||||
# Schema migration necessary
|
||||
with engine.begin() as connection:
|
||||
connection.execute(text(f"alter table pairlocks rename to {pairlock_back_name}"))
|
||||
|
||||
drop_index_on_table(engine, inspector, pairlock_back_name)
|
||||
|
||||
side = get_column_def(cols, 'side', "'*'")
|
||||
|
||||
# let SQLAlchemy create the schema as required
|
||||
decl_base.metadata.create_all(engine)
|
||||
# Copy data back - following the correct schema
|
||||
with engine.begin() as connection:
|
||||
connection.execute(text(f"""insert into pairlocks
|
||||
(id, pair, side, reason, lock_time,
|
||||
lock_end_time, active)
|
||||
select id, pair, {side} side, reason, lock_time,
|
||||
lock_end_time, active
|
||||
from {pairlock_back_name}
|
||||
"""))
|
||||
|
||||
|
||||
def set_sqlite_to_wal(engine):
|
||||
if engine.name == 'sqlite' and str(engine.url) != 'sqlite://':
|
||||
# Set Mode to
|
||||
@@ -230,24 +247,38 @@ def check_migrate(engine, decl_base, previous_tables) -> None:
|
||||
"""
|
||||
inspector = inspect(engine)
|
||||
|
||||
cols = inspector.get_columns('trades')
|
||||
cols_trades = inspector.get_columns('trades')
|
||||
cols_orders = inspector.get_columns('orders')
|
||||
cols_pairlocks = inspector.get_columns('pairlocks')
|
||||
tabs = get_table_names_for_table(inspector, 'trades')
|
||||
table_back_name = get_backup_name(tabs, 'trades_bak')
|
||||
order_tabs = get_table_names_for_table(inspector, 'orders')
|
||||
order_table_bak_name = get_backup_name(order_tabs, 'orders_bak')
|
||||
pairlock_tabs = get_table_names_for_table(inspector, 'pairlocks')
|
||||
pairlock_table_bak_name = get_backup_name(pairlock_tabs, 'pairlocks_bak')
|
||||
|
||||
# Check if migration necessary
|
||||
# Migrates both trades and orders table!
|
||||
# if ('orders' not in previous_tables
|
||||
# or not has_column(cols_orders, 'leverage')):
|
||||
if not has_column(cols, 'exit_order_status'):
|
||||
if not has_column(cols_trades, 'base_currency'):
|
||||
logger.info(f"Running database migration for trades - "
|
||||
f"backup: {table_back_name}, {order_table_bak_name}")
|
||||
migrate_trades_and_orders_table(
|
||||
decl_base, inspector, engine, table_back_name, cols, order_table_bak_name, cols_orders)
|
||||
decl_base, inspector, engine, table_back_name, cols_trades,
|
||||
order_table_bak_name, cols_orders)
|
||||
|
||||
if not has_column(cols_pairlocks, 'side'):
|
||||
logger.info(f"Running database migration for pairlocks - "
|
||||
f"backup: {pairlock_table_bak_name}")
|
||||
|
||||
migrate_pairlocks_table(
|
||||
decl_base, inspector, engine, pairlock_table_bak_name, cols_pairlocks
|
||||
)
|
||||
if 'orders' not in previous_tables and 'trades' in previous_tables:
|
||||
logger.info('Moving open orders to Orders table.')
|
||||
migrate_open_orders_to_trades(engine)
|
||||
raise OperationalException(
|
||||
"Your database seems to be very old. "
|
||||
"Please update to freqtrade 2022.3 to migrate this database or "
|
||||
"start with a fresh database.")
|
||||
|
||||
set_sqlite_to_wal(engine)
|
||||
|
||||
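The backup-name helper typed above derives a fresh suffix from however many matching tables already exist. A standalone sketch of its behaviour (copy for illustration only; the explicit return is added for the sketch):

# Sketch: standalone copy of get_backup_name to show the naming rule.
from typing import List

def get_backup_name(tabs: List[str], backup_prefix: str):
    table_back_name = backup_prefix
    for i, table_back_name in enumerate(tabs):
        table_back_name = f'{backup_prefix}{i}'
    return table_back_name

# With 'trades' plus one earlier backup present, the next backup gets suffix 1:
print(get_backup_name(['trades', 'trades_bak0'], 'trades_bak'))  # trades_bak1
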
@@ -8,13 +8,14 @@ from math import isclose
from typing import Any, Dict, List, Optional

from sqlalchemy import (Boolean, Column, DateTime, Enum, Float, ForeignKey, Integer, String,
                        create_engine, desc, func, inspect)
                        create_engine, desc, func, inspect, or_)
from sqlalchemy.exc import NoSuchModuleError
from sqlalchemy.orm import Query, declarative_base, relationship, scoped_session, sessionmaker
from sqlalchemy.pool import StaticPool
from sqlalchemy.sql.schema import UniqueConstraint

from freqtrade.constants import DATETIME_PRINT_FORMAT, MATH_CLOSE_PREC, NON_OPEN_EXCHANGE_STATES
from freqtrade.constants import (DATETIME_PRINT_FORMAT, MATH_CLOSE_PREC, NON_OPEN_EXCHANGE_STATES,
                                 LongShort)
from freqtrade.enums import ExitType, TradingMode
from freqtrade.exceptions import DependencyException, OperationalException
from freqtrade.leverage import interest
@@ -281,6 +282,8 @@ class LocalTrade():

    exchange: str = ''
    pair: str = ''
    base_currency: str = ''
    stake_currency: str = ''
    is_open: bool = True
    fee_open: float = 0.0
    fee_open_cost: Optional[float] = None
@@ -393,12 +396,32 @@ class LocalTrade():
            return "sell"

    @property
    def trade_direction(self) -> str:
    def trade_direction(self) -> LongShort:
        if self.is_short:
            return "short"
        else:
            return "long"

    @property
    def safe_base_currency(self) -> str:
        """
        Compatibility layer for asset - which can be empty for old trades.
        """
        try:
            return self.base_currency or self.pair.split('/')[0]
        except IndexError:
            return ''

    @property
    def safe_quote_currency(self) -> str:
        """
        Compatibility layer for asset - which can be empty for old trades.
        """
        try:
            return self.stake_currency or self.pair.split('/')[1].split(':')[0]
        except IndexError:
            return ''

    def __init__(self, **kwargs):
        for key in kwargs:
            setattr(self, key, kwargs[key])
@@ -409,12 +432,10 @@ class LocalTrade():

    def __repr__(self):
        open_since = self.open_date.strftime(DATETIME_PRINT_FORMAT) if self.is_open else 'closed'
        leverage = self.leverage or 1.0
        is_short = self.is_short or False

        return (
            f'Trade(id={self.id}, pair={self.pair}, amount={self.amount:.8f}, '
            f'is_short={is_short}, leverage={leverage}, '
            f'is_short={self.is_short or False}, leverage={self.leverage or 1.0}, '
            f'open_rate={self.open_rate:.8f}, open_since={open_since})'
        )

@@ -425,6 +446,8 @@ class LocalTrade():
        return {
            'trade_id': self.id,
            'pair': self.pair,
            'base_currency': self.safe_base_currency,
            'quote_currency': self.safe_quote_currency,
            'is_open': self.is_open,
            'exchange': self.exchange,
            'amount': round(self.amount, 8),
@@ -1092,6 +1115,8 @@ class Trade(_DECL_BASE, LocalTrade):

    exchange = Column(String(25), nullable=False)
    pair = Column(String(25), nullable=False, index=True)
    base_currency = Column(String(25), nullable=True)
    stake_currency = Column(String(25), nullable=True)
    is_open = Column(Boolean, nullable=False, default=True, index=True)
    fee_open = Column(Float, nullable=False, default=0.0)
    fee_open_cost = Column(Float, nullable=True)
@@ -1445,6 +1470,8 @@ class PairLock(_DECL_BASE):
    id = Column(Integer, primary_key=True)

    pair = Column(String(25), nullable=False, index=True)
    # lock direction - long, short or * (for both)
    side = Column(String(25), nullable=False, default="*")
    reason = Column(String(255), nullable=True)
    # Time the pair was locked (start time)
    lock_time = Column(DateTime, nullable=False)
@@ -1456,11 +1483,12 @@ class PairLock(_DECL_BASE):
    def __repr__(self):
        lock_time = self.lock_time.strftime(DATETIME_PRINT_FORMAT)
        lock_end_time = self.lock_end_time.strftime(DATETIME_PRINT_FORMAT)
        return (f'PairLock(id={self.id}, pair={self.pair}, lock_time={lock_time}, '
                f'lock_end_time={lock_end_time}, reason={self.reason}, active={self.active})')
        return (
            f'PairLock(id={self.id}, pair={self.pair}, side={self.side}, lock_time={lock_time}, '
            f'lock_end_time={lock_end_time}, reason={self.reason}, active={self.active})')

    @staticmethod
    def query_pair_locks(pair: Optional[str], now: datetime) -> Query:
    def query_pair_locks(pair: Optional[str], now: datetime, side: str = '*') -> Query:
        """
        Get all currently active locks for this pair
        :param pair: Pair to check for. Returns all current locks if pair is empty
@@ -1471,6 +1499,11 @@ class PairLock(_DECL_BASE):
                   PairLock.active.is_(True), ]
        if pair:
            filters.append(PairLock.pair == pair)
        if side != '*':
            filters.append(or_(PairLock.side == side, PairLock.side == '*'))
        else:
            filters.append(PairLock.side == '*')

        return PairLock.query.filter(
            *filters
        )
@@ -1485,5 +1518,6 @@ class PairLock(_DECL_BASE):
            'lock_end_timestamp': int(self.lock_end_time.replace(tzinfo=timezone.utc
                                                                 ).timestamp() * 1000),
            'reason': self.reason,
            'side': self.side,
            'active': self.active,
        }

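The new safe_base_currency / safe_quote_currency properties fall back to parsing the pair string, which also handles futures symbols carrying a settlement suffix. A small sketch of that parsing rule:

# Sketch of the pair-parsing fallback used by the safe_* properties.
pair = 'ETH/USDT:USDT'  # futures-style pair; spot pairs look like 'ETH/USDT'
base = pair.split('/')[0]                 # 'ETH'
quote = pair.split('/')[1].split(':')[0]  # 'USDT' (settlement suffix stripped)
print(base, quote)
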
@@ -31,7 +31,7 @@ class PairLocks():

    @staticmethod
    def lock_pair(pair: str, until: datetime, reason: str = None, *,
                  now: datetime = None) -> PairLock:
                  now: datetime = None, side: str = '*') -> PairLock:
        """
        Create PairLock from now to "until".
        Uses database by default, unless PairLocks.use_db is set to False,
@@ -40,12 +40,14 @@ class PairLocks():
        :param until: End time of the lock. Will be rounded up to the next candle.
        :param reason: Reason string that will be shown as reason for the lock
        :param now: Current timestamp. Used to determine lock start time.
        :param side: Side to lock pair, can be 'long', 'short' or '*'
        """
        lock = PairLock(
            pair=pair,
            lock_time=now or datetime.now(timezone.utc),
            lock_end_time=timeframe_to_next_date(PairLocks.timeframe, until),
            reason=reason,
            side=side,
            active=True
        )
        if PairLocks.use_db:
@@ -56,7 +58,8 @@ class PairLocks():
        return lock

    @staticmethod
    def get_pair_locks(pair: Optional[str], now: Optional[datetime] = None) -> List[PairLock]:
    def get_pair_locks(
            pair: Optional[str], now: Optional[datetime] = None, side: str = '*') -> List[PairLock]:
        """
        Get all currently active locks for this pair
        :param pair: Pair to check for. Returns all current locks if pair is empty
@@ -67,26 +70,28 @@ class PairLocks():
            now = datetime.now(timezone.utc)

        if PairLocks.use_db:
            return PairLock.query_pair_locks(pair, now).all()
            return PairLock.query_pair_locks(pair, now, side).all()
        else:
            locks = [lock for lock in PairLocks.locks if (
                lock.lock_end_time >= now
                and lock.active is True
                and (pair is None or lock.pair == pair)
                and (lock.side == '*' or lock.side == side)
            )]
            return locks

    @staticmethod
    def get_pair_longest_lock(pair: str, now: Optional[datetime] = None) -> Optional[PairLock]:
    def get_pair_longest_lock(
            pair: str, now: Optional[datetime] = None, side: str = '*') -> Optional[PairLock]:
        """
        Get the lock that expires the latest for the pair given.
        """
        locks = PairLocks.get_pair_locks(pair, now)
        locks = PairLocks.get_pair_locks(pair, now, side=side)
        locks = sorted(locks, key=lambda l: l.lock_end_time, reverse=True)
        return locks[0] if locks else None

    @staticmethod
    def unlock_pair(pair: str, now: Optional[datetime] = None) -> None:
    def unlock_pair(pair: str, now: Optional[datetime] = None, side: str = '*') -> None:
        """
        Release all locks for this pair.
        :param pair: Pair to unlock
@@ -97,7 +102,7 @@ class PairLocks():
            now = datetime.now(timezone.utc)

        logger.info(f"Releasing all locks for {pair}.")
        locks = PairLocks.get_pair_locks(pair, now)
        locks = PairLocks.get_pair_locks(pair, now, side=side)
        for lock in locks:
            lock.active = False
        if PairLocks.use_db:
@@ -134,7 +139,7 @@ class PairLocks():
            lock.active = False

    @staticmethod
    def is_global_lock(now: Optional[datetime] = None) -> bool:
    def is_global_lock(now: Optional[datetime] = None, side: str = '*') -> bool:
        """
        :param now: Datetime object (generated via datetime.now(timezone.utc)).
            defaults to datetime.now(timezone.utc)
@@ -142,10 +147,10 @@ class PairLocks():
        if not now:
            now = datetime.now(timezone.utc)

        return len(PairLocks.get_pair_locks('*', now)) > 0
        return len(PairLocks.get_pair_locks('*', now, side)) > 0

    @staticmethod
    def is_pair_locked(pair: str, now: Optional[datetime] = None) -> bool:
    def is_pair_locked(pair: str, now: Optional[datetime] = None, side: str = '*') -> bool:
        """
        :param pair: Pair to check for
        :param now: Datetime object (generated via datetime.now(timezone.utc)).
@@ -154,7 +159,10 @@ class PairLocks():
        if not now:
            now = datetime.now(timezone.utc)

        return len(PairLocks.get_pair_locks(pair, now)) > 0 or PairLocks.is_global_lock(now)
        return (
            len(PairLocks.get_pair_locks(pair, now, side)) > 0
            or PairLocks.is_global_lock(now, side)
        )

    @staticmethod
    def get_all_locks() -> List[PairLock]:

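With the side parameter, a '*' lock blocks both directions while a 'long' or 'short' lock only blocks its own side. A usage sketch against the API above (assumes freqtrade is importable; use_db=False keeps locks in memory so no database is needed):

# Usage sketch for side-aware pair locks.
from datetime import datetime, timedelta, timezone
from freqtrade.persistence import PairLocks

PairLocks.timeframe = '5m'
PairLocks.use_db = False
now = datetime.now(timezone.utc)
PairLocks.lock_pair('ETH/USDT', now + timedelta(minutes=30), 'demo', now=now, side='long')
print(PairLocks.is_pair_locked('ETH/USDT', now, side='long'))   # True
print(PairLocks.is_pair_locked('ETH/USDT', now, side='short'))  # False - lock is long-only
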
@@ -5,12 +5,13 @@ from typing import Any, Dict, List, Optional
import pandas as pd

from freqtrade.configuration import TimeRange
from freqtrade.data.btanalysis import (analyze_trade_parallelism, calculate_max_drawdown,
                                       calculate_underwater, combine_dataframes_with_mean,
                                       create_cum_profit, extract_trades_of_period, load_trades)
from freqtrade.data.btanalysis import (analyze_trade_parallelism, extract_trades_of_period,
                                       load_trades)
from freqtrade.data.converter import trim_dataframe
from freqtrade.data.dataprovider import DataProvider
from freqtrade.data.history import get_timerange, load_data
from freqtrade.data.metrics import (calculate_max_drawdown, calculate_underwater,
                                    combine_dataframes_with_mean, create_cum_profit)
from freqtrade.enums import CandleType
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import timeframe_to_prev_date, timeframe_to_seconds
@@ -158,12 +159,15 @@ def add_profit(fig, row, data: pd.DataFrame, column: str, name: str) -> make_sub


def add_max_drawdown(fig, row, trades: pd.DataFrame, df_comb: pd.DataFrame,
                     timeframe: str) -> make_subplots:
                     timeframe: str, starting_balance: float) -> make_subplots:
    """
    Add scatter points indicating max drawdown
    """
    try:
        _, highdate, lowdate, _, _, max_drawdown = calculate_max_drawdown(trades)
        _, highdate, lowdate, _, _, max_drawdown = calculate_max_drawdown(
            trades,
            starting_balance=starting_balance
        )

        drawdown = go.Scatter(
            x=[highdate, lowdate],
@@ -188,22 +192,37 @@ def add_max_drawdown(fig, row, trades: pd.DataFrame, df_comb: pd.DataFrame,
    return fig


def add_underwater(fig, row, trades: pd.DataFrame) -> make_subplots:
def add_underwater(fig, row, trades: pd.DataFrame, starting_balance: float) -> make_subplots:
    """
    Add underwater plot
    Add underwater plots
    """
    try:
        underwater = calculate_underwater(trades, value_col="profit_abs")
        underwater = calculate_underwater(
            trades,
            value_col="profit_abs",
            starting_balance=starting_balance
        )

        underwater = go.Scatter(
        underwater_plot = go.Scatter(
            x=underwater['date'],
            y=underwater['drawdown'],
            name="Underwater Plot",
            fill='tozeroy',
            fillcolor='#cc362b',
            line={'color': '#cc362b'},
            line={'color': '#cc362b'}
        )
        fig.add_trace(underwater, row, 1)

        underwater_plot_relative = go.Scatter(
            x=underwater['date'],
            y=(-underwater['drawdown_relative']),
            name="Underwater Plot (%)",
            fill='tozeroy',
            fillcolor='green',
            line={'color': 'green'}
        )

        fig.add_trace(underwater_plot, row, 1)
        fig.add_trace(underwater_plot_relative, row + 1, 1)
    except ValueError:
        logger.warning("No trades found - not plotting underwater plot")
    return fig
@@ -506,7 +525,8 @@ def generate_candlestick_graph(pair: str, data: pd.DataFrame, trades: pd.DataFra


def generate_profit_graph(pairs: str, data: Dict[str, pd.DataFrame],
                          trades: pd.DataFrame, timeframe: str, stake_currency: str) -> go.Figure:
                          trades: pd.DataFrame, timeframe: str, stake_currency: str,
                          starting_balance: float) -> go.Figure:
    # Combine close-values for all pairs, rename columns to "pair"
    try:
        df_comb = combine_dataframes_with_mean(data, "close")
@@ -530,8 +550,8 @@ def generate_profit_graph(pairs: str, data: Dict[str, pd.DataFrame],
        name='Avg close price',
    )

    fig = make_subplots(rows=5, cols=1, shared_xaxes=True,
                        row_heights=[1, 1, 1, 0.5, 1],
    fig = make_subplots(rows=6, cols=1, shared_xaxes=True,
                        row_heights=[1, 1, 1, 0.5, 0.75, 0.75],
                        vertical_spacing=0.05,
                        subplot_titles=[
                            "AVG Close Price",
@@ -539,6 +559,7 @@ def generate_profit_graph(pairs: str, data: Dict[str, pd.DataFrame],
                            "Profit per pair",
                            "Parallelism",
                            "Underwater",
                            "Relative Drawdown",
                        ])
    fig['layout'].update(title="Freqtrade Profit plot")
    fig['layout']['yaxis1'].update(title='Price')
@@ -546,14 +567,16 @@ def generate_profit_graph(pairs: str, data: Dict[str, pd.DataFrame],
    fig['layout']['yaxis3'].update(title=f'Profit {stake_currency}')
    fig['layout']['yaxis4'].update(title='Trade count')
    fig['layout']['yaxis5'].update(title='Underwater Plot')
    fig['layout']['yaxis6'].update(title='Underwater Plot Relative (%)', tickformat=',.2%')
    fig['layout']['xaxis']['rangeslider'].update(visible=False)
    fig.update_layout(modebar_add=["v1hovermode", "toggleSpikeLines"])

    fig.add_trace(avgclose, 1, 1)
    fig = add_profit(fig, 2, df_comb, 'cum_profit', 'Profit')
    fig = add_max_drawdown(fig, 2, trades, df_comb, timeframe)
    fig = add_max_drawdown(fig, 2, trades, df_comb, timeframe, starting_balance)
    fig = add_parallelism(fig, 4, trades, timeframe)
    fig = add_underwater(fig, 5, trades)
    # Two rows consumed
    fig = add_underwater(fig, 5, trades, starting_balance)

    for pair in pairs:
        profit_col = f'cum_profit_{pair}'
@@ -610,6 +633,7 @@ def load_and_plot_trades(config: Dict[str, Any]):

    exchange = ExchangeResolver.load_exchange(config['exchange']['name'], config)
    IStrategy.dp = DataProvider(config, exchange)
    strategy.bot_start()
    plot_elements = init_plotscript(config, list(exchange.markets), strategy.startup_candle_count)
    timerange = plot_elements['timerange']
    trades = plot_elements['trades']
@@ -668,7 +692,8 @@ def plot_profit(config: Dict[str, Any]) -> None:
    # this could be useful to gauge the overall market trend
    fig = generate_profit_graph(plot_elements['pairs'], plot_elements['ohlcv'],
                                trades, config['timeframe'],
                                config.get('stake_currency', ''))
                                config.get('stake_currency', ''),
                                config.get('available_capital', config['dry_run_wallet']))
    store_plot_file(fig, filename='freqtrade-profit-plot.html',
                    directory=config['user_data_dir'] / 'plot',
                    auto_open=config.get('plot_auto_open', False))

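plot_profit now feeds a starting balance into the drawdown and underwater calculations so relative drawdown can be expressed against account size. A minimal sketch of the config fallback chain used above:

# Sketch: how the plot derives its starting balance from the config.
config = {'dry_run_wallet': 1000}  # no 'available_capital' set
starting_balance = config.get('available_capital', config['dry_run_wallet'])
print(starting_balance)  # 1000
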
@@ -107,7 +107,7 @@ class VolatilityFilter(IPairList):
                returns = (np.log(daily_candles.close / daily_candles.close.shift(-1)))
                returns.fillna(0, inplace=True)

                volatility_series = returns.rolling(window=self._days).std()*np.sqrt(self._days)
                volatility_series = returns.rolling(window=self._days).std() * np.sqrt(self._days)
                volatility_avg = volatility_series.mean()

                if self._min_volatility <= volatility_avg <= self._max_volatility:

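The filter measures volatility as the rolling standard deviation of log returns, scaled by sqrt(days). A self-contained sketch with synthetic closes (illustrative data only):

# Sketch of the VolatilityFilter calculation on synthetic closes.
import numpy as np
import pandas as pd

days = 10
close = pd.Series(np.linspace(100, 110, 30))
returns = np.log(close / close.shift(-1)).fillna(0)
volatility_series = returns.rolling(window=days).std() * np.sqrt(days)
print(volatility_series.mean())
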
@@ -5,6 +5,7 @@ import logging
from datetime import datetime, timezone
from typing import Dict, List, Optional

from freqtrade.constants import LongShort
from freqtrade.persistence import PairLocks
from freqtrade.persistence.models import PairLock
from freqtrade.plugins.protections import IProtection
@@ -44,28 +45,31 @@ class ProtectionManager():
        """
        return [{p.name: p.short_desc()} for p in self._protection_handlers]

    def global_stop(self, now: Optional[datetime] = None) -> Optional[PairLock]:
    def global_stop(self, now: Optional[datetime] = None,
                    side: LongShort = 'long') -> Optional[PairLock]:
        if not now:
            now = datetime.now(timezone.utc)
        result = None
        for protection_handler in self._protection_handlers:
            if protection_handler.has_global_stop:
                lock, until, reason = protection_handler.global_stop(now)

                # Early stopping - first positive result blocks further trades
                if lock and until:
                    if not PairLocks.is_global_lock(until):
                        result = PairLocks.lock_pair('*', until, reason, now=now)
                lock = protection_handler.global_stop(date_now=now, side=side)
                if lock and lock.until:
                    if not PairLocks.is_global_lock(lock.until, side=lock.lock_side):
                        result = PairLocks.lock_pair(
                            '*', lock.until, lock.reason, now=now, side=lock.lock_side)
        return result

    def stop_per_pair(self, pair, now: Optional[datetime] = None) -> Optional[PairLock]:
    def stop_per_pair(self, pair, now: Optional[datetime] = None,
                      side: LongShort = 'long') -> Optional[PairLock]:
        if not now:
            now = datetime.now(timezone.utc)
        result = None
        for protection_handler in self._protection_handlers:
            if protection_handler.has_local_stop:
                lock, until, reason = protection_handler.stop_per_pair(pair, now)
                if lock and until:
                    if not PairLocks.is_pair_locked(pair, until):
                        result = PairLocks.lock_pair(pair, until, reason, now=now)
                lock = protection_handler.stop_per_pair(
                    pair=pair, date_now=now, side=side)
                if lock and lock.until:
                    if not PairLocks.is_pair_locked(pair, lock.until, lock.lock_side):
                        result = PairLocks.lock_pair(
                            pair, lock.until, lock.reason, now=now, side=lock.lock_side)
        return result

@@ -1,7 +1,9 @@

import logging
from datetime import datetime, timedelta
from typing import Optional

from freqtrade.constants import LongShort
from freqtrade.persistence import Trade
from freqtrade.plugins.protections import IProtection, ProtectionReturn

@@ -26,7 +28,7 @@ class CooldownPeriod(IProtection):
        """
        return (f"{self.name} - Cooldown period of {self.stop_duration_str}.")

    def _cooldown_period(self, pair: str, date_now: datetime, ) -> ProtectionReturn:
    def _cooldown_period(self, pair: str, date_now: datetime) -> Optional[ProtectionReturn]:
        """
        Get last trade for this pair
        """
@@ -45,11 +47,15 @@ class CooldownPeriod(IProtection):
            self.log_once(f"Cooldown for {pair} for {self.stop_duration_str}.", logger.info)
            until = self.calculate_lock_end([trade], self._stop_duration)

            return True, until, self._reason()
            return ProtectionReturn(
                lock=True,
                until=until,
                reason=self._reason(),
            )

        return False, None, None
        return None

    def global_stop(self, date_now: datetime) -> ProtectionReturn:
    def global_stop(self, date_now: datetime, side: LongShort) -> Optional[ProtectionReturn]:
        """
        Stops trading (position entering) for all pairs
        This must evaluate to true for the whole period of the "cooldown period".
@@ -57,9 +63,10 @@ class CooldownPeriod(IProtection):
        If true, all pairs will be locked with <reason> until <until>
        """
        # Not implemented for cooldown period.
        return False, None, None
        return None

    def stop_per_pair(self, pair: str, date_now: datetime) -> ProtectionReturn:
    def stop_per_pair(
            self, pair: str, date_now: datetime, side: LongShort) -> Optional[ProtectionReturn]:
        """
        Stops trading (position entering) for this pair
        This must evaluate to true for the whole period of the "cooldown period".

@@ -1,9 +1,11 @@

import logging
from abc import ABC, abstractmethod
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Optional, Tuple
from typing import Any, Dict, List, Optional

from freqtrade.constants import LongShort
from freqtrade.exchange import timeframe_to_minutes
from freqtrade.misc import plural
from freqtrade.mixins import LoggingMixin
@@ -12,7 +14,13 @@ from freqtrade.persistence import LocalTrade

logger = logging.getLogger(__name__)

ProtectionReturn = Tuple[bool, Optional[datetime], Optional[str]]

@dataclass
class ProtectionReturn:
    lock: bool
    until: datetime
    reason: Optional[str]
    lock_side: str = '*'


class IProtection(LoggingMixin, ABC):
@@ -80,14 +88,15 @@ class IProtection(LoggingMixin, ABC):
        """

    @abstractmethod
    def global_stop(self, date_now: datetime) -> ProtectionReturn:
    def global_stop(self, date_now: datetime, side: LongShort) -> Optional[ProtectionReturn]:
        """
        Stops trading (position entering) for all pairs
        This must evaluate to true for the whole period of the "cooldown period".
        """

    @abstractmethod
    def stop_per_pair(self, pair: str, date_now: datetime) -> ProtectionReturn:
    def stop_per_pair(
            self, pair: str, date_now: datetime, side: LongShort) -> Optional[ProtectionReturn]:
        """
        Stops trading (position entering) for this pair
        This must evaluate to true for the whole period of the "cooldown period".

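Protections now return a ProtectionReturn dataclass (or None) instead of the old tuple. Constructing one looks roughly like this (a sketch; assumes the freqtrade import path shown in the diffs above):

# Sketch: constructing the new ProtectionReturn.
from datetime import datetime, timedelta, timezone
from freqtrade.plugins.protections import ProtectionReturn

ret = ProtectionReturn(
    lock=True,
    until=datetime.now(timezone.utc) + timedelta(minutes=60),
    reason='demo lock',
    lock_side='long',  # '*' (the default) locks both directions
)
print(ret.lock, ret.lock_side)
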
@@ -1,8 +1,9 @@

import logging
from datetime import datetime, timedelta
from typing import Any, Dict
from typing import Any, Dict, Optional

from freqtrade.constants import LongShort
from freqtrade.persistence import Trade
from freqtrade.plugins.protections import IProtection, ProtectionReturn

@@ -35,7 +36,7 @@ class LowProfitPairs(IProtection):
        return (f'{profit} < {self._required_profit} in {self.lookback_period_str}, '
                f'locking for {self.stop_duration_str}.')

    def _low_profit(self, date_now: datetime, pair: str) -> ProtectionReturn:
    def _low_profit(self, date_now: datetime, pair: str) -> Optional[ProtectionReturn]:
        """
        Evaluate recent trades for pair
        """
@@ -51,7 +52,7 @@ class LowProfitPairs(IProtection):
        # trades = Trade.get_trades(filters).all()
        if len(trades) < self._trade_limit:
            # Not enough trades in the relevant period
            return False, None, None
            return None

        profit = sum(trade.close_profit for trade in trades if trade.close_profit)
        if profit < self._required_profit:
@@ -60,20 +61,25 @@ class LowProfitPairs(IProtection):
                f"within {self._lookback_period} minutes.", logger.info)
            until = self.calculate_lock_end(trades, self._stop_duration)

            return True, until, self._reason(profit)
            return ProtectionReturn(
                lock=True,
                until=until,
                reason=self._reason(profit),
            )

        return False, None, None
        return None

    def global_stop(self, date_now: datetime) -> ProtectionReturn:
    def global_stop(self, date_now: datetime, side: LongShort) -> Optional[ProtectionReturn]:
        """
        Stops trading (position entering) for all pairs
        This must evaluate to true for the whole period of the "cooldown period".
        :return: Tuple of [bool, until, reason].
            If true, all pairs will be locked with <reason> until <until>
        """
        return False, None, None
        return None

    def stop_per_pair(self, pair: str, date_now: datetime) -> ProtectionReturn:
    def stop_per_pair(
            self, pair: str, date_now: datetime, side: LongShort) -> Optional[ProtectionReturn]:
        """
        Stops trading (position entering) for this pair
        This must evaluate to true for the whole period of the "cooldown period".

@@ -1,11 +1,12 @@

import logging
from datetime import datetime, timedelta
from typing import Any, Dict
from typing import Any, Dict, Optional

import pandas as pd

from freqtrade.data.btanalysis import calculate_max_drawdown
from freqtrade.constants import LongShort
from freqtrade.data.metrics import calculate_max_drawdown
from freqtrade.persistence import Trade
from freqtrade.plugins.protections import IProtection, ProtectionReturn

@@ -39,7 +40,7 @@ class MaxDrawdown(IProtection):
        return (f'{drawdown} passed {self._max_allowed_drawdown} in {self.lookback_period_str}, '
                f'locking for {self.stop_duration_str}.')

    def _max_drawdown(self, date_now: datetime) -> ProtectionReturn:
    def _max_drawdown(self, date_now: datetime) -> Optional[ProtectionReturn]:
        """
        Evaluate recent trades for drawdown ...
        """
@@ -51,14 +52,14 @@ class MaxDrawdown(IProtection):

        if len(trades) < self._trade_limit:
            # Not enough trades in the relevant period
            return False, None, None
            return None

        # Drawdown is always positive
        try:
            # TODO: This should use absolute profit calculation, considering account balance.
            drawdown, _, _, _, _, _ = calculate_max_drawdown(trades_df, value_col='close_profit')
        except ValueError:
            return False, None, None
            return None

        if drawdown > self._max_allowed_drawdown:
            self.log_once(
@@ -66,11 +67,15 @@ class MaxDrawdown(IProtection):
                f" within {self.lookback_period_str}.", logger.info)
            until = self.calculate_lock_end(trades, self._stop_duration)

            return True, until, self._reason(drawdown)
            return ProtectionReturn(
                lock=True,
                until=until,
                reason=self._reason(drawdown),
            )

        return False, None, None
        return None

    def global_stop(self, date_now: datetime) -> ProtectionReturn:
    def global_stop(self, date_now: datetime, side: LongShort) -> Optional[ProtectionReturn]:
        """
        Stops trading (position entering) for all pairs
        This must evaluate to true for the whole period of the "cooldown period".
@@ -79,11 +84,12 @@ class MaxDrawdown(IProtection):
        """
        return self._max_drawdown(date_now)

    def stop_per_pair(self, pair: str, date_now: datetime) -> ProtectionReturn:
    def stop_per_pair(
            self, pair: str, date_now: datetime, side: LongShort) -> Optional[ProtectionReturn]:
        """
        Stops trading (position entering) for this pair
        This must evaluate to true for the whole period of the "cooldown period".
        :return: Tuple of [bool, until, reason].
            If true, this pair will be locked with <reason> until <until>
        """
        return False, None, None
        return None

@@ -1,8 +1,9 @@

import logging
from datetime import datetime, timedelta
from typing import Any, Dict
from typing import Any, Dict, Optional

from freqtrade.constants import LongShort
from freqtrade.enums import ExitType
from freqtrade.persistence import Trade
from freqtrade.plugins.protections import IProtection, ProtectionReturn
@@ -21,6 +22,7 @@ class StoplossGuard(IProtection):

        self._trade_limit = protection_config.get('trade_limit', 10)
        self._disable_global_stop = protection_config.get('only_per_pair', False)
        self._only_per_side = protection_config.get('only_per_side', False)

    def short_desc(self) -> str:
        """
@@ -36,7 +38,8 @@ class StoplossGuard(IProtection):
        return (f'{self._trade_limit} stoplosses in {self._lookback_period} min, '
                f'locking for {self._stop_duration} min.')

    def _stoploss_guard(self, date_now: datetime, pair: str = None) -> ProtectionReturn:
    def _stoploss_guard(
            self, date_now: datetime, pair: Optional[str], side: str) -> Optional[ProtectionReturn]:
        """
        Evaluate recent trades
        """
@@ -48,15 +51,24 @@ class StoplossGuard(IProtection):
                                            ExitType.STOPLOSS_ON_EXCHANGE.value)
                  and trade.close_profit and trade.close_profit < 0)]

        if self._only_per_side:
            # Long or short trades only
            trades = [trade for trade in trades if trade.trade_direction == side]

        if len(trades) < self._trade_limit:
            return False, None, None
            return None

        self.log_once(f"Trading stopped due to {self._trade_limit} "
                      f"stoplosses within {self._lookback_period} minutes.", logger.info)
        until = self.calculate_lock_end(trades, self._stop_duration)
        return True, until, self._reason()
        return ProtectionReturn(
            lock=True,
            until=until,
            reason=self._reason(),
            lock_side=(side if self._only_per_side else '*')
        )

    def global_stop(self, date_now: datetime) -> ProtectionReturn:
    def global_stop(self, date_now: datetime, side: LongShort) -> Optional[ProtectionReturn]:
        """
        Stops trading (position entering) for all pairs
        This must evaluate to true for the whole period of the "cooldown period".
@@ -64,14 +76,15 @@ class StoplossGuard(IProtection):
        If true, all pairs will be locked with <reason> until <until>
        """
        if self._disable_global_stop:
            return False, None, None
        return self._stoploss_guard(date_now, None)
            return None
        return self._stoploss_guard(date_now, None, side)

    def stop_per_pair(self, pair: str, date_now: datetime) -> ProtectionReturn:
    def stop_per_pair(
            self, pair: str, date_now: datetime, side: LongShort) -> Optional[ProtectionReturn]:
        """
        Stops trading (position entering) for this pair
        This must evaluate to true for the whole period of the "cooldown period".
        :return: Tuple of [bool, until, reason].
            If true, this pair will be locked with <reason> until <until>
        """
        return self._stoploss_guard(date_now, pair)
        return self._stoploss_guard(date_now, pair, side)

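With only_per_side enabled, StoplossGuard counts stoplosses per direction and emits a lock whose lock_side matches that direction. A config sketch enabling it (values are illustrative):

# Sketch: protections entry enabling side-aware StoplossGuard.
protections = [
    {
        "method": "StoplossGuard",
        "lookback_period": 60,
        "trade_limit": 4,
        "stop_duration": 60,
        "only_per_side": True,  # count and lock long/short separately
    }
]
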
@@ -23,7 +23,7 @@ class HyperOptLossResolver(IResolver):
    object_type = IHyperOptLoss
    object_type_str = "HyperoptLoss"
    user_subdir = USERPATH_HYPEROPTS
    initial_search_path = Path(__file__).parent.parent.joinpath('optimize').resolve()
    initial_search_path = Path(__file__).parent.parent.joinpath('optimize/hyperopt_loss').resolve()

    @staticmethod
    def load_hyperoptloss(config: Dict) -> IHyperOptLoss:

@@ -44,7 +44,7 @@ class IResolver:

    @classmethod
    def build_search_paths(cls, config: Dict[str, Any], user_subdir: Optional[str] = None,
                           extra_dir: Optional[str] = None) -> List[Path]:
                           extra_dirs: List[str] = []) -> List[Path]:

        abs_paths: List[Path] = []
        if cls.initial_search_path:
@@ -53,9 +53,9 @@ class IResolver:
        if user_subdir:
            abs_paths.insert(0, config['user_data_dir'].joinpath(user_subdir))

        if extra_dir:
            # Add extra directory to the top of the search paths
            abs_paths.insert(0, Path(extra_dir).resolve())
        # Add extra directory to the top of the search paths
        for dir in extra_dirs:
            abs_paths.insert(0, Path(dir).resolve())

        return abs_paths

@@ -164,9 +164,13 @@ class IResolver:
        :return: Object instance or None
        """

        extra_dirs: List[str] = []
        if extra_dir:
            extra_dirs.append(extra_dir)

        abs_paths = cls.build_search_paths(config,
                                           user_subdir=cls.user_subdir,
                                           extra_dir=extra_dir)
                                           extra_dirs=extra_dirs)

        found_object = cls._load_object(paths=abs_paths, object_name=object_name,
                                        kwargs=kwargs)
@@ -178,18 +182,25 @@ class IResolver:
        )

    @classmethod
    def search_all_objects(cls, directory: Path,
                           enum_failed: bool) -> List[Dict[str, Any]]:
    def search_all_objects(cls, directory: Path, enum_failed: bool,
                           recursive: bool = False) -> List[Dict[str, Any]]:
        """
        Searches a directory for valid objects
        :param directory: Path to search
        :param enum_failed: If True, will return None for modules which fail.
            Otherwise, failing modules are skipped.
        :param recursive: Recursively walk directory tree searching for strategies
        :return: List of dicts containing 'name', 'class' and 'location' entries
        """
        logger.debug(f"Searching for {cls.object_type.__name__} '{directory}'")
        objects = []
        for entry in directory.iterdir():
            if (
                recursive and entry.is_dir()
                and not entry.name.startswith('__')
                and not entry.name.startswith('.')
            ):
                objects.extend(cls.search_all_objects(entry, enum_failed, recursive=recursive))
            # Only consider python files
            if entry.suffix != '.py':
                logger.debug('Ignoring %s', entry)

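search_all_objects now optionally recurses into sub-directories, skipping dunder and hidden folders. Calling it directly looks like this (a sketch; the path is an example user_data layout):

# Sketch: recursive strategy discovery via the resolver.
from pathlib import Path
from freqtrade.resolvers import StrategyResolver

objs = StrategyResolver.search_all_objects(
    Path('user_data/strategies'), enum_failed=False, recursive=True)
print([o['name'] for o in objs])
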
@@ -7,8 +7,9 @@ import logging
import tempfile
from base64 import urlsafe_b64decode
from inspect import getfullargspec
from os import walk
from pathlib import Path
from typing import Any, Dict, Optional
from typing import Any, Dict, List, Optional

from freqtrade.configuration.config_validation import validate_migrated_strategy_settings
from freqtrade.constants import REQUIRED_ORDERTIF, REQUIRED_ORDERTYPES, USERPATH_STRATEGIES
@@ -216,15 +217,19 @@ class StrategyResolver(IResolver):
            raise OperationalException(
                "`populate_exit_trend` or `populate_sell_trend` must be implemented.")

        strategy._populate_fun_len = len(getfullargspec(strategy.populate_indicators).args)
        strategy._buy_fun_len = len(getfullargspec(strategy.populate_buy_trend).args)
        strategy._sell_fun_len = len(getfullargspec(strategy.populate_sell_trend).args)
        _populate_fun_len = len(getfullargspec(strategy.populate_indicators).args)
        _buy_fun_len = len(getfullargspec(strategy.populate_buy_trend).args)
        _sell_fun_len = len(getfullargspec(strategy.populate_sell_trend).args)
        if any(x == 2 for x in [
            strategy._populate_fun_len,
            strategy._buy_fun_len,
            strategy._sell_fun_len
            _populate_fun_len,
            _buy_fun_len,
            _sell_fun_len
        ]):
            strategy.INTERFACE_VERSION = 1
            raise OperationalException(
                "Strategy Interface v1 is no longer supported. "
                "Please update your strategy to implement "
                "`populate_indicators`, `populate_entry_trend` and `populate_exit_trend` "
                "with the metadata argument. ")
        return strategy

    @staticmethod
@@ -237,10 +242,19 @@ class StrategyResolver(IResolver):
        :param extra_dir: additional directory to search for the given strategy
        :return: Strategy instance or None
        """
        if config.get('recursive_strategy_search', False):
            extra_dirs: List[str] = [
                path[0] for path in walk(f"{config['user_data_dir']}/{USERPATH_STRATEGIES}")
            ]  # sub-directories
        else:
            extra_dirs = []

        if extra_dir:
            extra_dirs.append(extra_dir)

        abs_paths = StrategyResolver.build_search_paths(config,
                                                        user_subdir=USERPATH_STRATEGIES,
                                                        extra_dir=extra_dir)
                                                        extra_dirs=extra_dirs)

        if ":" in strategy_name:
            logger.info("loading base64 encoded strategy")

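The recursive_strategy_search branch builds the extra search dirs from os.walk, which yields every sub-directory under user_data/strategies. A quick sketch of what that produces (example path):

# Sketch: what the recursive_strategy_search branch collects.
from os import walk

extra_dirs = [path[0] for path in walk('user_data/strategies')]
print(extra_dirs)  # e.g. ['user_data/strategies', 'user_data/strategies/subdir', ...]
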
@@ -1,13 +1,16 @@
import asyncio
import logging
from copy import deepcopy
from typing import Any, Dict, List

from fastapi import APIRouter, BackgroundTasks, Depends

from freqtrade.configuration.config_validation import validate_config_consistency
from freqtrade.data.btanalysis import get_backtest_resultlist, load_and_merge_backtest_result
from freqtrade.enums import BacktestState
from freqtrade.exceptions import DependencyException
from freqtrade.rpc.api_server.api_schemas import BacktestRequest, BacktestResponse
from freqtrade.rpc.api_server.api_schemas import (BacktestHistoryEntry, BacktestRequest,
                                                  BacktestResponse)
from freqtrade.rpc.api_server.deps import get_config, is_webserver_mode
from freqtrade.rpc.api_server.webserver import ApiServer
from freqtrade.rpc.rpc import RPCException
@@ -81,6 +84,7 @@ async def api_start_backtest(bt_settings: BacktestRequest, background_tasks: Bac
        lastconfig['enable_protections'] = btconfig.get('enable_protections')
        lastconfig['dry_run_wallet'] = btconfig.get('dry_run_wallet')

        ApiServer._bt.strategylist = [strat]
        ApiServer._bt.results = {}
        ApiServer._bt.load_prior_backtest()

@@ -200,3 +204,30 @@ def api_backtest_abort(ws_mode=Depends(is_webserver_mode)):
        "progress": 0,
        "status_msg": "Backtest ended",
    }


@router.get('/backtest/history', response_model=List[BacktestHistoryEntry], tags=['webserver', 'backtest'])
def api_backtest_history(config=Depends(get_config), ws_mode=Depends(is_webserver_mode)):
    # Get backtest result history, read from metadata files
    return get_backtest_resultlist(config['user_data_dir'] / 'backtest_results')


@router.get('/backtest/history/result', response_model=BacktestResponse, tags=['webserver', 'backtest'])
def api_backtest_history_result(filename: str, strategy: str, config=Depends(get_config), ws_mode=Depends(is_webserver_mode)):
    # Get backtest result history, read from metadata files
    fn = config['user_data_dir'] / 'backtest_results' / filename
    results: Dict[str, Any] = {
        'metadata': {},
        'strategy': {},
        'strategy_comparison': [],
    }

    load_and_merge_backtest_result(strategy, fn, results)
    return {
        "status": "ended",
        "running": False,
        "step": "",
        "progress": 1,
        "status_msg": "Historic result",
        "backtest_result": results,
    }

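Once the API server runs in webserver mode, the two new endpoints can be exercised with any HTTP client. A sketch using requests (host, port, API prefix and credentials are placeholders for a local setup):

# Sketch: querying the new backtest-history endpoints.
import requests

base = 'http://127.0.0.1:8080/api/v1'
auth = ('freqtrader', 'password')  # placeholder credentials

history = requests.get(f'{base}/backtest/history', auth=auth).json()
if history:
    entry = history[0]
    result = requests.get(f'{base}/backtest/history/result', auth=auth,
                          params={'filename': entry['filename'],
                                  'strategy': entry['strategy']}).json()
    print(result['status_msg'])  # 'Historic result'
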
@@ -203,6 +203,8 @@ class OrderSchema(BaseModel):
class TradeSchema(BaseModel):
    trade_id: int
    pair: str
    base_currency: str
    quote_currency: str
    is_open: bool
    is_short: bool
    exchange: str
@@ -289,6 +291,7 @@ class LockModel(BaseModel):
    lock_time: str
    lock_timestamp: int
    pair: str
    side: str
    reason: str


@@ -419,6 +422,13 @@ class BacktestResponse(BaseModel):
    backtest_result: Optional[Dict[str, Any]]


class BacktestHistoryEntry(BaseModel):
    filename: str
    strategy: str
    run_id: str
    backtest_start_time: int


class SysInfo(BaseModel):
    cpu_pct: List[float]
    ram_pct: float

@@ -35,7 +35,8 @@ logger = logging.getLogger(__name__)
# 1.13: forcebuy supports stake_amount
# versions 2.xx -> futures/short branch
# 2.14: Add entry/exit orders to trade response
API_VERSION = 2.14
# 2.15: Add backtest history endpoints
API_VERSION = 2.15

# Public API, requires no auth.
router_public = APIRouter()
@@ -157,7 +158,7 @@ def force_entry(payload: ForceEnterPayload, rpc: RPC = Depends(get_rpc)):
# /forcesell is deprecated with short addition. use /forceexit instead
@router.post('/forceexit', response_model=ResultMsg, tags=['trading'])
@router.post('/forcesell', response_model=ResultMsg, tags=['trading'])
def forcesell(payload: ForceExitPayload, rpc: RPC = Depends(get_rpc)):
def forceexit(payload: ForceExitPayload, rpc: RPC = Depends(get_rpc)):
    ordertype = payload.ordertype.value if payload.ordertype else None
    return rpc._rpc_force_exit(payload.tradeid, ordertype)

@@ -252,7 +253,8 @@ def list_strategies(config=Depends(get_config)):
    directory = Path(config.get(
        'strategy_path', config['user_data_dir'] / USERPATH_STRATEGIES))
    from freqtrade.resolvers.strategy_resolver import StrategyResolver
    strategies = StrategyResolver.search_all_objects(directory, False)
    strategies = StrategyResolver.search_all_objects(
        directory, False, config.get('recursive_strategy_search', False))
    strategies = sorted(strategies, key=lambda x: x['name'])

    return {'strategies': [x['name'] for x in strategies]}

@@ -2,7 +2,7 @@ import logging
from ipaddress import IPv4Address
from typing import Any, Dict

import rapidjson
import orjson
import uvicorn
from fastapi import Depends, FastAPI
from fastapi.middleware.cors import CORSMiddleware
@@ -24,7 +24,7 @@ class FTJSONResponse(JSONResponse):
        Use rapidjson for responses
        Handles NaN and Inf / -Inf in a javascript way by default.
        """
        return rapidjson.dumps(content).encode("utf-8")
        return orjson.dumps(content, option=orjson.OPT_SERIALIZE_NUMPY)


class ApiServer(RPCHandler):

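orjson.dumps already returns bytes (hence the dropped .encode call) and, with OPT_SERIALIZE_NUMPY, serializes numpy scalars and arrays directly. A small sketch:

# Sketch: orjson returns bytes and can serialize numpy types natively.
import numpy as np
import orjson

payload = {'profit': np.float64(1.23), 'curve': np.arange(3)}
raw = orjson.dumps(payload, option=orjson.OPT_SERIALIZE_NUMPY)
print(type(raw), raw)  # <class 'bytes'> b'{"profit":1.23,"curve":[0,1,2]}'
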
@@ -197,7 +197,6 @@ class RPC:

            trade_dict = trade.to_json()
            trade_dict.update(dict(
                base_currency=self._freqtrade.config['stake_currency'],
                close_profit=trade.close_profit if not trade.is_open else None,
                current_rate=current_rate,
                current_profit=current_profit,  # Deprecated
@@ -223,6 +222,7 @@ class RPC:
    def _rpc_status_table(self, stake_currency: str,
                          fiat_display_currency: str) -> Tuple[List, List, float]:
        trades: List[Trade] = Trade.get_open_trades()
        nonspot = self._config.get('trading_mode', TradingMode.SPOT) != TradingMode.SPOT
        if not trades:
            raise RPCException('no active trade')
        else:
@@ -237,7 +237,7 @@ class RPC:
                    current_rate = NAN
                trade_profit = trade.calc_profit(current_rate)
                profit_str = f'{trade.calc_profit_ratio(current_rate):.2%}'
                direction_str = 'S' if trade.is_short else 'L'
                direction_str = ('S' if trade.is_short else 'L') if nonspot else ''
                if self._fiat_converter:
                    fiat_profit = self._fiat_converter.convert_amount(
                        trade_profit,
@@ -267,7 +267,11 @@ class RPC:
            if self._fiat_converter:
                profitcol += " (" + fiat_display_currency + ")"

            columns = ['ID L/S', 'Pair', 'Since', profitcol]
            columns = [
                'ID L/S' if nonspot else 'ID',
                'Pair',
                'Since',
                profitcol]
            if self._config.get('position_adjustment_enable', False):
                columns.append('# Entries')
            return trades_list, columns, fiat_profit_sum
@@ -686,10 +690,10 @@ class RPC:

    def _rpc_force_exit(self, trade_id: str, ordertype: Optional[str] = None) -> Dict[str, str]:
        """
        Handler for forcesell <id>.
        Handler for forceexit <id>.
        Sells the given trade at current price
        """
        def _exec_forcesell(trade: Trade) -> None:
        def _exec_force_exit(trade: Trade) -> None:
            # Check if there is an open order
            fully_canceled = False
            if trade.open_order_id:
@@ -722,7 +726,7 @@ class RPC:
        if trade_id == 'all':
            # Execute sell for all open orders
            for trade in Trade.get_open_trades():
                _exec_forcesell(trade)
                _exec_force_exit(trade)
            Trade.commit()
            self._freqtrade.wallets.update()
            return {'result': 'Created sell orders for all open trades.'}
@@ -735,7 +739,7 @@ class RPC:
            logger.warning('force_exit: Invalid argument received')
            raise RPCException('invalid argument')

        _exec_forcesell(trade)
        _exec_force_exit(trade)
        Trade.commit()
        self._freqtrade.wallets.update()
        return {'result': f'Created sell order for trade {trade_id}.'}

@@ -103,7 +103,6 @@ class Telegram(RPCHandler):
|
||||
['/count', '/start', '/stop', '/help']
|
||||
]
|
||||
# do not allow commands with mandatory arguments and critical cmds
|
||||
# like /forcesell and /forcebuy
|
||||
# TODO: DRY! - its not good to list all valid cmds here. But otherwise
|
||||
# this needs refactoring of the whole telegram module (same
|
||||
# problem in _help()).
|
||||
@@ -116,6 +115,7 @@ class Telegram(RPCHandler):
|
||||
r'/logs$', r'/whitelist$', r'/blacklist$', r'/bl_delete$',
|
||||
r'/weekly$', r'/weekly \d+$', r'/monthly$', r'/monthly \d+$',
|
||||
r'/forcebuy$', r'/forcelong$', r'/forceshort$',
|
||||
r'/forcesell$', r'/forceexit$',
|
||||
r'/edge$', r'/health$', r'/help$', r'/version$']
|
||||
# Create keys for generation
|
||||
valid_keys_print = [k.replace('$', '') for k in valid_keys]
|
||||
@@ -197,7 +197,8 @@ class Telegram(RPCHandler):
|
||||
pattern='update_exit_reason_performance'),
|
||||
CallbackQueryHandler(self._mix_tag_performance, pattern='update_mix_tag_performance'),
|
||||
CallbackQueryHandler(self._count, pattern='update_count'),
|
||||
CallbackQueryHandler(self._force_enter_inline),
|
||||
CallbackQueryHandler(self._force_exit_inline, pattern=r"force_exit__\S+"),
|
||||
CallbackQueryHandler(self._force_enter_inline, pattern=r"\S+\/\S+"),
|
||||
]
|
||||
for handle in handles:
|
||||
self._updater.dispatcher.add_handler(handle)
|
||||
@@ -287,7 +288,7 @@ class Telegram(RPCHandler):
|
||||
else "")
|
||||
|
||||
# Check if all sell properties are available.
|
||||
# This might not be the case if the message origin is triggered by /forcesell
|
||||
# This might not be the case if the message origin is triggered by /forceexit
|
||||
if (all(prop in msg for prop in ['gain', 'fiat_currency', 'stake_currency'])
|
||||
and self._rpc._fiat_converter):
|
||||
msg['profit_fiat'] = self._rpc._fiat_converter.convert_amount(
|
||||
@@ -431,7 +432,7 @@ class Telegram(RPCHandler):
|
||||
else:
|
||||
return "\N{CROSS MARK}"
|
||||
|
||||
def _prepare_entry_details(self, filled_orders: List, base_currency: str, is_open: bool):
|
||||
def _prepare_entry_details(self, filled_orders: List, quote_currency: str, is_open: bool):
|
||||
"""
|
||||
Prepare details of trade with entry adjustment enabled
|
||||
"""
|
||||
@@ -449,7 +450,7 @@ class Telegram(RPCHandler):
|
||||
if x == 0:
|
||||
lines.append(f"*Entry #{x+1}:*")
|
||||
lines.append(
|
||||
f"*Entry Amount:* {cur_entry_amount} ({order['cost']:.8f} {base_currency})")
|
||||
f"*Entry Amount:* {cur_entry_amount} ({order['cost']:.8f} {quote_currency})")
|
||||
lines.append(f"*Average Entry Price:* {cur_entry_average}")
|
||||
else:
|
||||
sumA = 0
|
||||
@@ -464,7 +465,8 @@ class Telegram(RPCHandler):
|
||||
if prev_avg_price:
|
||||
minus_on_entry = (cur_entry_average - prev_avg_price) / prev_avg_price
|
||||
|
||||
dur_entry = cur_entry_datetime - arrow.get(filled_orders[x-1]["order_filled_date"])
|
||||
dur_entry = cur_entry_datetime - arrow.get(
|
||||
filled_orders[x - 1]["order_filled_date"])
|
||||
days = dur_entry.days
|
||||
hours, remainder = divmod(dur_entry.seconds, 3600)
|
||||
minutes, seconds = divmod(remainder, 60)
|
||||
@@ -473,7 +475,7 @@ class Telegram(RPCHandler):
|
||||
lines.append("({})".format(cur_entry_datetime
|
||||
.humanize(granularity=["day", "hour", "minute"])))
|
||||
lines.append(
|
||||
f"*Entry Amount:* {cur_entry_amount} ({order['cost']:.8f} {base_currency})")
|
||||
f"*Entry Amount:* {cur_entry_amount} ({order['cost']:.8f} {quote_currency})")
|
||||
lines.append(f"*Average Entry Price:* {cur_entry_average} "
|
||||
f"({price_to_1st_entry:.2%} from 1st entry rate)")
|
||||
lines.append(f"*Order filled at:* {order['order_filled_date']}")
@@ -516,7 +518,7 @@ class Telegram(RPCHandler):
"*Current Pair:* {pair}",
"*Direction:* " + ("`Short`" if r.get('is_short') else "`Long`"),
"*Leverage:* `{leverage}`" if r.get('leverage') else "",
"*Amount:* `{amount} ({stake_amount} {base_currency})`",
"*Amount:* `{amount} ({stake_amount} {quote_currency})`",
"*Enter Tag:* `{enter_tag}`" if r['enter_tag'] else "",
"*Exit Reason:* `{exit_reason}`" if r['exit_reason'] else "",
]
@@ -556,7 +558,7 @@ class Telegram(RPCHandler):
lines.append("*Open Order:* `{open_order}`")

lines_detail = self._prepare_entry_details(
r['orders'], r['base_currency'], r['is_open'])
r['orders'], r['quote_currency'], r['is_open'])
lines.extend(lines_detail if lines_detail else "")

# Filter empty lines using list-comprehension
@@ -976,23 +978,58 @@ class Telegram(RPCHandler):
@authorized_only
def _force_exit(self, update: Update, context: CallbackContext) -> None:
"""
Handler for /forcesell <id>.
Handler for /forceexit <id>.
Sells the given trade at current price
:param bot: telegram bot
:param update: message update
:return: None
"""

trade_id = context.args[0] if context.args and len(context.args) > 0 else None
if not trade_id:
self._send_msg("You must specify a trade-id or 'all'.")
return
try:
msg = self._rpc._rpc_force_exit(trade_id)
self._send_msg('Force_exit Result: `{result}`'.format(**msg))
if context.args:
trade_id = context.args[0]
self._force_exit_action(trade_id)
else:
fiat_currency = self._config.get('fiat_display_currency', '')
try:
statlist, _, _ = self._rpc._rpc_status_table(
self._config['stake_currency'], fiat_currency)
except RPCException:
self._send_msg(msg='No open trade found.')
return
trades = []
for trade in statlist:
trades.append((trade[0], f"{trade[0]} {trade[1]} {trade[2]} {trade[3]}"))

except RPCException as e:
self._send_msg(str(e))
trade_buttons = [
InlineKeyboardButton(text=trade[1], callback_data=f"force_exit__{trade[0]}")
for trade in trades]
buttons_aligned = self._layout_inline_keyboard_onecol(trade_buttons)

buttons_aligned.append([InlineKeyboardButton(
text='Cancel', callback_data='force_exit__cancel')])
self._send_msg(msg="Which trade?", keyboard=buttons_aligned)

def _force_exit_action(self, trade_id):
if trade_id != 'cancel':
try:
self._rpc._rpc_force_exit(trade_id)
except RPCException as e:
self._send_msg(str(e))

def _force_exit_inline(self, update: Update, _: CallbackContext) -> None:
if update.callback_query:
query = update.callback_query
if query.data and '__' in query.data:
# Input data is "force_exit__<trade_id|cancel>"
trade_id = query.data.split("__")[1].split(' ')[0]
if trade_id == 'cancel':
query.answer()
query.edit_message_text(text="Force exit canceled.")
return
trade: Trade = Trade.get_trades(trade_filter=Trade.id == trade_id).first()
query.answer()
query.edit_message_text(text=f"Manually exiting Trade #{trade_id}, {trade.pair}")
self._force_exit_action(trade_id)
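
The inline flow above round-trips the trade id through the button's `callback_data`. A minimal sketch of the encode/parse convention (`force_exit__<trade_id|cancel>`), mirroring `_force_exit_inline`:

    # Callback data produced by the keyboard above:
    data = "force_exit__42"
    trade_id = data.split("__")[1].split(' ')[0]   # -> "42"
    if trade_id == 'cancel':
        print("Force exit canceled.")
    else:
        print(f"Manually exiting Trade #{trade_id}")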

def _force_enter_action(self, pair, price: Optional[float], order_side: SignalDirection):
if pair != 'cancel':
@@ -1012,8 +1049,13 @@ class Telegram(RPCHandler):
self._force_enter_action(pair, None, order_side)

@staticmethod
def _layout_inline_keyboard(buttons: List[InlineKeyboardButton],
cols=3) -> List[List[InlineKeyboardButton]]:
def _layout_inline_keyboard(
buttons: List[InlineKeyboardButton], cols=3) -> List[List[InlineKeyboardButton]]:
return [buttons[i:i + cols] for i in range(0, len(buttons), cols)]

@staticmethod
def _layout_inline_keyboard_onecol(
buttons: List[InlineKeyboardButton], cols=1) -> List[List[InlineKeyboardButton]]:
return [buttons[i:i + cols] for i in range(0, len(buttons), cols)]
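
Both layout helpers are the same chunking expression; `_layout_inline_keyboard_onecol` merely pins `cols=1` so each trade lands on its own row. A quick illustration with plain integers standing in for buttons:

    buttons = list(range(7))  # stand-ins for InlineKeyboardButton instances
    rows_3col = [buttons[i:i + 3] for i in range(0, len(buttons), 3)]
    # [[0, 1, 2], [3, 4, 5], [6]]
    rows_1col = [buttons[i:i + 1] for i in range(0, len(buttons), 1)]
    # [[0], [1], [2], [3], [4], [5], [6]]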

@authorized_only
@@ -1421,7 +1463,6 @@ class Telegram(RPCHandler):
"*/start:* `Starts the trader`\n"
"*/stop:* Stops the trader\n"
"*/stopbuy:* `Stops buying, but handles open trades gracefully` \n"
# TODO: forceenter forceshort forcelong missing
"*/forceexit <trade_id>|all:* `Instantly exits the given trade or all trades, "
"regardless of profit`\n"
"*/fe <trade_id>|all:* `Alias to /forceexit`"

@@ -23,7 +23,7 @@ class InformativeData:
def informative(timeframe: str, asset: str = '',
fmt: Optional[Union[str, Callable[[Any], str]]] = None,
*,
candle_type: Optional[CandleType] = None,
candle_type: Optional[Union[CandleType, str]] = None,
ffill: bool = True) -> Callable[[PopulateIndicators], PopulateIndicators]:
"""
A decorator for populate_indicators_Nn(self, dataframe, metadata), allowing these functions to
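
The widened `candle_type` annotation means strategies can now pass a plain string instead of a `CandleType` member. A hedged usage sketch (indicator choices and the 'mark' candle type are illustrative; mark candles only exist in futures mode, and other required strategy methods are omitted):

    import talib.abstract as ta
    from pandas import DataFrame
    from freqtrade.strategy import IStrategy, informative

    class MyStrategy(IStrategy):
        @informative('1h')
        def populate_indicators_1h(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
            dataframe['rsi'] = ta.RSI(dataframe, timeperiod=14)
            return dataframe

        # With the widened annotation, a plain string such as 'mark'
        # can be passed instead of a CandleType member.
        @informative('4h', 'BTC/{stake_currency}', candle_type='mark')
        def populate_indicators_btc_4h(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
            dataframe['sma'] = ta.SMA(dataframe, timeperiod=20)
            return dataframe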

@@ -3,7 +3,6 @@ IStrategy interface
This module defines the interface to apply for strategies
"""
import logging
import warnings
from abc import ABC, abstractmethod
from datetime import datetime, timedelta, timezone
from typing import Dict, List, Optional, Tuple, Union
@@ -44,14 +43,11 @@ class IStrategy(ABC, HyperStrategyMixin):
"""
# Strategy interface version
# Default to version 2
# Version 1 is the initial interface without metadata dict
# Version 1 is the initial interface without metadata dict - deprecated and no longer supported.
# Version 2 populate_* include metadata dict
# Version 3 - First version with short and leverage support
INTERFACE_VERSION: int = 3

_populate_fun_len: int = 0
_buy_fun_len: int = 0
_sell_fun_len: int = 0
_ft_params_from_file: Dict
# associated minimal roi
minimal_roi: Dict = {}
@@ -114,7 +110,7 @@ class IStrategy(ABC, HyperStrategyMixin):
# Class level variables (intentional) containing
# the dataprovider (dp) (access to other candles, historic data, ...)
# and wallets - access to the current balance.
dp: Optional[DataProvider]
dp: DataProvider
wallets: Optional[Wallets] = None
# Filled from configuration
stake_currency: str
@@ -197,6 +193,13 @@ class IStrategy(ABC, HyperStrategyMixin):
"""
return self.populate_sell_trend(dataframe, metadata)

def bot_start(self, **kwargs) -> None:
"""
Called only once after bot instantiation.
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
"""
pass

def bot_loop_start(self, **kwargs) -> None:
"""
Called at the start of the bot iteration (one loop).
@@ -206,18 +209,18 @@ class IStrategy(ABC, HyperStrategyMixin):
"""
pass

def check_buy_timeout(self, pair: str, trade: Trade, order: dict,
def check_buy_timeout(self, pair: str, trade: Trade, order: Order,
current_time: datetime, **kwargs) -> bool:
"""
DEPRECATED: Please use `check_entry_timeout` instead.
"""
return False

def check_entry_timeout(self, pair: str, trade: Trade, order: dict,
def check_entry_timeout(self, pair: str, trade: Trade, order: Order,
current_time: datetime, **kwargs) -> bool:
"""
Check entry timeout function callback.
This method can be used to override the enter-timeout.
This method can be used to override the entry-timeout.
It is called whenever a limit entry order has been created,
and is not yet fully filled.
Configuration options in `unfilledtimeout` will be verified before this,
@@ -225,8 +228,8 @@ class IStrategy(ABC, HyperStrategyMixin):

When not implemented by a strategy, this simply returns False.
:param pair: Pair the trade is for
:param trade: trade object.
:param order: Order dictionary as returned from CCXT.
:param trade: Trade object.
:param order: Order object.
:param current_time: datetime object, containing the current datetime
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
:return bool: When True is returned, then the entry order is cancelled.
@@ -234,30 +237,30 @@ class IStrategy(ABC, HyperStrategyMixin):
return self.check_buy_timeout(
pair=pair, trade=trade, order=order, current_time=current_time)

def check_sell_timeout(self, pair: str, trade: Trade, order: dict,
def check_sell_timeout(self, pair: str, trade: Trade, order: Order,
current_time: datetime, **kwargs) -> bool:
"""
DEPRECATED: Please use `check_exit_timeout` instead.
"""
return False

def check_exit_timeout(self, pair: str, trade: Trade, order: dict,
def check_exit_timeout(self, pair: str, trade: Trade, order: Order,
current_time: datetime, **kwargs) -> bool:
"""
Check sell timeout function callback.
Check exit timeout function callback.
This method can be used to override the exit-timeout.
It is called whenever a (long) limit sell order or (short) limit buy
has been created, and is not yet fully filled.
It is called whenever a limit exit order has been created,
and is not yet fully filled.
Configuration options in `unfilledtimeout` will be verified before this,
so ensure to set these timeouts high enough.

When not implemented by a strategy, this simply returns False.
:param pair: Pair the trade is for
:param trade: trade object.
:param order: Order dictionary as returned from CCXT.
:param trade: Trade object.
:param order: Order object
:param current_time: datetime object, containing the current datetime
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
:return bool: When True is returned, then the (long)sell/(short)buy-order is cancelled.
:return bool: When True is returned, then the exit-order is cancelled.
"""
return self.check_sell_timeout(
pair=pair, trade=trade, order=order, current_time=current_time)
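
With this change the timeout callbacks receive freqtrade's `Order` object rather than the raw CCXT dict. A minimal strategy-side sketch under the new signature (the ten-minute cutoff and the use of `trade.open_date_utc` are illustrative; other required strategy methods are omitted):

    from datetime import datetime, timedelta

    from freqtrade.persistence import Order, Trade
    from freqtrade.strategy import IStrategy

    class MyStrategy(IStrategy):
        def check_entry_timeout(self, pair: str, trade: Trade, order: Order,
                                current_time: datetime, **kwargs) -> bool:
            # Cancel any entry order still unfilled ten minutes
            # after the trade was opened.
            return current_time - trade.open_date_utc > timedelta(minutes=10)
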
@@ -359,7 +362,7 @@ class IStrategy(ABC, HyperStrategyMixin):

def custom_exit_price(self, pair: str, trade: Trade,
current_time: datetime, proposed_rate: float,
current_profit: float, **kwargs) -> float:
current_profit: float, exit_tag: Optional[str], **kwargs) -> float:
"""
Custom exit price logic, returning the new exit price.

@@ -372,6 +375,7 @@ class IStrategy(ABC, HyperStrategyMixin):
:param current_time: datetime object, containing the current datetime
:param proposed_rate: Rate, calculated based on pricing settings in exit_pricing.
:param current_profit: Current profit (as ratio), calculated based on current_rate.
:param exit_tag: Exit reason.
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
:return float: New exit price value if provided
"""
@@ -541,7 +545,7 @@ class IStrategy(ABC, HyperStrategyMixin):
"""
return self.__class__.__name__

def lock_pair(self, pair: str, until: datetime, reason: str = None) -> None:
def lock_pair(self, pair: str, until: datetime, reason: str = None, side: str = '*') -> None:
"""
Locks pair until a given timestamp happens.
Locked pairs are not analyzed, and are prevented from opening new trades.
@@ -551,8 +555,9 @@ class IStrategy(ABC, HyperStrategyMixin):
:param until: datetime in UTC until the pair should be blocked from opening new trades.
Needs to be timezone aware `datetime.now(timezone.utc)`
:param reason: Optional string explaining why the pair was locked.
:param side: Side to check, can be long, short or '*'
"""
PairLocks.lock_pair(pair, until, reason)
PairLocks.lock_pair(pair, until, reason, side=side)
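
Usage sketch for the new `side` parameter, from inside a strategy callback (pair, duration, and reason are illustrative):

    from datetime import datetime, timedelta, timezone

    # Block new long entries on the pair for two hours
    # while leaving shorts unaffected.
    self.lock_pair('ETH/USDT',
                   until=datetime.now(timezone.utc) + timedelta(hours=2),
                   reason='cooldown after stoploss', side='long')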

def unlock_pair(self, pair: str) -> None:
"""
@@ -572,7 +577,7 @@ class IStrategy(ABC, HyperStrategyMixin):
"""
PairLocks.unlock_reason(reason, datetime.now(timezone.utc))

def is_pair_locked(self, pair: str, candle_date: datetime = None) -> bool:
def is_pair_locked(self, pair: str, *, candle_date: datetime = None, side: str = '*') -> bool:
"""
Checks if a pair is currently locked
The 2nd, optional parameter ensures that locks are applied until the new candle arrives,
@@ -580,15 +585,16 @@ class IStrategy(ABC, HyperStrategyMixin):
of 2 seconds for an entry order to happen on an old signal.
:param pair: "Pair to check"
:param candle_date: Date of the last candle. Optional, defaults to current date
:param side: Side to check, can be long, short or '*'
:returns: locking state of the pair in question.
"""

if not candle_date:
# Simple call ...
return PairLocks.is_pair_locked(pair)
return PairLocks.is_pair_locked(pair, side=side)
else:
lock_time = timeframe_to_next_date(self.timeframe, candle_date)
return PairLocks.is_pair_locked(pair, lock_time)
return PairLocks.is_pair_locked(pair, lock_time, side=side)
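
Note the bare `*` in the new signature: `candle_date` and `side` are now keyword-only, so a stale positional caller raises TypeError instead of silently binding the wrong parameter. Callers now look like this (variable names from a typical `populate_*` context, illustrative):

    # candle_date must be passed by keyword after this change:
    locked = self.is_pair_locked(metadata['pair'],
                                 candle_date=dataframe['date'].iloc[-1],
                                 side='long')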

def analyze_ticker(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
"""
@@ -881,14 +887,10 @@ class IStrategy(ABC, HyperStrategyMixin):
current_rate = rate
current_profit = trade.calc_profit_ratio(current_rate)

if (self.exit_profit_only and current_profit <= self.exit_profit_offset):
# exit_profit_only and profit doesn't reach the offset - ignore sell signal
pass
elif self.use_exit_signal and not enter:
if exit_:
if self.use_exit_signal:
if exit_ and not enter:
exit_signal = ExitType.EXIT_SIGNAL
else:
trade_type = "exit_short" if trade.is_short else "sell"
custom_reason = strategy_safe_wrapper(self.custom_exit, default_retval=False)(
pair=trade.pair, trade=trade, current_time=current_time,
current_rate=current_rate, current_profit=current_profit)
@@ -896,13 +898,17 @@ class IStrategy(ABC, HyperStrategyMixin):
exit_signal = ExitType.CUSTOM_EXIT
if isinstance(custom_reason, str):
if len(custom_reason) > CUSTOM_EXIT_MAX_LENGTH:
logger.warning(f'Custom {trade_type} reason returned from '
logger.warning(f'Custom exit reason returned from '
f'custom_exit is too long and was trimmed '
f'to {CUSTOM_EXIT_MAX_LENGTH} characters.')
custom_reason = custom_reason[:CUSTOM_EXIT_MAX_LENGTH]
else:
custom_reason = None
if exit_signal in (ExitType.CUSTOM_EXIT, ExitType.EXIT_SIGNAL):
if (
exit_signal == ExitType.CUSTOM_EXIT
or (exit_signal == ExitType.EXIT_SIGNAL
and (not self.exit_profit_only or current_profit > self.exit_profit_offset))
):
logger.debug(f"{trade.pair} - Sell signal received. "
f"exit_type=ExitType.{exit_signal.name}" +
(f", custom_reason={custom_reason}" if custom_reason else "")
@@ -1095,12 +1101,7 @@ class IStrategy(ABC, HyperStrategyMixin):
dataframe = _create_and_merge_informative_pair(
self, dataframe, metadata, inf_data, populate_fn)

if self._populate_fun_len == 2:
warnings.warn("deprecated - check out the Sample strategy to see "
"the current function headers!", DeprecationWarning)
return self.populate_indicators(dataframe) # type: ignore
else:
return self.populate_indicators(dataframe, metadata)
return self.populate_indicators(dataframe, metadata)

def advise_entry(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
"""
@@ -1114,12 +1115,7 @@ class IStrategy(ABC, HyperStrategyMixin):

logger.debug(f"Populating enter signals for pair {metadata.get('pair')}.")

if self._buy_fun_len == 2:
warnings.warn("deprecated - check out the Sample strategy to see "
"the current function headers!", DeprecationWarning)
df = self.populate_buy_trend(dataframe) # type: ignore
else:
df = self.populate_entry_trend(dataframe, metadata)
df = self.populate_entry_trend(dataframe, metadata)
if 'enter_long' not in df.columns:
df = df.rename({'buy': 'enter_long', 'buy_tag': 'enter_tag'}, axis='columns')

@@ -1134,14 +1130,8 @@ class IStrategy(ABC, HyperStrategyMixin):
currently traded pair
:return: DataFrame with exit column
"""

logger.debug(f"Populating exit signals for pair {metadata.get('pair')}.")
if self._sell_fun_len == 2:
warnings.warn("deprecated - check out the Sample strategy to see "
"the current function headers!", DeprecationWarning)
df = self.populate_sell_trend(dataframe) # type: ignore
else:
df = self.populate_exit_trend(dataframe, metadata)
df = self.populate_exit_trend(dataframe, metadata)
if 'exit_long' not in df.columns:
df = df.rename({'sell': 'exit_long'}, axis='columns')
return df

@@ -56,12 +56,18 @@ def merge_informative_pair(dataframe: pd.DataFrame, informative: pd.DataFrame,

# Combine the 2 dataframes
# all indicators on the informative sample MUST be calculated before this point
dataframe = pd.merge(dataframe, informative, left_on='date',
right_on=date_merge, how='left')
if ffill:
# https://pandas.pydata.org/docs/user_guide/merging.html#timeseries-friendly-merging
# merge_ordered - ffill method is 2.5x faster than separate ffill()
dataframe = pd.merge_ordered(dataframe, informative, fill_method="ffill", left_on='date',
right_on=date_merge, how='left')
else:
dataframe = pd.merge(dataframe, informative, left_on='date',
right_on=date_merge, how='left')
dataframe = dataframe.drop(date_merge, axis=1)

if ffill:
dataframe = dataframe.ffill()
# if ffill:
# dataframe = dataframe.ffill()

return dataframe
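
The source comment's "2.5x faster" claim refers to letting `merge_ordered` forward-fill during the merge instead of running a separate `ffill()` pass afterwards. A runnable illustration of the same call shape (column names and values are made up):

    import pandas as pd

    base = pd.DataFrame({
        'date': pd.date_range('2022-01-01', periods=4, freq='15min'),
        'close': [1.0, 1.1, 1.2, 1.3],
    })
    informative = pd.DataFrame({
        'date_merge_1h': pd.to_datetime(['2022-01-01 00:00']),
        'rsi_1h': [55.0],
    })
    merged = pd.merge_ordered(base, informative, fill_method='ffill',
                              left_on='date', right_on='date_merge_1h', how='left')
    print(merged[['date', 'close', 'rsi_1h']])
    # rsi_1h is forward-filled across all four 15m rows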

@@ -93,9 +99,9 @@ def stoploss_from_open(
return 1

if is_short is True:
stoploss = -1+((1-open_relative_stop)/(1-current_profit))
stoploss = -1 + ((1 - open_relative_stop) / (1 - current_profit))
else:
stoploss = 1-((1+open_relative_stop)/(1+current_profit))
stoploss = 1 - ((1 + open_relative_stop) / (1 + current_profit))

# negative stoploss values indicate the requested stop price is higher/lower
# (long/short) than the current price
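
A quick numeric check of the reformatted long-side formula. For a long trade currently up 10% whose stop should sit 5% above the open rate:

    open_relative_stop = 0.05   # desired stop: 5% above the open rate
    current_profit = 0.10       # long trade currently up 10%

    stoploss = 1 - ((1 + open_relative_stop) / (1 + current_profit))
    print(round(stoploss, 5))   # 0.04545 -> stop ~4.5% below the current rate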

@@ -32,7 +32,7 @@ def custom_entry_price(self, pair: str, current_time: 'datetime', proposed_rate:

def custom_exit_price(self, pair: str, trade: 'Trade',
current_time: 'datetime', proposed_rate: float,
current_profit: float, **kwargs) -> float:
current_profit: float, exit_tag: Optional[str], **kwargs) -> float:
"""
Custom exit price logic, returning the new exit price.

@@ -45,6 +45,7 @@ def custom_exit_price(self, pair: str, trade: 'Trade',
:param current_time: datetime object, containing the current datetime
:param proposed_rate: Rate, calculated based on pricing settings in exit_pricing.
:param current_profit: Current profit (as ratio), calculated based on current_rate.
:param exit_tag: Exit reason.
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
:return float: New exit price value if provided
"""
@@ -170,7 +171,8 @@ def confirm_trade_exit(self, pair: str, trade: 'Trade', order_type: str, amount:
"""
return True

def check_entry_timeout(self, pair: str, trade: 'Trade', order: dict, **kwargs) -> bool:
def check_entry_timeout(self, pair: str, trade: 'Trade', order: 'Order',
current_time: datetime, **kwargs) -> bool:
"""
Check entry timeout function callback.
This method can be used to override the entry-timeout.
@@ -183,14 +185,16 @@ def check_entry_timeout(self, pair: str, trade: 'Trade', order: dict, **kwargs)

When not implemented by a strategy, this simply returns False.
:param pair: Pair the trade is for
:param trade: trade object.
:param order: Order dictionary as returned from CCXT.
:param trade: Trade object.
:param order: Order object.
:param current_time: datetime object, containing the current datetime
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
:return bool: When True is returned, then the buy-order is cancelled.
:return bool: When True is returned, then the entry order is cancelled.
"""
return False

def check_exit_timeout(self, pair: str, trade: 'Trade', order: dict, **kwargs) -> bool:
def check_exit_timeout(self, pair: str, trade: 'Trade', order: 'Order',
current_time: datetime, **kwargs) -> bool:
"""
Check exit timeout function callback.
This method can be used to override the exit-timeout.
@@ -203,8 +207,9 @@ def check_exit_timeout(self, pair: str, trade: 'Trade', order: dict, **kwargs) -

When not implemented by a strategy, this simply returns False.
:param pair: Pair the trade is for
:param trade: trade object.
:param order: Order dictionary as returned from CCXT.
:param trade: Trade object.
:param order: Order object.
:param current_time: datetime object, containing the current datetime
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
:return bool: When True is returned, then the exit-order is cancelled.
"""