"""
Helpers when analyzing backtest data
"""
import logging
from datetime import timezone
from pathlib import Path
from typing import Dict, Tuple, Union

import numpy as np
import pandas as pd

from freqtrade import persistence
from freqtrade.misc import json_load
from freqtrade.persistence import Trade

logger = logging.getLogger(__name__)

# must align with columns in backtest.py
BT_DATA_COLUMNS = ["pair", "profitperc", "open_time", "close_time", "index", "duration",
                   "open_rate", "close_rate", "open_at_end", "sell_reason"]


def load_backtest_data(filename: Union[Path, str]) -> pd.DataFrame:
    """
    Load backtest data file.
    :param filename: pathlib.Path object, or string pointing to the file.
    :return: a dataframe with the analysis results
    """
    if isinstance(filename, str):
        filename = Path(filename)

    if not filename.is_file():
        raise ValueError(f"File {filename} does not exist.")

    with filename.open() as file:
        data = json_load(file)

    df = pd.DataFrame(data, columns=BT_DATA_COLUMNS)

    df['open_time'] = pd.to_datetime(df['open_time'],
                                     unit='s',
                                     utc=True,
                                     infer_datetime_format=True
                                     )
    df['close_time'] = pd.to_datetime(df['close_time'],
                                      unit='s',
                                      utc=True,
                                      infer_datetime_format=True
                                      )
    # Price difference between close and open rate - 'profitperc' holds the relative profit
    df['profit'] = df['close_rate'] - df['open_rate']
    df = df.sort_values("open_time").reset_index(drop=True)
    return df
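

# Usage sketch - the export path below is hypothetical; backtesting writes
# wherever the export options point:
#
#   trades = load_backtest_data("user_data/backtest_results/backtest-result.json")
#   print(trades[['pair', 'profitperc', 'sell_reason']].head())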


def analyze_trade_parallelism(results: pd.DataFrame, timeframe: str) -> pd.DataFrame:
    """
    Find overlapping trades by expanding each trade once per period it was open
    and then counting overlaps.
    :param results: Results Dataframe - can be loaded with load_backtest_data()
    :param timeframe: Timeframe used for backtest
    :return: dataframe with open-counts per time-period in timeframe
    """
    from freqtrade.exchange import timeframe_to_minutes
    timeframe_min = timeframe_to_minutes(timeframe)
    # Expand each trade into one row per candle it was open
    dates = [pd.Series(pd.date_range(row[1].open_time, row[1].close_time,
                                     freq=f"{timeframe_min}min"))
             for row in results[['open_time', 'close_time']].iterrows()]
    deltas = [len(x) for x in dates]
    dates = pd.Series(pd.concat(dates).values, name='date')
    df2 = pd.DataFrame(np.repeat(results.values, deltas, axis=0), columns=results.columns)

    df2 = pd.concat([dates, df2], axis=1)
    df2 = df2.set_index('date')
    # Count rows per candle - every expanded row contributes one open trade
    df_final = df2.resample(f"{timeframe_min}min")[['pair']].count()
    df_final = df_final.rename({'pair': 'open_trades'}, axis=1)
    return df_final
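

# Usage sketch (assuming `trades` was loaded with load_backtest_data() above):
#
#   parallel = analyze_trade_parallelism(trades, "5m")
#   parallel['open_trades'].max()   # peak number of simultaneously open trades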


def evaluate_result_multi(results: pd.DataFrame, timeframe: str,
                          max_open_trades: int) -> pd.DataFrame:
    """
    Find overlapping trades by expanding each trade once per period it was open
    and then counting overlaps
    :param results: Results Dataframe - can be loaded with load_backtest_data()
    :param timeframe: Timeframe used for the backtest
    :param max_open_trades: parameter max_open_trades used during backtest run
    :return: dataframe with open-counts per time-period in timeframe
    """
    df_final = analyze_trade_parallelism(results, timeframe)
    return df_final[df_final['open_trades'] > max_open_trades]
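

# Usage sketch - returns only the periods where the limit was exceeded, so an
# empty result means the backtest respected max_open_trades:
#
#   violations = evaluate_result_multi(trades, "5m", max_open_trades=3)
#   assert violations.empty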


def load_trades_from_db(db_url: str) -> pd.DataFrame:
    """
    Load trades from a DB (using db_url)
    :param db_url: Sqlite url (default format sqlite:///tradesv3.dry-run.sqlite)
    :return: Dataframe containing Trades
    """
    persistence.init(db_url, clean_open_orders=False)

    columns = ["pair", "open_time", "close_time", "profit", "profitperc",
               "open_rate", "close_rate", "amount", "duration", "sell_reason",
               "fee_open", "fee_close", "open_rate_requested", "close_rate_requested",
               "stake_amount", "max_rate", "min_rate", "id", "exchange",
               "stop_loss", "initial_stop_loss", "strategy", "ticker_interval"]

    trades = pd.DataFrame([(t.pair,
                            t.open_date.replace(tzinfo=timezone.utc),
                            t.close_date.replace(tzinfo=timezone.utc) if t.close_date else None,
                            t.calc_profit(), t.calc_profit_ratio(),
                            t.open_rate, t.close_rate, t.amount,
                            # Duration in minutes - None while the trade is still open
                            (round((t.close_date.timestamp() - t.open_date.timestamp()) / 60, 2)
                             if t.close_date else None),
                            t.sell_reason,
                            t.fee_open, t.fee_close,
                            t.open_rate_requested,
                            t.close_rate_requested,
                            t.stake_amount,
                            t.max_rate,
                            t.min_rate,
                            t.id, t.exchange,
                            t.stop_loss, t.initial_stop_loss,
                            t.strategy, t.ticker_interval
                            )
                           for t in Trade.get_trades().all()],
                          columns=columns)

    return trades
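

# Usage sketch (db file name hypothetical - matches the default dry-run database):
#
#   trades = load_trades_from_db("sqlite:///tradesv3.dry-run.sqlite")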


def load_trades(source: str, db_url: str, exportfilename: Path,
                no_trades: bool = False) -> pd.DataFrame:
    """
    Based on configuration option "trade_source":
    * loads data from DB (using `db_url`)
    * loads data from backtestfile (using `exportfilename`)
    :param source: "DB" or "file" - specify source to load from
    :param db_url: sqlalchemy formatted url to a database
    :param exportfilename: Json file generated by backtesting
    :param no_trades: Skip using trades, only return backtesting data columns
    :return: DataFrame containing trades
    """
    if no_trades:
        df = pd.DataFrame(columns=BT_DATA_COLUMNS)
        return df

    if source == "DB":
        return load_trades_from_db(db_url)
    elif source == "file":
        return load_backtest_data(exportfilename)
    else:
        # Fail loudly instead of silently returning None for an unknown source
        raise ValueError(f'Unknown trade source "{source}", expected "DB" or "file".')
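

# Usage sketch - `config` here is a hypothetical dict holding the usual
# freqtrade configuration options:
#
#   trades = load_trades(config['trade_source'], db_url=config.get('db_url'),
#                        exportfilename=config.get('exportfilename'))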


def extract_trades_of_period(dataframe: pd.DataFrame, trades: pd.DataFrame) -> pd.DataFrame:
    """
    Compare trades and backtested pair DataFrames to get trades performed on backtested period
    :return: the DataFrame of trades in that period
    """
    trades = trades.loc[(trades['open_time'] >= dataframe.iloc[0]['date']) &
                        (trades['close_time'] <= dataframe.iloc[-1]['date'])]
    return trades
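

# Usage sketch (assuming `dataframe` is one pair's candle data with a 'date' column):
#
#   trades_in_range = extract_trades_of_period(dataframe, trades)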


def combine_dataframes_with_mean(data: Dict[str, pd.DataFrame],
                                 column: str = "close") -> pd.DataFrame:
    """
    Combine the `column` of multiple dataframes into one combined DataFrame
    :param data: Dict of Dataframes, dict key should be pair.
    :param column: Column in the original dataframes to use
    :return: DataFrame with the column renamed to the dict key, and a column
        named mean, containing the mean of all pairs.
    """
    df_comb = pd.concat([data[pair].set_index('date').rename(
        {column: pair}, axis=1)[pair] for pair in data], axis=1)

    df_comb['mean'] = df_comb.mean(axis=1)

    return df_comb
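

# Usage sketch (assuming `data` is a dict of candle dataframes keyed by pair):
#
#   df_comb = combine_dataframes_with_mean(data, column="close")
#   df_comb['mean']   # mean close price across all pairs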


def create_cum_profit(df: pd.DataFrame, trades: pd.DataFrame, col_name: str,
                      timeframe: str) -> pd.DataFrame:
    """
    Adds a column `col_name` with the cumulative profit for the given trades array.
    :param df: DataFrame with date index
    :param trades: DataFrame containing trades (requires columns close_time and profitperc)
    :param col_name: Column name that will be assigned the results
    :param timeframe: Timeframe used during the backtest
    :return: Returns df with one additional column, col_name, containing the cumulative profit.
    """
    from freqtrade.exchange import timeframe_to_minutes
    timeframe_minutes = timeframe_to_minutes(timeframe)
    # Resample to timeframe to make sure trades match candles
    _trades_sum = trades.resample(f'{timeframe_minutes}min', on='close_time')[['profitperc']].sum()
    df.loc[:, col_name] = _trades_sum.cumsum()
    # Set first value to 0
    df.loc[df.iloc[0].name, col_name] = 0
    # FFill to get continuous
    df[col_name] = df[col_name].ffill()
    return df
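

# Usage sketch (assuming `df` is candle data; the function requires a date index):
#
#   df = create_cum_profit(df.set_index('date'), trades, 'cum_profit', '5m')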


def calculate_max_drawdown(trades: pd.DataFrame, *, date_col: str = 'close_time',
                           value_col: str = 'profitperc'
                           ) -> Tuple[float, pd.Timestamp, pd.Timestamp]:
    """
    Calculate max drawdown and the corresponding close dates
    :param trades: DataFrame containing trades (requires columns close_time and profitperc)
    :param date_col: Column in DataFrame to use for dates (defaults to 'close_time')
    :param value_col: Column in DataFrame to use for values (defaults to 'profitperc')
    :return: Tuple (float, highdate, lowdate) with absolute max drawdown, high and low time
    :raise: ValueError if trade-dataframe was found empty.
    """
    if len(trades) == 0:
        raise ValueError("Trade dataframe empty.")
    profit_results = trades.sort_values(date_col).reset_index(drop=True)
    max_drawdown_df = pd.DataFrame()
    max_drawdown_df['cumulative'] = profit_results[value_col].cumsum()
    max_drawdown_df['high_value'] = max_drawdown_df['cumulative'].cummax()
    max_drawdown_df['drawdown'] = max_drawdown_df['cumulative'] - max_drawdown_df['high_value']

    # The high point must precede the drawdown low, so only search up to the
    # trough - idxmax() over the full series could return a peak that comes later.
    idxmin = max_drawdown_df['drawdown'].idxmin()
    high_date = profit_results.loc[max_drawdown_df.loc[:idxmin, 'high_value'].idxmax(), date_col]
    low_date = profit_results.loc[idxmin, date_col]

    return abs(min(max_drawdown_df['drawdown'])), high_date, low_date
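

# Usage sketch:
#
#   drawdown, high_date, low_date = calculate_max_drawdown(trades)
#   print(f"Max drawdown {drawdown} from {high_date} to {low_date}")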