Merge branch 'freqtrade:develop' into develop
@@ -26,7 +26,7 @@ BT_DATA_COLUMNS = ['pair', 'stake_amount', 'amount', 'open_date', 'close_date',
                    'profit_ratio', 'profit_abs', 'exit_reason',
                    'initial_stop_loss_abs', 'initial_stop_loss_ratio', 'stop_loss_abs',
                    'stop_loss_ratio', 'min_rate', 'max_rate', 'is_open', 'enter_tag',
-                   'is_short', 'open_timestamp', 'close_timestamp', 'orders'
+                   'leverage', 'is_short', 'open_timestamp', 'close_timestamp', 'orders'
                    ]
@@ -280,6 +280,8 @@ def load_backtest_data(filename: Union[Path, str], strategy: Optional[str] = Non
             # Compatibility support for pre short Columns
             if 'is_short' not in df.columns:
                 df['is_short'] = 0
+            if 'leverage' not in df.columns:
+                df['leverage'] = 1.0
             if 'enter_tag' not in df.columns:
                 df['enter_tag'] = df['buy_tag']
                 df = df.drop(['buy_tag'], axis=1)
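Note: taken together, the two hunks above add 'leverage' to BT_DATA_COLUMNS and backfill it (alongside 'is_short') when loading results exported before short/leverage support landed, so older files keep loading with long-only spot defaults. A minimal standalone sketch of the same defaulting pattern, using invented sample rows:

import pandas as pd

# Invented pre-short/pre-leverage backtest rows: neither column exists yet.
df = pd.DataFrame({'pair': ['BTC/USDT'], 'profit_abs': [12.3], 'buy_tag': ['breakout']})

# Mirror the compatibility block above: default to long-only 1x spot trades.
if 'is_short' not in df.columns:
    df['is_short'] = 0
if 'leverage' not in df.columns:
    df['leverage'] = 1.0
if 'enter_tag' not in df.columns:
    df['enter_tag'] = df['buy_tag']
    df = df.drop(['buy_tag'], axis=1)

print(df.columns.tolist())  # ['pair', 'profit_abs', 'is_short', 'leverage', 'enter_tag']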
@@ -3,10 +3,10 @@ Functions to convert data from one format to another
 """
 import itertools
 import logging
-from datetime import datetime, timezone
 from operator import itemgetter
 from typing import Dict, List
 
+import numpy as np
 import pandas as pd
 from pandas import DataFrame, to_datetime
@@ -176,11 +176,9 @@ def trim_dataframe(df: DataFrame, timerange, df_date_col: str = 'date',
         df = df.iloc[startup_candles:, :]
     else:
         if timerange.starttype == 'date':
-            start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
-            df = df.loc[df[df_date_col] >= start, :]
+            df = df.loc[df[df_date_col] >= timerange.startdt, :]
     if timerange.stoptype == 'date':
-        stop = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
-        df = df.loc[df[df_date_col] <= stop, :]
+        df = df.loc[df[df_date_col] <= timerange.stopdt, :]
     return df
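Note: this hunk (like the history.py and idatahandler.py hunks further down) replaces the repeated datetime.fromtimestamp(timerange.startts, tz=timezone.utc) dance with the startdt/stopdt properties TimeRange now exposes. A minimal sketch of the new call pattern, assuming a freqtrade checkout is importable; the candle data is invented:

import pandas as pd

from freqtrade.configuration import TimeRange
from freqtrade.data.converter import trim_dataframe

# Invented hourly candles spanning the first four days of 2022.
df = pd.DataFrame({
    'date': pd.date_range('2022-01-01', periods=96, freq='1h', tz='UTC'),
    'open': 1.0, 'high': 1.0, 'low': 1.0, 'close': 1.0, 'volume': 0.0,
})

# startdt/stopdt are timezone-aware datetimes derived from startts/stopts.
timerange = TimeRange.parse_timerange('20220102-20220103')
trimmed = trim_dataframe(df, timerange)
print(trimmed['date'].min(), trimmed['date'].max())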
@@ -356,3 +354,29 @@ def convert_ohlcv_format(
     if erase and convert_from != convert_to:
         logger.info(f"Deleting source data for {pair} / {timeframe}")
         src.ohlcv_purge(pair=pair, timeframe=timeframe, candle_type=candle_type)
+
+
+def reduce_dataframe_footprint(df: DataFrame) -> DataFrame:
+    """
+    Ensure all values are float32 in the incoming dataframe.
+    :param df: Dataframe to be converted to float/int 32s
+    :return: Dataframe converted to float/int 32s
+    """
+
+    logger.debug(f"Memory usage of dataframe is "
+                 f"{df.memory_usage().sum() / 1024**2:.2f} MB")
+
+    df_dtypes = df.dtypes
+    for column, dtype in df_dtypes.items():
+        if column in ['open', 'high', 'low', 'close', 'volume']:
+            continue
+        if dtype == np.float64:
+            df_dtypes[column] = np.float32
+        elif dtype == np.int64:
+            df_dtypes[column] = np.int32
+    df = df.astype(df_dtypes)
+
+    logger.debug(f"Memory usage after optimization is: "
+                 f"{df.memory_usage().sum() / 1024**2:.2f} MB")
+
+    return df
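Note: the new reduce_dataframe_footprint downcasts float64/int64 feature columns to their 32-bit equivalents while deliberately skipping the OHLCV price columns, which can roughly halve the memory of wide indicator frames. A quick demonstration with invented data:

import numpy as np
from pandas import DataFrame

from freqtrade.data.converter import reduce_dataframe_footprint

df = DataFrame({
    'close': np.random.rand(1000),   # price column: kept at float64
    'rsi': np.random.rand(1000),     # indicator: downcast to float32
    'rank': np.arange(1000),         # int64 indicator: downcast to int32
})

before = df.memory_usage().sum()
df = reduce_dataframe_footprint(df)
print(f"{before} -> {df.memory_usage().sum()} bytes")
print(df.dtypes)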
@@ -104,13 +104,15 @@ class DataProvider:
     def _emit_df(
         self,
         pair_key: PairWithTimeframe,
-        dataframe: DataFrame
+        dataframe: DataFrame,
+        new_candle: bool
     ) -> None:
         """
         Send this dataframe as an ANALYZED_DF message to RPC
 
         :param pair_key: PairWithTimeframe tuple
-        :param data: Tuple containing the DataFrame and the datetime it was cached
+        :param dataframe: Dataframe to emit
+        :param new_candle: This is a new candle
         """
         if self.__rpc:
             self.__rpc.send_msg(
@@ -123,6 +125,11 @@ class DataProvider:
                 }
             }
             )
+            if new_candle:
+                self.__rpc.send_msg({
+                    'type': RPCMessageType.NEW_CANDLE,
+                    'data': pair_key,
+                })
 
     def _add_external_df(
         self,
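Note: with the new flag, _emit_df publishes a lightweight RPCMessageType.NEW_CANDLE notification next to the full ANALYZED_DF payload whenever a fresh candle arrives, so consumers can react without diffing dataframes. A hedged sketch of the consumer side; only the 'type'/'data' message fields come from the hunk above, the handler itself is hypothetical:

from freqtrade.enums import RPCMessageType

def on_message(msg: dict) -> None:
    # Hypothetical subscriber dispatch for the new message type.
    if msg.get('type') == RPCMessageType.NEW_CANDLE:
        pair, timeframe, candle_type = msg['data']  # the PairWithTimeframe tuple
        print(f"new candle for {pair} @ {timeframe}")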
@@ -1,11 +1,12 @@
 import logging
 from pathlib import Path
 from typing import List, Optional
 
 import joblib
 import pandas as pd
 from tabulate import tabulate
 
+from freqtrade.configuration import TimeRange
 from freqtrade.constants import Config
 from freqtrade.data.btanalysis import (get_latest_backtest_filename, load_backtest_data,
                                        load_backtest_stats)
 from freqtrade.exceptions import OperationalException
@@ -152,37 +153,55 @@ def _do_group_table_output(bigdf, glist):
         logger.warning("Invalid group mask specified.")
 
 
-def _print_results(analysed_trades, stratname, analysis_groups,
-                   enter_reason_list, exit_reason_list,
-                   indicator_list, columns=None):
-    if columns is None:
-        columns = ['pair', 'open_date', 'close_date', 'profit_abs', 'enter_reason', 'exit_reason']
-
-    bigdf = pd.DataFrame()
-
+def _select_rows_within_dates(df, timerange=None, df_date_col: str = 'date'):
+    if timerange:
+        if timerange.starttype == 'date':
+            df = df.loc[(df[df_date_col] >= timerange.startdt)]
+        if timerange.stoptype == 'date':
+            df = df.loc[(df[df_date_col] < timerange.stopdt)]
+    return df
+
+
+def _select_rows_by_tags(df, enter_reason_list, exit_reason_list):
+    if enter_reason_list and "all" not in enter_reason_list:
+        df = df.loc[(df['enter_reason'].isin(enter_reason_list))]
+
+    if exit_reason_list and "all" not in exit_reason_list:
+        df = df.loc[(df['exit_reason'].isin(exit_reason_list))]
+    return df
+
+
+def prepare_results(analysed_trades, stratname,
+                    enter_reason_list, exit_reason_list,
+                    timerange=None):
+    res_df = pd.DataFrame()
     for pair, trades in analysed_trades[stratname].items():
-        bigdf = pd.concat([bigdf, trades], ignore_index=True)
+        res_df = pd.concat([res_df, trades], ignore_index=True)
 
-    if bigdf.shape[0] > 0 and ('enter_reason' in bigdf.columns):
+    res_df = _select_rows_within_dates(res_df, timerange)
+
+    if res_df is not None and res_df.shape[0] > 0 and ('enter_reason' in res_df.columns):
+        res_df = _select_rows_by_tags(res_df, enter_reason_list, exit_reason_list)
+
+    return res_df
+
+
+def print_results(res_df, analysis_groups, indicator_list):
+    if res_df.shape[0] > 0:
         if analysis_groups:
-            _do_group_table_output(bigdf, analysis_groups)
-
-        if enter_reason_list and "all" not in enter_reason_list:
-            bigdf = bigdf.loc[(bigdf['enter_reason'].isin(enter_reason_list))]
-
-        if exit_reason_list and "all" not in exit_reason_list:
-            bigdf = bigdf.loc[(bigdf['exit_reason'].isin(exit_reason_list))]
+            _do_group_table_output(res_df, analysis_groups)
 
         if "all" in indicator_list:
-            print(bigdf)
+            print(res_df)
         elif indicator_list is not None:
             available_inds = []
             for ind in indicator_list:
-                if ind in bigdf:
+                if ind in res_df:
                     available_inds.append(ind)
             ilist = ["pair", "enter_reason", "exit_reason"] + available_inds
-            _print_table(bigdf[ilist], sortcols=['exit_reason'], show_index=False)
+            _print_table(res_df[ilist], sortcols=['exit_reason'], show_index=False)
     else:
-        print("\\_ No trades to show")
+        print("\\No trades to show")
 
 
 def _print_table(df, sortcols=None, show_index=False):
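Note: the monolithic _print_results is split into composable steps: prepare_results concatenates and filters the per-pair trades (now also by timerange), while print_results only renders. A sketch of how the pieces chain, with an invented analysed-trades structure; the module path is assumed from the freqtrade source tree this diff patches:

import pandas as pd

from freqtrade.data.entryexitanalysis import prepare_results, print_results

# Invented structure: {strategy: {pair: trades-DataFrame}}.
analysed_trades = {'MyStrategy': {'BTC/USDT': pd.DataFrame({
    'pair': ['BTC/USDT'], 'enter_reason': ['breakout'],
    'exit_reason': ['roi'], 'profit_abs': [5.0],
})}}

res_df = prepare_results(analysed_trades, 'MyStrategy',
                         enter_reason_list=['all'], exit_reason_list=['roi'],
                         timerange=None)
print_results(res_df, analysis_groups=[], indicator_list=[])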
@@ -201,27 +220,34 @@ def _print_table(df, sortcols=None, show_index=False):
     )
 
 
-def process_entry_exit_reasons(backtest_dir: Path,
-                               pairlist: List[str],
-                               analysis_groups: Optional[List[str]] = ["0", "1", "2"],
-                               enter_reason_list: Optional[List[str]] = ["all"],
-                               exit_reason_list: Optional[List[str]] = ["all"],
-                               indicator_list: Optional[List[str]] = []):
+def process_entry_exit_reasons(config: Config):
     try:
-        backtest_stats = load_backtest_stats(backtest_dir)
+        analysis_groups = config.get('analysis_groups', [])
+        enter_reason_list = config.get('enter_reason_list', ["all"])
+        exit_reason_list = config.get('exit_reason_list', ["all"])
+        indicator_list = config.get('indicator_list', [])
+
+        timerange = TimeRange.parse_timerange(None if config.get(
+            'timerange') is None else str(config.get('timerange')))
+
+        backtest_stats = load_backtest_stats(config['exportfilename'])
+
         for strategy_name, results in backtest_stats['strategy'].items():
-            trades = load_backtest_data(backtest_dir, strategy_name)
+            trades = load_backtest_data(config['exportfilename'], strategy_name)
 
             if not trades.empty:
-                signal_candles = _load_signal_candles(backtest_dir)
-                analysed_trades_dict = _process_candles_and_indicators(pairlist, strategy_name,
-                                                                       trades, signal_candles)
-                _print_results(analysed_trades_dict,
-                               strategy_name,
-                               analysis_groups,
-                               enter_reason_list,
-                               exit_reason_list,
-                               indicator_list)
+                signal_candles = _load_signal_candles(config['exportfilename'])
+                analysed_trades_dict = _process_candles_and_indicators(
+                    config['exchange']['pair_whitelist'], strategy_name,
+                    trades, signal_candles)
+
+                res_df = prepare_results(analysed_trades_dict, strategy_name,
+                                         enter_reason_list, exit_reason_list,
+                                         timerange=timerange)
+
+                print_results(res_df,
+                              analysis_groups,
+                              indicator_list)
 
     except ValueError as e:
         raise OperationalException(e) from e
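Note: process_entry_exit_reasons now takes the full Config mapping instead of six positional arguments, reading the filters, timerange, export filename and pair whitelist from it. A sketch of a config-driven call; the path and pairs are placeholders and must point at a real backtest export to produce output:

from pathlib import Path

from freqtrade.data.entryexitanalysis import process_entry_exit_reasons

# Placeholder config carrying only the keys the refactored function reads.
config = {
    'exportfilename': Path('user_data/backtest_results'),
    'exchange': {'pair_whitelist': ['BTC/USDT']},
    'analysis_groups': ['0', '1'],
    'enter_reason_list': ['all'],
    'exit_reason_list': ['all'],
    'indicator_list': ['rsi'],
    'timerange': '20220101-20220201',
}
process_entry_exit_reasons(config)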
@@ -1,6 +1,6 @@
 import logging
 import operator
-from datetime import datetime, timezone
+from datetime import datetime
 from pathlib import Path
 from typing import Dict, List, Optional, Tuple
@@ -160,9 +160,9 @@ def _load_cached_data_for_updating(
     end = None
     if timerange:
         if timerange.starttype == 'date':
-            start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
+            start = timerange.startdt
         if timerange.stoptype == 'date':
-            end = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
+            end = timerange.stopdt
 
     # Intentionally don't pass timerange in - since we need to load the full dataset.
     data = data_handler.ohlcv_load(pair, timeframe=timeframe,
@@ -102,6 +102,11 @@ class IDataHandler(ABC):
         :return: (min, max)
         """
         data = self._ohlcv_load(pair, timeframe, None, candle_type)
+        if data.empty:
+            return (
+                datetime.fromtimestamp(0, tz=timezone.utc),
+                datetime.fromtimestamp(0, tz=timezone.utc)
+            )
         return data.iloc[0]['date'].to_pydatetime(), data.iloc[-1]['date'].to_pydatetime()
 
     @abstractmethod
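Note: instead of failing on a missing pair, ohlcv_data_min_max now returns the Unix epoch for both bounds, so callers can detect "no data" with a plain comparison. A sketch of the caller-side check; the data directory is a placeholder and JsonDataHandler is one concrete IDataHandler implementation:

from datetime import datetime, timezone
from pathlib import Path

from freqtrade.data.history.jsondatahandler import JsonDataHandler
from freqtrade.enums import CandleType

handler = JsonDataHandler(Path('user_data/data/binance'))
dmin, dmax = handler.ohlcv_data_min_max('BTC/USDT', '5m', CandleType.SPOT)
if dmin == datetime.fromtimestamp(0, tz=timezone.utc):
    print('No data downloaded for this pair/timeframe yet.')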
@@ -361,13 +366,11 @@ class IDataHandler(ABC):
         """
 
         if timerange.starttype == 'date':
-            start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
-            if pairdata.iloc[0]['date'] > start:
+            if pairdata.iloc[0]['date'] > timerange.startdt:
                 logger.warning(f"{pair}, {candle_type}, {timeframe}, "
                                f"data starts at {pairdata.iloc[0]['date']:%Y-%m-%d %H:%M:%S}")
         if timerange.stoptype == 'date':
-            stop = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
-            if pairdata.iloc[-1]['date'] < stop:
+            if pairdata.iloc[-1]['date'] < timerange.stopdt:
                 logger.warning(f"{pair}, {candle_type}, {timeframe}, "
                                f"data ends at {pairdata.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}")