# pragma pylint: disable=missing-docstring, W0212, too-many-arguments

"""
This module contains the backtesting logic
"""
import logging
from copy import deepcopy
from datetime import datetime, timedelta
from typing import Any, Dict, List, NamedTuple, Optional, Tuple

import arrow
from pandas import DataFrame

from freqtrade.configuration import (TimeRange, remove_credentials,
                                     validate_config_consistency)
from freqtrade.data import history
from freqtrade.data.converter import trim_dataframe
from freqtrade.data.dataprovider import DataProvider
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import timeframe_to_minutes, timeframe_to_seconds
from freqtrade.optimize.optimize_reports import (show_backtest_results,
                                                 store_backtest_result)
from freqtrade.persistence import Trade
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
from freqtrade.state import RunMode
from freqtrade.strategy.interface import IStrategy, SellCheckTuple, SellType

logger = logging.getLogger(__name__)


class BacktestResult(NamedTuple):
    """
    NamedTuple defining the fields of a single backtest trade result.
    """
    pair: str
    profit_percent: float
    profit_abs: float
    open_time: datetime
    close_time: datetime
    open_index: int
    close_index: int
    trade_duration: float
    open_at_end: bool
    open_rate: float
    close_rate: float
    sell_reason: SellType
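

# Note: backtest() collects these results into a pandas DataFrame via
# DataFrame.from_records(trades, columns=BacktestResult._fields), so the fields
# above become the columns of the resulting backtest DataFrame.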


class Backtesting:
    """
    Backtesting class; contains all the logic to run a backtest.

    To run a backtest:
    backtesting = Backtesting(config)
    backtesting.start()
    """

    def __init__(self, config: Dict[str, Any]) -> None:
        self.config = config

        # Reset keys for backtesting
        remove_credentials(self.config)
        self.strategylist: List[IStrategy] = []
        self.exchange = ExchangeResolver.load_exchange(self.config['exchange']['name'], self.config)

        if config.get('fee'):
            self.fee = config['fee']
        else:
            self.fee = self.exchange.get_fee(symbol=self.config['exchange']['pair_whitelist'][0])

        if self.config.get('runmode') != RunMode.HYPEROPT:
            self.dataprovider = DataProvider(self.config, self.exchange)
            IStrategy.dp = self.dataprovider

        if self.config.get('strategy_list', None):
            for strat in list(self.config['strategy_list']):
                stratconf = deepcopy(self.config)
                stratconf['strategy'] = strat
                self.strategylist.append(StrategyResolver.load_strategy(stratconf))
                validate_config_consistency(stratconf)

        else:
            # No strategy list specified, only one strategy
            self.strategylist.append(StrategyResolver.load_strategy(self.config))
            validate_config_consistency(self.config)

        if "ticker_interval" not in self.config:
            raise OperationalException("Timeframe (ticker interval) needs to be set in either "
                                       "configuration or as cli argument `--ticker-interval 5m`")
        self.timeframe = str(self.config.get('ticker_interval'))
        self.timeframe_min = timeframe_to_minutes(self.timeframe)

        # Get maximum required startup period
        self.required_startup = max([strat.startup_candle_count for strat in self.strategylist])
        # Load one (first) strategy
        self._set_strategy(self.strategylist[0])

    def _set_strategy(self, strategy):
        """
        Load strategy into backtesting
        """
        self.strategy = strategy
        # Set stoploss_on_exchange to false for backtesting,
        # since a "perfect" stoploss-sell is assumed anyway,
        # and the regular "stoploss" function would not apply to that case.
        self.strategy.order_types['stoploss_on_exchange'] = False

    def load_bt_data(self) -> Tuple[Dict[str, DataFrame], TimeRange]:
        timerange = TimeRange.parse_timerange(None if self.config.get(
            'timerange') is None else str(self.config.get('timerange')))

        data = history.load_data(
            datadir=self.config['datadir'],
            pairs=self.config['exchange']['pair_whitelist'],
            timeframe=self.timeframe,
            timerange=timerange,
            startup_candles=self.required_startup,
            fail_without_data=True,
            data_format=self.config.get('dataformat_ohlcv', 'json'),
        )

        min_date, max_date = history.get_timerange(data)

        logger.info(
            'Loading data from %s up to %s (%s days)..',
            min_date.isoformat(), max_date.isoformat(), (max_date - min_date).days
        )
        # Adjust the timerange start (startts) forward if not enough data is available
        timerange.adjust_start_if_necessary(timeframe_to_seconds(self.timeframe),
                                            self.required_startup, min_date)
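        # Illustrative example (values are hypothetical): with a 5m timeframe and a
        # required_startup of 100 candles, the start point is pushed forward by
        # 100 * 5 minutes whenever the available data does not cover the warm-up
        # period, so indicators are only evaluated on fully warmed-up candles.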

        return data, timerange

    def _get_ohlcv_as_lists(self, processed: Dict) -> Dict[str, List]:
        """
        Helper function to convert the processed dataframes into lists for performance reasons.

        Used by backtest() - so keep this optimized for performance.
        """
        headers = ['date', 'buy', 'open', 'close', 'sell', 'low', 'high']
        data: Dict = {}
        # Create dict with data
        for pair, pair_data in processed.items():
            pair_data.loc[:, 'buy'] = 0  # cleanup from previous run
            pair_data.loc[:, 'sell'] = 0  # cleanup from previous run

            df_analyzed = self.strategy.advise_sell(
                self.strategy.advise_buy(pair_data, {'pair': pair}), {'pair': pair})[headers].copy()

            # To avoid using data from the future, use buy/sell signals shifted
            # from the previous candle
            df_analyzed.loc[:, 'buy'] = df_analyzed['buy'].shift(1)
            df_analyzed.loc[:, 'sell'] = df_analyzed['sell'].shift(1)
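            # e.g. a buy signal computed on candle N is only acted upon at candle N+1,
            # emulating live behaviour where the signal is known only after the candle closes.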

            df_analyzed.drop(df_analyzed.head(1).index, inplace=True)

            # Convert from Pandas to list for performance reasons
            # (Looping Pandas is slow.)
            data[pair] = [x for x in df_analyzed.itertuples()]
        return data

    def _get_close_rate(self, sell_row, trade: Trade, sell: SellCheckTuple,
                        trade_dur: int) -> float:
        """
        Get close rate for backtesting result
        """
        # Special handling if high or low hit STOP_LOSS or ROI
        if sell.sell_type in (SellType.STOP_LOSS, SellType.TRAILING_STOP_LOSS):
            # Set close_rate to stoploss
            return trade.stop_loss
        elif sell.sell_type == SellType.ROI:
            roi_entry, roi = self.strategy.min_roi_reached_entry(trade_dur)
            if roi is not None:
                if roi == -1 and roi_entry % self.timeframe_min == 0:
                    # When forceselling with ROI=-1, the roi time will always be equal to trade_dur.
                    # If that entry is a multiple of the timeframe (so on candle open),
                    # use open instead of close.
                    return sell_row.open

                # - (Expected abs profit + open_rate + open_fee) / (fee_close - 1)
                close_rate = - (trade.open_rate * roi + trade.open_rate *
                                (1 + trade.fee_open)) / (trade.fee_close - 1)
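                # Derivation sketch (simplified fee model): the line above solves
                #   open_rate * roi == close_rate * (1 - fee_close) - open_rate * (1 + fee_open)
                # for close_rate, i.e. the rate at which the per-unit profit after fees,
                # relative to open_rate, reaches the configured ROI threshold.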

                if (trade_dur > 0 and trade_dur == roi_entry
                        and roi_entry % self.timeframe_min == 0
                        and sell_row.open > close_rate):
                    # A new ROI entry came into effect;
                    # use the candle open if it is above the calculated sell rate.
                    return sell_row.open

                # Use the maximum between close_rate and low as we
                # cannot sell outside of a candle.
                # Applies when a new ROI setting comes in place and the whole candle is above that.
                return max(close_rate, sell_row.low)

            else:
                # This should not be reached...
                return sell_row.open
        else:
            return sell_row.open

    def _get_sell_trade_entry(
            self, pair: str, buy_row: DataFrame,
            partial_ohlcv: List, trade_count_lock: Dict,
            stake_amount: float, max_open_trades: int) -> Optional[BacktestResult]:

        trade = Trade(
            pair=pair,
            open_rate=buy_row.open,
            open_date=buy_row.date,
            stake_amount=stake_amount,
            amount=stake_amount / buy_row.open,
            fee_open=self.fee,
            fee_close=self.fee,
            is_open=True,
        )
        logger.debug(f"{pair} - Backtesting emulates creation of new trade: {trade}.")
        # Calculate win/loss forwards from the buy point
        for sell_row in partial_ohlcv:
            if max_open_trades > 0:
                # Increase trade_count_lock for every iteration
                trade_count_lock[sell_row.date] = trade_count_lock.get(sell_row.date, 0) + 1

            sell = self.strategy.should_sell(trade, sell_row.open, sell_row.date, sell_row.buy,
                                             sell_row.sell, low=sell_row.low, high=sell_row.high)
            if sell.sell_flag:
                trade_dur = int((sell_row.date - buy_row.date).total_seconds() // 60)
                closerate = self._get_close_rate(sell_row, trade, sell, trade_dur)

                return BacktestResult(pair=pair,
                                      profit_percent=trade.calc_profit_ratio(rate=closerate),
                                      profit_abs=trade.calc_profit(rate=closerate),
                                      open_time=buy_row.date,
                                      close_time=sell_row.date,
                                      trade_duration=trade_dur,
                                      open_index=buy_row.Index,
                                      close_index=sell_row.Index,
                                      open_at_end=False,
                                      open_rate=buy_row.open,
                                      close_rate=closerate,
                                      sell_reason=sell.sell_type
                                      )
        if partial_ohlcv:
            # No sell condition found - trade still open at end of backtest period
            sell_row = partial_ohlcv[-1]
            bt_res = BacktestResult(pair=pair,
                                    profit_percent=trade.calc_profit_ratio(rate=sell_row.open),
                                    profit_abs=trade.calc_profit(rate=sell_row.open),
                                    open_time=buy_row.date,
                                    close_time=sell_row.date,
                                    trade_duration=int((
                                        sell_row.date - buy_row.date).total_seconds() // 60),
                                    open_index=buy_row.Index,
                                    close_index=sell_row.Index,
                                    open_at_end=True,
                                    open_rate=buy_row.open,
                                    close_rate=sell_row.open,
                                    sell_reason=SellType.FORCE_SELL
                                    )
            logger.debug(f"{pair} - Force selling still open trade, "
                         f"profit percent: {bt_res.profit_percent}, "
                         f"profit abs: {bt_res.profit_abs}")

            return bt_res
        return None

    def backtest(self, processed: Dict, stake_amount: float,
                 start_date: arrow.Arrow, end_date: arrow.Arrow,
                 max_open_trades: int = 0, position_stacking: bool = False) -> DataFrame:
        """
        Implement backtesting functionality

        NOTE: This method is used by Hyperopt at each iteration. Please keep it optimized.
        Try to avoid ugly code nevertheless - but note that some accessors are sometimes
        slower than functions.
        Avoid extensive logging in this method and functions it calls.

        :param processed: a processed dictionary with format {pair: data}
        :param stake_amount: amount to use for each trade
        :param start_date: backtesting timerange start datetime
        :param end_date: backtesting timerange end datetime
        :param max_open_trades: maximum number of concurrent trades, <= 0 means unlimited
        :param position_stacking: do we allow position stacking?
        :return: DataFrame with trades (results of backtesting)
        """
        logger.debug(f"Run backtest, stake_amount: {stake_amount}, "
                     f"start_date: {start_date}, end_date: {end_date}, "
                     f"max_open_trades: {max_open_trades}, position_stacking: {position_stacking}"
                     )
        trades = []
        trade_count_lock: Dict = {}

        # Use dict of lists with data for performance
        # (looping lists is a lot faster than pandas DataFrames)
        data: Dict = self._get_ohlcv_as_lists(processed)

        lock_pair_until: Dict = {}
        # Indexes per pair, so some pairs are allowed to have a missing start.
        indexes: Dict = {}
        tmp = start_date + timedelta(minutes=self.timeframe_min)

        # Loop timerange and get candle for each pair at that point in time
        while tmp < end_date:

            for i, pair in enumerate(data):
                if pair not in indexes:
                    indexes[pair] = 0

                try:
                    row = data[pair][indexes[pair]]
                except IndexError:
                    # Missing data for one pair at the end.
                    # Warnings for this are shown during data loading
                    continue

                # Wait until the time-counter reaches the start of the data for this pair.
                if row.date > tmp.datetime:
                    continue

                indexes[pair] += 1

                if row.buy == 0 or row.sell == 1:
                    continue  # Skip rows with no buy signal, or rows that would immediately sell off

                if (not position_stacking and pair in lock_pair_until
                        and row.date <= lock_pair_until[pair]):
                    # Without position stacking, we can only have one open trade per pair.
                    continue

                if max_open_trades > 0:
                    # Check if max_open_trades has already been reached for the given date
                    if not trade_count_lock.get(row.date, 0) < max_open_trades:
                        continue
                    trade_count_lock[row.date] = trade_count_lock.get(row.date, 0) + 1
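                    # _get_sell_trade_entry() increments this counter for every candle the
                    # trade remains open, so the max_open_trades limit holds for the whole
                    # trade duration, not just the opening candle.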

                # Since indexes has been incremented before, we need to go one step back to
                # also check the buying candle for sell conditions.
                trade_entry = self._get_sell_trade_entry(pair, row, data[pair][indexes[pair]-1:],
                                                         trade_count_lock, stake_amount,
                                                         max_open_trades)

                if trade_entry:
                    logger.debug(f"{pair} - Locking pair till "
                                 f"close_time={trade_entry.close_time}")
                    lock_pair_until[pair] = trade_entry.close_time
                    trades.append(trade_entry)
                else:
                    # Set lock_pair_until to end of testing period if trade could not be closed
                    lock_pair_until[pair] = end_date.datetime

            # Move time one configured timeframe ahead.
            tmp += timedelta(minutes=self.timeframe_min)
        return DataFrame.from_records(trades, columns=BacktestResult._fields)

    def start(self) -> None:
        """
        Run backtesting end-to-end
        :return: None
        """
        data: Dict[str, Any] = {}

        logger.info('Using stake_currency: %s ...', self.config['stake_currency'])
        logger.info('Using stake_amount: %s ...', self.config['stake_amount'])

        # Use max_open_trades in backtesting, unless --disable-max-market-positions is set
        if self.config.get('use_max_market_positions', True):
            max_open_trades = self.config['max_open_trades']
        else:
            logger.info('Ignoring max_open_trades (--disable-max-market-positions was used) ...')
            max_open_trades = 0
        position_stacking = self.config.get('position_stacking', False)

        data, timerange = self.load_bt_data()

        all_results = {}
        for strat in self.strategylist:
            logger.info("Running backtesting for Strategy %s", strat.get_strategy_name())
            self._set_strategy(strat)

            # Need to reprocess data every time to populate signals
            preprocessed = self.strategy.ohlcvdata_to_dataframe(data)

            # Trim startup period from analyzed dataframe
            for pair, df in preprocessed.items():
                preprocessed[pair] = trim_dataframe(df, timerange)
            min_date, max_date = history.get_timerange(preprocessed)

            logger.info(
                'Backtesting with data from %s up to %s (%s days)..',
                min_date.isoformat(), max_date.isoformat(), (max_date - min_date).days
            )
            # Execute backtest and print results
            all_results[self.strategy.get_strategy_name()] = self.backtest(
                processed=preprocessed,
                stake_amount=self.config['stake_amount'],
                start_date=min_date,
                end_date=max_date,
                max_open_trades=max_open_trades,
                position_stacking=position_stacking,
            )

        if self.config.get('export', False):
            store_backtest_result(self.config['exportfilename'], all_results)
        # Show backtest results
        show_backtest_results(self.config, data, all_results)