# pragma pylint: disable=missing-docstring, W0212, too-many-arguments

"""
This module contains the backtesting logic
"""
import logging
import operator
from argparse import Namespace
from datetime import datetime
from typing import Dict, Tuple, Any, List, Optional, NamedTuple

import arrow
from pandas import DataFrame
from tabulate import tabulate

import freqtrade.optimize as optimize
from freqtrade.exchange import Exchange
from freqtrade.analyze import Analyze
from freqtrade.arguments import Arguments
from freqtrade.configuration import Configuration
from freqtrade.misc import file_dump_json
from freqtrade.persistence import Trade

logger = logging.getLogger(__name__)


class BacktestResult(NamedTuple):
    """
    NamedTuple describing the result of a single backtested trade.
    """
    pair: str
    profit_percent: float
    profit_abs: float
    open_time: datetime
    close_time: datetime
    open_index: int
    close_index: int
    trade_duration: float
    open_at_end: bool
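

# Note: the fields above map one-to-one onto the columns of the DataFrame returned by
# Backtesting.backtest(), which is built via
# DataFrame.from_records(trades, columns=BacktestResult._fields).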


class Backtesting(object):
    """
    Backtesting class, contains all the logic to run a backtest.

    To run a backtest:
    backtesting = Backtesting(config)
    backtesting.start()
    """
    def __init__(self, config: Dict[str, Any]) -> None:
        self.config = config
        self.analyze = Analyze(self.config)
        self.ticker_interval = self.analyze.strategy.ticker_interval
        self.tickerdata_to_dataframe = self.analyze.tickerdata_to_dataframe
        self.populate_buy_trend = self.analyze.populate_buy_trend
        self.populate_sell_trend = self.analyze.populate_sell_trend

        # Reset keys for backtesting
        self.config['exchange']['key'] = ''
        self.config['exchange']['secret'] = ''
        self.config['exchange']['password'] = ''
        self.config['exchange']['uid'] = ''
        self.config['dry_run'] = True
        self.exchange = Exchange(self.config)
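
    # Illustrative config sketch: these are keys this module reads; the values below are
    # placeholders/assumptions, not a complete or validated freqtrade configuration.
    #
    #   config = {
    #       'stake_currency': 'BTC',
    #       'stake_amount': 0.01,
    #       'max_open_trades': 3,
    #       'realistic_simulation': True,
    #       'datadir': 'user_data/data',
    #       'exchange': {'pair_whitelist': ['ETH/BTC'], 'key': '', 'secret': ''},
    #   }
    #   backtesting = Backtesting(config)
    #   backtesting.start()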

    @staticmethod
    def get_timeframe(data: Dict[str, DataFrame]) -> Tuple[arrow.Arrow, arrow.Arrow]:
        """
        Get the widest timeframe covered by the given backtest data
        :param data: dictionary with preprocessed backtesting data
        :return: tuple containing min_date, max_date
        """
        timeframe = [
            (arrow.get(min(frame.date)), arrow.get(max(frame.date)))
            for frame in data.values()
        ]
        return min(timeframe, key=operator.itemgetter(0))[0], \
            max(timeframe, key=operator.itemgetter(1))[1]
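
    # Usage sketch (assumed shapes: one DataFrame per pair, each with a `date` column):
    #
    #   min_date, max_date = Backtesting.get_timeframe(
    #       {'ETH/BTC': eth_df, 'LTC/BTC': ltc_df})
    #   # -> earliest start and latest end across all pairs, as arrow.Arrow objects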

    def _generate_text_table(self, data: Dict[str, Dict], results: DataFrame) -> str:
        """
        Generates and returns a text table for the given backtest data and the results dataframe
        :return: pretty printed table with tabulate as str
        """
        stake_currency = str(self.config.get('stake_currency'))

        floatfmt = ('s', 'd', '.2f', '.8f', '.1f')
        tabular_data = []
        headers = ['pair', 'buy count', 'avg profit %',
                   'total profit ' + stake_currency, 'avg duration', 'profit', 'loss']
        for pair in data:
            result = results[results.pair == pair]
            tabular_data.append([
                pair,
                len(result.index),
                result.profit_percent.mean() * 100.0,
                result.profit_abs.sum(),
                result.trade_duration.mean(),
                len(result[result.profit_abs > 0]),
                len(result[result.profit_abs < 0])
            ])

        # Append Total
        tabular_data.append([
            'TOTAL',
            len(results.index),
            results.profit_percent.mean() * 100.0,
            results.profit_abs.sum(),
            results.trade_duration.mean(),
            len(results[results.profit_abs > 0]),
            len(results[results.profit_abs < 0])
        ])
        return tabulate(tabular_data, headers=headers, floatfmt=floatfmt, tablefmt="pipe")

    def _store_backtest_result(self, recordfilename: Optional[str], results: DataFrame) -> None:

        # Flatten each trade into a JSON-serialisable tuple:
        # (pair, profit_percent, open_timestamp, close_timestamp, open_index - 1, trade_duration)
        records = [(trade_entry.pair, trade_entry.profit_percent,
                    trade_entry.open_time.timestamp(),
                    trade_entry.close_time.timestamp(),
                    trade_entry.open_index - 1, trade_entry.trade_duration)
                   for index, trade_entry in results.iterrows()]

        if records:
            logger.info('Dumping backtest results to %s', recordfilename)
            file_dump_json(recordfilename, records)

    def _get_sell_trade_entry(
            self, pair: str, buy_row: DataFrame,
            partial_ticker: List, trade_count_lock: Dict, args: Dict) -> Optional[BacktestResult]:

        stake_amount = args['stake_amount']
        max_open_trades = args.get('max_open_trades', 0)
        fee = self.exchange.get_fee()
        trade = Trade(
            open_rate=buy_row.close,
            open_date=buy_row.date,
            stake_amount=stake_amount,
            # derive the amount from the open rate so the full stake is invested
            amount=stake_amount / buy_row.close,
            fee_open=fee,
            fee_close=fee
        )

        # calculate win/lose forwards from buy point
        for sell_row in partial_ticker:
            if max_open_trades > 0:
                # Increase trade_count_lock for every iteration
                trade_count_lock[sell_row.date] = trade_count_lock.get(sell_row.date, 0) + 1

            buy_signal = sell_row.buy
            if self.analyze.should_sell(trade, sell_row.close, sell_row.date, buy_signal,
                                        sell_row.sell):
                # trade duration in minutes
                trade_dur = (sell_row.date - buy_row.date).total_seconds() // 60
                return BacktestResult(pair=pair,
                                      profit_percent=trade.calc_profit_percent(rate=sell_row.close),
                                      profit_abs=trade.calc_profit(rate=sell_row.close),
                                      open_time=buy_row.date,
                                      close_time=sell_row.date,
                                      trade_duration=trade_dur,
                                      open_index=buy_row.Index,
                                      close_index=sell_row.Index,
                                      open_at_end=False
                                      )
        if partial_ticker:
            # no sell condition found - trade still open at end of backtest period
            sell_row = partial_ticker[-1]
            trade_dur = (sell_row.date - buy_row.date).total_seconds() // 60
            btr = BacktestResult(pair=pair,
                                 profit_percent=trade.calc_profit_percent(rate=sell_row.close),
                                 profit_abs=trade.calc_profit(rate=sell_row.close),
                                 open_time=buy_row.date,
                                 close_time=sell_row.date,
                                 trade_duration=trade_dur,
                                 open_index=buy_row.Index,
                                 close_index=sell_row.Index,
                                 open_at_end=True
                                 )
            logger.debug('Force-selling still open trade %s with %s perc - %s', btr.pair,
                         btr.profit_percent, btr.profit_abs)
            return btr
        return None

    def backtest(self, args: Dict) -> DataFrame:
        """
        Implements backtesting functionality

        NOTE: This method is used by Hyperopt at each iteration. Please keep it optimized.
        Keep the code clean; some Pandas accessors are slower than plain functions.
        Avoid logging in this method.

        :param args: a dict containing:
            stake_amount: amount of stake currency to use for each trade
            processed: a processed dictionary with format {pair: dataframe}
            max_open_trades: maximum number of concurrent trades (default: 0, disabled)
            realistic: do we try to simulate realistic trades? (default: False)
        :return: DataFrame with one row per trade (see BacktestResult)
        """
        headers = ['date', 'buy', 'open', 'close', 'sell']
        processed = args['processed']
        max_open_trades = args.get('max_open_trades', 0)
        realistic = args.get('realistic', False)
        trades = []
        trade_count_lock: Dict = {}
        for pair, pair_data in processed.items():
            pair_data['buy'], pair_data['sell'] = 0, 0  # cleanup from previous run

            ticker_data = self.populate_sell_trend(
                self.populate_buy_trend(pair_data))[headers].copy()

            # to avoid using data from the future, buy/sell on the previous candle's signal
            ticker_data.loc[:, 'buy'] = ticker_data['buy'].shift(1)
            ticker_data.loc[:, 'sell'] = ticker_data['sell'].shift(1)

            ticker_data.drop(ticker_data.head(1).index, inplace=True)

            # Convert from Pandas to list for performance reasons
            # (looping over Pandas rows is slow)
            ticker = [x for x in ticker_data.itertuples()]

            lock_pair_until = None
            for index, row in enumerate(ticker):
                if row.buy == 0 or row.sell == 1:
                    continue  # skip rows with no buy signal or an immediate sell signal

                if realistic:
                    if lock_pair_until is not None and row.date <= lock_pair_until:
                        continue
                if max_open_trades > 0:
                    # Check if max_open_trades has already been reached for the given date
                    if not trade_count_lock.get(row.date, 0) < max_open_trades:
                        continue

                    trade_count_lock[row.date] = trade_count_lock.get(row.date, 0) + 1

                trade_entry = self._get_sell_trade_entry(pair, row, ticker[index + 1:],
                                                         trade_count_lock, args)

                if trade_entry:
                    lock_pair_until = trade_entry.close_time
                    trades.append(trade_entry)
                else:
                    # Set lock_pair_until to end of testing period if trade could not be closed
                    # This happens only if the buy signal was on the last candle
                    lock_pair_until = ticker_data.iloc[-1].date

        return DataFrame.from_records(trades, columns=BacktestResult._fields)
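
    # Illustrative call (mirrors what start() passes in below; values are placeholders):
    #
    #   results = backtesting.backtest({
    #       'stake_amount': 0.01,
    #       'processed': backtesting.tickerdata_to_dataframe(data),
    #       'max_open_trades': 3,
    #       'realistic': True,
    #   })
    #   open_trades = results.loc[results.open_at_end]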

    def start(self) -> None:
        """
        Run backtesting end-to-end
        :return: None
        """
        data = {}
        pairs = self.config['exchange']['pair_whitelist']
        logger.info('Using stake_currency: %s ...', self.config['stake_currency'])
        logger.info('Using stake_amount: %s ...', self.config['stake_amount'])

        if self.config.get('live'):
            logger.info('Downloading data for all pairs in whitelist ...')
            for pair in pairs:
                data[pair] = self.exchange.get_ticker_history(pair, self.ticker_interval)
        else:
            logger.info('Using local backtesting data (using whitelist in given config) ...')

            timerange = Arguments.parse_timerange(None if self.config.get(
                'timerange') is None else str(self.config.get('timerange')))
            data = optimize.load_data(
                self.config['datadir'],
                pairs=pairs,
                ticker_interval=self.ticker_interval,
                refresh_pairs=self.config.get('refresh_pairs', False),
                exchange=self.exchange,
                timerange=timerange
            )

        if not data:
            logger.critical("No data found. Terminating.")
            return
        # Ignore max_open_trades in backtesting, unless the realistic_simulation flag was passed
        if self.config.get('realistic_simulation', False):
            max_open_trades = self.config['max_open_trades']
        else:
            logger.info('Ignoring max_open_trades (realistic_simulation not set) ...')
            max_open_trades = 0

        preprocessed = self.tickerdata_to_dataframe(data)

        # Print timeframe
        min_date, max_date = self.get_timeframe(preprocessed)
        logger.info(
            'Measuring data from %s up to %s (%s days) ...',
            min_date.isoformat(),
            max_date.isoformat(),
            (max_date - min_date).days
        )

        # Execute backtest and print results
        results = self.backtest(
            {
                'stake_amount': self.config.get('stake_amount'),
                'processed': preprocessed,
                'max_open_trades': max_open_trades,
                'realistic': self.config.get('realistic_simulation', False),
            }
        )

        if self.config.get('export', False):
            self._store_backtest_result(self.config.get('exportfilename'), results)

        logger.info(
            '\n======================================== '
            'BACKTESTING REPORT'
            ' =========================================\n'
            '%s',
            self._generate_text_table(
                data,
                results
            )
        )

        logger.info(
            '\n====================================== '
            'LEFT OPEN TRADES REPORT'
            ' ======================================\n'
            '%s',
            self._generate_text_table(
                data,
                results.loc[results.open_at_end]
            )
        )


def setup_configuration(args: Namespace) -> Dict[str, Any]:
    """
    Prepare the configuration for the backtesting
    :param args: Cli args from Arguments()
    :return: Configuration
    """
    configuration = Configuration(args)
    config = configuration.get_config()

    # Ensure we do not use Exchange credentials
    config['exchange']['key'] = ''
    config['exchange']['secret'] = ''

    return config


def start(args: Namespace) -> None:
    """
    Start Backtesting script
    :param args: Cli args from Arguments()
    :return: None
    """
    # Initialize configuration
    config = setup_configuration(args)
    logger.info('Starting freqtrade in Backtesting mode')

    # Initialize backtesting object
    backtesting = Backtesting(config)
    backtesting.start()
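

# Minimal usage sketch. Assumptions: in the real project the CLI entry point parses the
# command line and dispatches to start(); the Arguments wiring below is illustrative only.
#
#   from freqtrade.arguments import Arguments
#
#   args = Arguments(['backtesting', '--config', 'config.json'],
#                    'freqtrade backtesting').get_parsed_arg()
#   start(args)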