# stable/freqtrade/optimize/backtesting.py

# pragma pylint: disable=missing-docstring,W0212

import logging
from typing import Tuple, Dict

import arrow
from pandas import DataFrame, Series
from tabulate import tabulate

from freqtrade import exchange
from freqtrade.analyze import populate_buy_trend, populate_sell_trend
from freqtrade.exchange import Bittrex
from freqtrade.main import min_roi_reached
import freqtrade.misc as misc
from freqtrade.optimize import preprocess
import freqtrade.optimize as optimize
from freqtrade.persistence import Trade

logger = logging.getLogger(__name__)


def get_timeframe(data: Dict[str, DataFrame]) -> Tuple[arrow.Arrow, arrow.Arrow]:
    """
    Get the maximum timeframe for the given backtest data
    :param data: dictionary with preprocessed backtesting data
    :return: tuple containing min_date, max_date
    """
    all_dates = Series([])
    for pair, pair_data in data.items():
        all_dates = all_dates.append(pair_data['date'])
    all_dates.sort_values(inplace=True)
    return arrow.get(all_dates.iloc[0]), arrow.get(all_dates.iloc[-1])
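
# Usage sketch: how get_timeframe() can be fed. The pair names and timestamps
# below are hypothetical; in this module the dict comes from preprocess() applied
# to optimize.load_data() output, as done in start() further down.
#
#   frames = {
#       'BTC_ETH': DataFrame({'date': [1515151800, 1515152100]}),
#       'BTC_LTC': DataFrame({'date': [1515151500, 1515152400]}),
#   }
#   min_date, max_date = get_timeframe(frames)
#   # min_date/max_date are arrow.Arrow objects spanning the earliest and latest
#   # candle across all pairs.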


def generate_text_table(
        data: Dict[str, Dict], results: DataFrame, stake_currency, ticker_interval) -> str:
    """
    Generates and returns a text table for the given backtest data and the results dataframe
    :return: pretty printed table with tabulate as str
    """
    floatfmt = ('s', 'd', '.2f', '.8f', '.1f')
    tabular_data = []
    headers = ['pair', 'buy count', 'avg profit %',
               'total profit ' + stake_currency, 'avg duration', 'profit', 'loss']
    for pair in data:
        result = results[results.currency == pair]
        tabular_data.append([
            pair,
            len(result.index),
            result.profit_percent.mean() * 100.0,
            result.profit_BTC.sum(),
            result.duration.mean() * ticker_interval,
            result.profit.sum(),
            result.loss.sum()
        ])

    # Append Total
    tabular_data.append([
        'TOTAL',
        len(results.index),
        results.profit_percent.mean() * 100.0,
        results.profit_BTC.sum(),
        results.duration.mean() * ticker_interval,
        results.profit.sum(),
        results.loss.sum()
    ])
    return tabulate(tabular_data, headers=headers, floatfmt=floatfmt)
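
# Usage sketch: generate_text_table() expects `data` keyed by pair name and a
# `results` DataFrame shaped like the one returned by backtest() below, i.e. with
# the columns 'currency', 'profit_percent', 'profit_BTC', 'duration', 'profit'
# and 'loss'. A hypothetical call:
#
#   table = generate_text_table(data, results, 'BTC', 5)
#   print(table)
#
# where 5 is the ticker interval in minutes, used above to convert the
# candle-count durations into minutes for the 'avg duration' column.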


def get_trade_entry(pair, row, ticker, trade_count_lock, args):
    """
    Simulate a single trade opened at the given buy row and walk forward until a
    sell condition (ROI reached, sell signal or stoploss) is hit.
    :return: tuple of (index of the sell row, trade record) or None if no sell was triggered
    """
    stake_amount = args['stake_amount']
    max_open_trades = args.get('max_open_trades', 0)
    sell_profit_only = args.get('sell_profit_only', False)
    stoploss = args.get('stoploss', -1)
    use_sell_signal = args.get('use_sell_signal', False)
    trade = Trade(open_rate=row.close,
                  open_date=row.date,
                  stake_amount=stake_amount,
                  amount=stake_amount / row.open,
                  fee=exchange.get_fee()
                  )

    # calculate win/lose forwards from buy point
    sell_subset = ticker[row.Index + 1:][['close', 'date', 'sell']]
    for row2 in sell_subset.itertuples(index=True):
        if max_open_trades > 0:
            # Increase trade_count_lock for every iteration
            trade_count_lock[row2.date] = trade_count_lock.get(row2.date, 0) + 1

        current_profit_percent = trade.calc_profit_percent(rate=row2.close)
        if sell_profit_only and current_profit_percent < 0:
            continue
        if min_roi_reached(trade, row2.close, row2.date) or \
                (row2.sell == 1 and use_sell_signal) or \
                current_profit_percent <= stoploss:
            current_profit_btc = trade.calc_profit(rate=row2.close)
            return row2.Index, (pair,
                                current_profit_percent,
                                current_profit_btc,
                                row2.Index - row.Index,
                                current_profit_btc > 0,
                                current_profit_btc < 0
                                )
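
# Return-value sketch: a hypothetical successful call could yield
#
#   (42, ('BTC_ETH', 0.025, 0.00005, 7, True, False))
#
# i.e. the index of the candle that triggered the sell, followed by a record that
# backtest() collects into a DataFrame with the columns
# ['currency', 'profit_percent', 'profit_BTC', 'duration', 'profit', 'loss'].
# If no sell condition is hit before the data runs out, the loop falls through
# and the function implicitly returns None.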


def backtest(args) -> DataFrame:
    """
    Implements backtesting functionality
    :param args: a dict containing:
        stake_amount: BTC amount to use for each trade
        processed: a processed dictionary with format {pair, data}
        max_open_trades: maximum number of concurrent trades (default: 0, disabled)
        realistic: do we try to simulate realistic trades? (default: True)
        sell_profit_only: only sell when the trade is in profit (default: False)
        use_sell_signal: act on the sell signal (default: False)
        stoploss: loss ratio at or below which a trade is force-sold (default: -1)
    :return: DataFrame with one row per simulated trade
    """
    processed = args['processed']
    max_open_trades = args.get('max_open_trades', 0)
    realistic = args.get('realistic', True)
    trades = []
    trade_count_lock: dict = {}
    exchange._API = Bittrex({'key': '', 'secret': ''})
    for pair, pair_data in processed.items():
        pair_data['buy'], pair_data['sell'] = 0, 0
        ticker = populate_sell_trend(populate_buy_trend(pair_data))

        # for each buy point
        lock_pair_until = None
        buy_subset = ticker[ticker.buy == 1][['buy', 'open', 'close', 'date', 'sell']]
        for row in buy_subset.itertuples(index=True):
            if realistic:
                if lock_pair_until is not None and row.Index <= lock_pair_until:
                    continue
            if max_open_trades > 0:
                # Check if max_open_trades has already been reached for the given date
                if not trade_count_lock.get(row.date, 0) < max_open_trades:
                    continue

            if max_open_trades > 0:
                # Increase lock
                trade_count_lock[row.date] = trade_count_lock.get(row.date, 0) + 1

            ret = get_trade_entry(pair, row, ticker,
                                  trade_count_lock, args)
            if ret:
                lock_pair_until, trade_entry = ret
                trades.append(trade_entry)

    labels = ['currency', 'profit_percent', 'profit_BTC', 'duration', 'profit', 'loss']
    return DataFrame.from_records(trades, columns=labels)
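
# Usage sketch: a minimal args dict for backtest(), mirroring the call made in
# start() below. The stake amount, stoploss, pair and data directory are
# hypothetical example values.
#
#   results = backtest({
#       'stake_amount': 0.01,
#       'processed': preprocess(optimize.load_data('path/to/datadir',
#                                                   pairs=['BTC_ETH'],
#                                                   ticker_interval=5,
#                                                   refresh_pairs=False)),
#       'max_open_trades': 0,
#       'realistic': False,
#       'stoploss': -0.10,
#   })
#   # sell_profit_only and use_sell_signal default to False when omitted.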


def start(args):
    # Initialize logger
    logging.basicConfig(
        level=args.loglevel,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    )

    exchange._API = Bittrex({'key': '', 'secret': ''})

    logger.info('Using config: %s ...', args.config)
    config = misc.load_config(args.config)

    logger.info('Using ticker_interval: %s ...', args.ticker_interval)

    data = {}
    pairs = config['exchange']['pair_whitelist']
    if args.live:
        logger.info('Downloading data for all pairs in whitelist ...')
        for pair in pairs:
            data[pair] = exchange.get_ticker_history(pair, args.ticker_interval)
    else:
        logger.info('Using local backtesting data (using whitelist in given config) ...')
        data = optimize.load_data(args.datadir, pairs=pairs, ticker_interval=args.ticker_interval,
                                  refresh_pairs=args.refresh_pairs)

    logger.info('Using stake_currency: %s ...', config['stake_currency'])
    logger.info('Using stake_amount: %s ...', config['stake_amount'])

    max_open_trades = 0
    if args.realistic_simulation:
        logger.info('Using max_open_trades: %s ...', config['max_open_trades'])
        max_open_trades = config['max_open_trades']

    # Monkey patch config
    from freqtrade import main
    main._CONF = config

    preprocessed = preprocess(data)
    # Print timeframe
    min_date, max_date = get_timeframe(preprocessed)
    logger.info('Measuring data from %s up to %s ...', min_date.isoformat(), max_date.isoformat())

    # Execute backtest and print results
    sell_profit_only = config.get('experimental', {}).get('sell_profit_only', False)
    use_sell_signal = config.get('experimental', {}).get('use_sell_signal', False)
    results = backtest({'stake_amount': config['stake_amount'],
                        'processed': preprocessed,
                        'max_open_trades': max_open_trades,
                        'realistic': args.realistic_simulation,
                        'sell_profit_only': sell_profit_only,
                        'use_sell_signal': use_sell_signal,
                        'stoploss': config.get('stoploss')
                        })
    logger.info(
        '\n==================================== BACKTESTING REPORT ====================================\n%s',  # noqa
        generate_text_table(data, results, config['stake_currency'], args.ticker_interval)
    )
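
# Usage sketch: start() is normally invoked by the freqtrade CLI with a parsed
# argument namespace. A hypothetical equivalent, listing the attributes this
# function reads (all values are examples only):
#
#   from argparse import Namespace
#   start(Namespace(
#       loglevel=logging.INFO,
#       config='config.json',
#       ticker_interval=5,
#       live=False,
#       datadir='path/to/datadir',
#       refresh_pairs=False,
#       realistic_simulation=True,
#   ))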