fixed most tests and moved AWS related stuff out
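Most of the hunks below replace the ad-hoc `(stype, start, stop)` timerange tuple with a `TimeRange` object imported from `freqtrade.arguments`. That class is not part of this diff; as a rough sketch, inferred only from the constructor call `TimeRange(None, None, 0, 0)` and the attribute accesses in the code below, it behaves roughly like:

```python
from collections import namedtuple

# Hypothetical stand-in for freqtrade.arguments.TimeRange (not shown in this commit).
# Field names and their order are inferred from TimeRange(None, None, 0, 0) and the
# .starttype / .stoptype / .startts / .stopts accesses in the hunks below.
TimeRange = namedtuple('TimeRange', ['starttype', 'stoptype', 'startts', 'stopts'])

# e.g. "everything from unix timestamp 1518000000 (seconds) onwards":
timerange = TimeRange('date', None, 1518000000, 0)
```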
@@ -4,44 +4,45 @@ import gzip
 import json
 import logging
 import os
-from typing import Optional, List, Dict, Tuple
+from typing import Optional, List, Dict, Tuple, Any

 import arrow

 from freqtrade import misc, constants
 from freqtrade.exchange import get_ticker_history
+from freqtrade.arguments import TimeRange

 from user_data.hyperopt_conf import hyperopt_optimize_conf

 logger = logging.getLogger(__name__)


-def trim_tickerlist(tickerlist: List[Dict], timerange: Tuple[Tuple, int, int]) -> List[Dict]:
+def trim_tickerlist(tickerlist: List[Dict], timerange: TimeRange) -> List[Dict]:
     if not tickerlist:
         return tickerlist

-    stype, start, stop = timerange
-
     start_index = 0
     stop_index = len(tickerlist)

-    if stype[0] == 'line':
-        stop_index = start
-    if stype[0] == 'index':
-        start_index = start
-    elif stype[0] == 'date':
-        while tickerlist[start_index][0] < start * 1000:
+    if timerange.starttype == 'line':
+        stop_index = timerange.startts
+    if timerange.starttype == 'index':
+        start_index = timerange.startts
+    elif timerange.starttype == 'date':
+        while (start_index < len(tickerlist) and
+               tickerlist[start_index][0] < timerange.startts * 1000):
             start_index += 1

-    if stype[1] == 'line':
-        start_index = len(tickerlist) + stop
-    if stype[1] == 'index':
-        stop_index = stop
-    elif stype[1] == 'date':
-        while tickerlist[stop_index-1][0] > stop * 1000:
+    if timerange.stoptype == 'line':
+        start_index = len(tickerlist) + timerange.stopts
+    if timerange.stoptype == 'index':
+        stop_index = timerange.stopts
+    elif timerange.stoptype == 'date':
+        while (stop_index > 0 and
+               tickerlist[stop_index-1][0] > timerange.stopts * 1000):
             stop_index -= 1

     if start_index > stop_index:
-        raise ValueError(f'The timerange [{start},{stop}] is incorrect')
+        raise ValueError(f'The timerange [{timerange.startts},{timerange.stopts}] is incorrect')

     return tickerlist[start_index:stop_index]
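A quick illustration of the new `trim_tickerlist` signature, using the function from the hunk above and the `TimeRange` sketch from the top. Candle values are made up; the only structural assumption is that element `[0]` of each row is a millisecond timestamp, which the comparison against `timerange.startts * 1000` implies:

```python
tickers = [
    [1518000000000, 1.00, 1.10, 0.90, 1.05, 10],  # [timestamp_ms, open, high, low, close, volume]
    [1518000060000, 1.05, 1.20, 1.00, 1.10, 12],
    [1518000120000, 1.10, 1.15, 1.05, 1.12, 8],
]

# Keep only candles at or after 1518000060 (seconds); a stoptype of None leaves
# the end of the list untouched.
trimmed = trim_tickerlist(tickers, TimeRange('date', None, 1518000060, 0))
assert trimmed == tickers[1:]
```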
@@ -49,7 +50,7 @@ def trim_tickerlist(tickerlist: List[Dict], timerange: Tuple[Tuple, int, int]) -
 def load_tickerdata_file(
         datadir: str, pair: str,
         ticker_interval: str,
-        timerange: Optional[Tuple[Tuple, int, int]] = None) -> Optional[List[Dict]]:
+        timerange: Optional[TimeRange] = None) -> Optional[List[Dict]]:
     """
     Load a pair from file,
     :return dict OR empty if unsuccesful
@@ -84,7 +85,7 @@ def load_data(datadir: str,
               ticker_interval: str,
               pairs: Optional[List[str]] = None,
               refresh_pairs: Optional[bool] = False,
-              timerange: Optional[Tuple[Tuple, int, int]] = None) -> Dict[str, List]:
+              timerange: TimeRange = TimeRange(None, None, 0, 0)) -> Dict[str, List]:
     """
     Loads ticker history data for the given parameters
     :return: dict
@@ -100,15 +101,16 @@ def load_data(datadir: str,

     for pair in _pairs:
         pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange)
         if not pairdata:
             # download the tickerdata from exchange
             download_backtesting_testdata(datadir,
                                           pair=pair,
                                           tick_interval=ticker_interval,
                                           timerange=timerange)
             # and retry reading the pair
             pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange)
-        result[pair] = pairdata
+        if pairdata:
+            result[pair] = pairdata
+        else:
+            logger.warning(
+                'No data for pair: "%s", Interval: %s. '
+                'Use --refresh-pairs-cached to download the data',
+                pair,
+                ticker_interval
+            )

     return result
@@ -123,7 +125,7 @@ def make_testdata_path(datadir: str) -> str:

 def download_pairs(datadir, pairs: List[str],
                    ticker_interval: str,
-                   timerange: Optional[Tuple[Tuple, int, int]] = None) -> bool:
+                   timerange: TimeRange = TimeRange(None, None, 0, 0)) -> bool:
     """For each pairs passed in parameters, download the ticker intervals"""
     for pair in pairs:
         try:
@@ -143,7 +145,9 @@ def download_pairs(datadir, pairs: List[str],

 def load_cached_data_for_updating(filename: str,
                                   tick_interval: str,
                                   timerange: Optional[Tuple[Tuple, int, int]]) -> Tuple[list, int]:
+                                  timerange: Optional[TimeRange]) -> Tuple[
+                                      List[Any],
+                                      Optional[int]]:
     """
     Load cached data and choose what part of the data should be updated
     """
@@ -152,10 +156,10 @@ def load_cached_data_for_updating(filename: str,

     # user sets timerange, so find the start time
     if timerange:
-        if timerange[0][0] == 'date':
-            since_ms = timerange[1] * 1000
-        elif timerange[0][1] == 'line':
-            num_minutes = timerange[2] * constants.TICKER_INTERVAL_MINUTES[tick_interval]
+        if timerange.starttype == 'date':
+            since_ms = timerange.startts * 1000
+        elif timerange.stoptype == 'line':
+            num_minutes = timerange.stopts * constants.TICKER_INTERVAL_MINUTES[tick_interval]
             since_ms = arrow.utcnow().shift(minutes=num_minutes).timestamp * 1000

     # read the cached file
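For the `'line'` branch above, the arithmetic is easier to see with numbers. This assumes `constants.TICKER_INTERVAL_MINUTES['5m'] == 5` (which the lookup implies) and that a negative `stopts` means "the last N candles", inferred from `trim_tickerlist`, where a `'line'` stop is added to `len(tickerlist)`:

```python
# TimeRange(None, 'line', 0, -200) on a 5m interval: fetch roughly the last 200 candles.
num_minutes = -200 * 5                                             # -1000 minutes
since_ms = arrow.utcnow().shift(minutes=num_minutes).timestamp * 1000
# i.e. "now minus 1000 minutes", expressed as a millisecond timestamp
```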
@@ -185,7 +189,7 @@ def load_cached_data_for_updating(filename: str,
 def download_backtesting_testdata(datadir: str,
                                   pair: str,
                                   tick_interval: str = '5m',
-                                  timerange: Optional[Tuple[Tuple, int, int]] = None) -> None:
+                                  timerange: Optional[TimeRange] = None) -> None:

     """
     Download the latest ticker intervals from the exchange for the pairs passed in parameters
@@ -34,18 +34,6 @@ class Backtesting(object):

     def __init__(self, config: Dict[str, Any]) -> None:
         self.config = config
-        self.analyze = None
-        self.ticker_interval = None
-        self.tickerdata_to_dataframe = None
-        self.populate_buy_trend = None
-        self.populate_sell_trend = None
-        self._init()
-
-    def _init(self) -> None:
-        """
-        Init objects required for backtesting
-        :return: None
-        """
         self.analyze = Analyze(self.config)
         self.ticker_interval = self.analyze.strategy.ticker_interval
         self.tickerdata_to_dataframe = self.analyze.tickerdata_to_dataframe
@@ -79,9 +67,9 @@ class Backtesting(object):
         Generates and returns a text table for the given backtest data and the results dataframe
         :return: pretty printed table with tabulate as str
         """
-        floatfmt, headers, tabular_data = self.aggregate(data, results)
-
-        return tabulate(tabular_data, headers=headers, floatfmt=floatfmt)
+        floatfmt, headers, tabular_data = self.aggregate(data, results)
+        return tabulate(tabular_data, headers=headers, floatfmt=floatfmt, tablefmt="pipe")

     def aggregate(self, data, results):
         stake_currency = self.config.get('stake_currency')
@@ -91,6 +79,7 @@ class Backtesting(object):
                    'total profit ' + stake_currency, 'avg duration', 'profit', 'loss']
         for pair in data:
             result = results[results.currency == pair]
+            print(results)
             tabular_data.append([
                 pair,
                 len(result.index),
@@ -174,13 +163,22 @@ class Backtesting(object):
         max_open_trades = args.get('max_open_trades', 0)
         realistic = args.get('realistic', False)
         record = args.get('record', None)
+        recordfilename = args.get('recordfn', 'backtest-result.json')
         records = []
         trades = []
-        trade_count_lock = {}
+        trade_count_lock: Dict = {}
         for pair, pair_data in processed.items():
             pair_data['buy'], pair_data['sell'] = 0, 0  # cleanup from previous run

-            ticker_data = self.populate_sell_trend(self.populate_buy_trend(pair_data))[headers]
+            ticker_data = self.populate_sell_trend(
+                self.populate_buy_trend(pair_data))[headers].copy()
+
+            # to avoid using data from future, we buy/sell with signal from previous candle
+            ticker_data.loc[:, 'buy'] = ticker_data['buy'].shift(1)
+            ticker_data.loc[:, 'sell'] = ticker_data['sell'].shift(1)
+
+            ticker_data.drop(ticker_data.head(1).index, inplace=True)
+
             ticker = [x for x in ticker_data.itertuples()]

             lock_pair_until = None
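The added `.shift(1)` calls are the substantive change here: the buy/sell signal computed on candle N is only acted on at candle N+1, so the backtest cannot trade on a candle that would still be forming in live trading, and the `.copy()` keeps `ticker_data` independent of `pair_data`, so the `.loc` assignments do not write back into (or warn about) the original slice. A minimal pandas illustration (not freqtrade code):

```python
import pandas as pd

signals = pd.DataFrame({'buy': [0, 1, 0, 0]})
signals['buy'] = signals['buy'].shift(1)            # NaN, 0, 1, 0 -> acts one candle later
signals.drop(signals.head(1).index, inplace=True)   # first candle has no predecessor, drop it
print(signals)
```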
@@ -217,7 +215,8 @@ class Backtesting(object):
         # For now export inside backtest(), maybe change so that backtest()
         # returns a tuple like: (dataframe, records, logs, etc)
         if record and record.find('trades') >= 0:
-            logger.info('Dumping backtest results')
-            file_dump_json('backtest-result.json', records)
+            logger.info('Dumping backtest results to %s', recordfilename)
+            file_dump_json(recordfilename, records)
         labels = ['currency', 'profit_percent', 'profit_BTC', 'duration', 'entry', 'exit']
@@ -240,7 +239,8 @@ class Backtesting(object):
         else:
             logger.info('Using local backtesting data (using whitelist in given config) ...')

-        timerange = Arguments.parse_timerange(self.config.get('timerange'))
+        timerange = Arguments.parse_timerange(None if self.config.get(
+            'timerange') is None else str(self.config.get('timerange')))
         data = optimize.load_data(
             self.config['datadir'],
             pairs=pairs,
@@ -249,6 +249,9 @@ class Backtesting(object):
             timerange=timerange
         )

+        if not data:
+            logger.critical("No data found. Terminating.")
+            return
         # Ignore max_open_trades in backtesting, except realistic flag was passed
         if self.config.get('realistic_simulation', False):
             max_open_trades = self.config['max_open_trades']
@@ -278,7 +281,8 @@ class Backtesting(object):
                 'realistic': self.config.get('realistic_simulation', False),
                 'sell_profit_only': sell_profit_only,
                 'use_sell_signal': use_sell_signal,
-                'record': self.config.get('export')
+                'record': self.config.get('export'),
+                'recordfn': self.config.get('exportfilename'),
             }
         )
         logger.info(
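Putting the `recordfn` pieces from the two hunks above together (the config values below are illustrative; the CLI option that fills `exportfilename` is not part of this diff):

```python
config = {'export': 'trades', 'exportfilename': 'user_data/backtest-2018.json'}

args = {
    'record': config.get('export'),
    'recordfn': config.get('exportfilename'),
}

# inside backtest():
recordfilename = args.get('recordfn', 'backtest-result.json')
# -> 'user_data/backtest-2018.json'. Note the fallback only applies when the key is
# missing entirely; a present-but-None 'recordfn' still yields None.
```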
@@ -14,7 +14,7 @@ from argparse import Namespace
 from functools import reduce
 from math import exp
 from operator import itemgetter
-from typing import Dict, Any, Callable
+from typing import Dict, Any, Callable, Optional

 import numpy
 import talib.abstract as ta
@@ -60,7 +60,7 @@ class Hyperopt(Backtesting):
         self.expected_max_profit = 3.0

         # Configuration and data used by hyperopt
-        self.processed = None
+        self.processed: Optional[Dict[str, Any]] = None

         # Hyperopt Trials
         self.trials_file = os.path.join('user_data', 'hyperopt_trials.pickle')
@@ -344,7 +344,7 @@ class Hyperopt(Backtesting):
         """
         Return the space to use during Hyperopt
         """
-        spaces = {}
+        spaces: Dict = {}
         if self.has_space('buy'):
             spaces = {**spaces, **Hyperopt.indicator_space()}
         if self.has_space('roi'):
@@ -455,6 +455,7 @@ class Hyperopt(Backtesting):

         if trade_count == 0 or trade_duration > self.max_accepted_trade_duration:
             print('.', end='')
+            sys.stdout.flush()
             return {
                 'status': STATUS_FAIL,
                 'loss': float('inf')
@@ -479,31 +480,32 @@
             'result': result_explanation,
         }

-    @staticmethod
-    def format_results(results: DataFrame) -> str:
+    def format_results(self, results: DataFrame) -> str:
         """
         Return the format result in a string
         """
         return ('{:6d} trades. Avg profit {: 5.2f}%. '
-                'Total profit {: 11.8f} BTC ({:.4f}Σ%). Avg duration {:5.1f} mins.').format(
+                'Total profit {: 11.8f} {} ({:.4f}Σ%). Avg duration {:5.1f} mins.').format(
                     len(results.index),
                     results.profit_percent.mean() * 100.0,
                     results.profit_BTC.sum(),
+                    self.config['stake_currency'],
                     results.profit_percent.sum(),
                     results.duration.mean(),
                 )

     def start(self) -> None:
-        timerange = Arguments.parse_timerange(self.config.get('timerange'))
+        timerange = Arguments.parse_timerange(None if self.config.get(
+            'timerange') is None else str(self.config.get('timerange')))
         data = load_data(
-            datadir=self.config.get('datadir'),
+            datadir=str(self.config.get('datadir')),
             pairs=self.config['exchange']['pair_whitelist'],
             ticker_interval=self.ticker_interval,
             timerange=timerange
         )

         if self.has_space('buy'):
-            self.analyze.populate_indicators = Hyperopt.populate_indicators
+            self.analyze.populate_indicators = Hyperopt.populate_indicators  # type: ignore
         self.processed = self.tickerdata_to_dataframe(data)

         if self.config.get('mongodb'):
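For reference, the reworked result line of `format_results` with placeholder numbers and a configured stake currency of 'ETH' (the values are made up; only the format string comes from the code above):

```python
line = ('{:6d} trades. Avg profit {: 5.2f}%. '
        'Total profit {: 11.8f} {} ({:.4f}Σ%). Avg duration {:5.1f} mins.').format(
            42, 0.35, 0.145, 'ETH', 14.7, 55.0)
# '    42 trades. Avg profit  0.35%. Total profit  0.14500000 ETH (14.7000Σ%). Avg duration  55.0 mins.'
```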