Move Backtesting to a class and add unit tests
This commit is contained in: parent db67b10605 · commit 1d251d6151
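A minimal sketch (illustrative, not part of this diff) of how the refactored backtesting entry point introduced by this commit is meant to be driven, based on the setup_configuration() helper and the Backtesting class shown below; the CLI argument values are examples only.

from freqtrade.arguments import Arguments
from freqtrade.optimize.backtesting import Backtesting, setup_configuration

# Parse CLI arguments the same way the new unit tests do (values are examples)
args = Arguments(
    ['--config', 'config.json', '--strategy', 'default_strategy', 'backtesting'],
    ''
).get_parsed_arg()

# Build the configuration; setup_configuration() also blanks the exchange credentials
config = setup_configuration(args)

# Run a backtest end-to-end with the new class
backtesting = Backtesting(config)
backtesting.start()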
@ -42,6 +42,9 @@ class Configuration(object):
|
||||
if self.args.dry_run_db and config.get('dry_run', False):
|
||||
config.update({'dry_run_db': True})
|
||||
|
||||
# Load Backtesting / Hyperopt
|
||||
config = self._load_backtesting_config(config)
|
||||
|
||||
return config
|
||||
|
||||
def _load_config_file(self, path: str) -> Dict[str, Any]:
|
||||
@ -59,6 +62,51 @@ class Configuration(object):
|
||||
|
||||
return self._validate_config(conf)
|
||||
|
||||
def _load_backtesting_config(self, config: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
Extract information from sys.argv and load the Backtesting and Hyperopt configuration
|
||||
:return: configuration as dictionary
|
||||
"""
|
||||
# If -i/--ticker-interval is used we override the configuration parameter
|
||||
# (that will override the strategy configuration)
|
||||
if 'ticker_interval' in self.args and self.args.ticker_interval:
|
||||
config.update({'ticker_interval': self.args.ticker_interval})
|
||||
self.logger.info('Parameter -i/--ticker-interval detected ...')
|
||||
self.logger.info('Using ticker_interval: %d ...', config.get('ticker_interval'))
|
||||
|
||||
# If -l/--live is used we add it to the configuration
|
||||
if 'live' in self.args and self.args.live:
|
||||
config.update({'live': True})
|
||||
self.logger.info('Parameter -l/--live detected ...')
|
||||
|
||||
# If --realistic-simulation is used we add it to the configuration
|
||||
if 'realistic_simulation' in self.args and self.args.realistic_simulation:
|
||||
config.update({'realistic_simulation': True})
|
||||
self.logger.info('Parameter --realistic-simulation detected ...')
|
||||
self.logger.info('Using max_open_trades: %s ...', config.get('max_open_trades'))
|
||||
|
||||
# If --timerange is used we add it to the configuration
|
||||
if 'timerange' in self.args and self.args.timerange:
|
||||
config.update({'timerange': self.args.timerange})
|
||||
self.logger.info('Parameter --timerange detected: %s ...', self.args.timerange)
|
||||
|
||||
# If --datadir is used we add it to the configuration
|
||||
if 'datadir' in self.args and self.args.datadir:
|
||||
config.update({'datadir': self.args.datadir})
|
||||
self.logger.info('Parameter --datadir detected: %s ...', self.args.datadir)
|
||||
|
||||
# If -r/--refresh-pairs-cached is used we add it to the configuration
|
||||
if 'refresh_pairs' in self.args and self.args.refresh_pairs:
|
||||
config.update({'refresh_pairs': True})
|
||||
self.logger.info('Parameter -r/--refresh-pairs-cached detected ...')
|
||||
|
||||
# If --export is used we add it to the configuration
|
||||
if 'export' in self.args and self.args.export:
|
||||
config.update({'export': self.args.export})
|
||||
self.logger.info('Parameter --export detected: %s ...', self.args.export)
|
||||
|
||||
return config
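# Illustrative summary (not part of this diff): the CLI flags handled by
# _load_backtesting_config() above and the configuration keys they populate.
#   -i/--ticker-interval <n>    -> config['ticker_interval'] = <n>
#   -l/--live                   -> config['live'] = True
#   --realistic-simulation      -> config['realistic_simulation'] = True
#   --timerange <range>         -> config['timerange'] = <range>
#   --datadir <path>            -> config['datadir'] = <path>
#   -r/--refresh-pairs-cached   -> config['refresh_pairs'] = True
#   --export <path>             -> config['export'] = <path>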
|
||||
|
||||
def _validate_config(self, conf: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
Validate that the configuration follows the Config Schema
|
||||
|
@ -20,21 +20,61 @@ class Logger(object):
|
||||
"""
|
||||
self.name = name
|
||||
self.level = level
|
||||
self.logger = None
|
||||
|
||||
self._init_logger()
|
||||
|
||||
def _init_logger(self) -> logging:
|
||||
def _init_logger(self) -> None:
|
||||
"""
|
||||
Setup the bot logger configuration
|
||||
:return: logging object
|
||||
:return: None
|
||||
"""
|
||||
logging.basicConfig(
|
||||
level=self.level,
|
||||
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
|
||||
)
|
||||
|
||||
self.logger = self.get_logger()
|
||||
self.set_level(self.level)
|
||||
|
||||
def get_logger(self) -> logging.RootLogger:
|
||||
"""
|
||||
Return the logger instance to use for sending messages
|
||||
:return: the logger instance
|
||||
"""
|
||||
return logging.getLogger(self.name)
|
||||
|
||||
def set_name(self, name: str) -> logging.RootLogger:
|
||||
"""
|
||||
Set the name of the logger
|
||||
:param name: Name of the logger
|
||||
:return: the logger instance
|
||||
"""
|
||||
self.name = name
|
||||
self.logger = self.get_logger()
|
||||
return self.logger
|
||||
|
||||
def set_level(self, level) -> None:
|
||||
"""
|
||||
Set the level of the logger
|
||||
:param level:
|
||||
:return: None
|
||||
"""
|
||||
self.level = level
|
||||
self.logger.setLevel(self.level)
|
||||
|
||||
def set_format(self, log_format: str, propagate: bool = False) -> None:
|
||||
"""
|
||||
Set a new logging format
|
||||
:return: None
|
||||
"""
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
len_handlers = len(self.logger.handlers)
|
||||
if len_handlers:
|
||||
self.logger.removeHandler(handler)
|
||||
|
||||
handler.setFormatter(logging.Formatter(log_format))
|
||||
self.logger.addHandler(handler)
|
||||
|
||||
self.logger.propagate = propagate
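# Illustrative usage (not part of this diff) of the Logger wrapper above, mirroring
# how backtesting.py below switches to a bare format before printing the report:
from freqtrade.logger import Logger

logging_wrapper = Logger(name=__name__)
logger = logging_wrapper.get_logger()
logger.info('normal log line')
logging_wrapper.set_format('%(message)s')  # bare format for the report table
logger.info('BACKTESTING REPORT ...')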
|
||||
|
@ -1,18 +1,16 @@
|
||||
# pragma pylint: disable=missing-docstring
|
||||
|
||||
import logging
|
||||
import json
|
||||
import os
|
||||
from typing import Optional, List, Dict
|
||||
from pandas import DataFrame
|
||||
import gzip
|
||||
from freqtrade.exchange import get_ticker_history
|
||||
from freqtrade.analyze import populate_indicators, parse_ticker_dataframe
|
||||
|
||||
from freqtrade import misc
|
||||
from freqtrade.logger import Logger
|
||||
from user_data.hyperopt_conf import hyperopt_optimize_conf
|
||||
import gzip
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = Logger(name=__name__).get_logger()
|
||||
|
||||
|
||||
def trim_tickerlist(tickerlist, timerange):
|
||||
@ -84,21 +82,13 @@ def load_data(datadir: str, ticker_interval: int, pairs: Optional[List[str]] = N
|
||||
return result
|
||||
|
||||
|
||||
def tickerdata_to_dataframe(data):
|
||||
preprocessed = preprocess(data)
|
||||
return preprocessed
|
||||
|
||||
|
||||
def preprocess(tickerdata: Dict[str, List]) -> Dict[str, DataFrame]:
|
||||
"""Creates a dataframe and populates indicators for given ticker data"""
|
||||
return {pair: populate_indicators(parse_ticker_dataframe(pair_data))
|
||||
for pair, pair_data in tickerdata.items()}
|
||||
|
||||
|
||||
def make_testdata_path(datadir: str) -> str:
|
||||
"""Return the path where testdata files are stored"""
|
||||
return datadir or os.path.abspath(os.path.join(os.path.dirname(__file__),
|
||||
'..', 'tests', 'testdata'))
|
||||
return datadir or os.path.abspath(
|
||||
os.path.join(
|
||||
os.path.dirname(__file__), '..', 'tests', 'testdata'
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def download_pairs(datadir, pairs: List[str], ticker_interval: int) -> bool:
|
||||
@ -115,11 +105,6 @@ def download_pairs(datadir, pairs: List[str], ticker_interval: int) -> bool:
|
||||
return True
|
||||
|
||||
|
||||
def file_dump_json(filename, data):
|
||||
with open(filename, "wt") as fp:
|
||||
json.dump(data, fp)
|
||||
|
||||
|
||||
# FIX: 20180110, suggest rename interval to tick_interval
|
||||
def download_backtesting_testdata(datadir: str, pair: str, interval: int = 5) -> bool:
|
||||
"""
|
||||
@ -142,8 +127,8 @@ def download_backtesting_testdata(datadir: str, pair: str, interval: int = 5) ->
|
||||
))
|
||||
|
||||
if os.path.isfile(filename):
|
||||
with open(filename, "rt") as fp:
|
||||
data = json.load(fp)
|
||||
with open(filename, "rt") as file:
|
||||
data = json.load(file)
|
||||
logger.debug("Current Start: {}".format(data[1]['T']))
|
||||
logger.debug("Current End: {}".format(data[-1:][0]['T']))
|
||||
else:
|
||||
|
@ -1,235 +1,321 @@
|
||||
# pragma pylint: disable=missing-docstring,W0212
|
||||
# pragma pylint: disable=missing-docstring, W0212, too-many-arguments
|
||||
|
||||
"""
|
||||
This module contains the backtesting logic
|
||||
"""
|
||||
|
||||
from typing import Dict, Tuple, Any
|
||||
import logging
|
||||
from typing import Dict, Tuple
|
||||
|
||||
import arrow
|
||||
from pandas import DataFrame, Series
|
||||
from tabulate import tabulate
|
||||
|
||||
import freqtrade.misc as misc
|
||||
import freqtrade.optimize as optimize
|
||||
from freqtrade import exchange
|
||||
from freqtrade.analyze import populate_buy_trend, populate_sell_trend
|
||||
from freqtrade.arguments import Arguments
|
||||
from freqtrade.exchange import Bittrex
|
||||
from freqtrade.main import should_sell
|
||||
from freqtrade.configuration import Configuration
|
||||
from freqtrade import exchange
|
||||
from freqtrade.analyze import Analyze
|
||||
from freqtrade.logger import Logger
|
||||
from freqtrade.misc import file_dump_json
|
||||
from freqtrade.persistence import Trade
|
||||
from freqtrade.strategy.strategy import Strategy
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_timeframe(data: Dict[str, DataFrame]) -> Tuple[arrow.Arrow, arrow.Arrow]:
|
||||
class Backtesting(object):
|
||||
"""
|
||||
Get the maximum timeframe for the given backtest data
|
||||
:param data: dictionary with preprocessed backtesting data
|
||||
:return: tuple containing min_date, max_date
|
||||
"""
|
||||
all_dates = Series([])
|
||||
for pair_data in data.values():
|
||||
all_dates = all_dates.append(pair_data['date'])
|
||||
all_dates.sort_values(inplace=True)
|
||||
return arrow.get(all_dates.iloc[0]), arrow.get(all_dates.iloc[-1])
|
||||
Backtesting class: this class contains all the logic to run a backtest
|
||||
|
||||
To run a backtest:
|
||||
backtesting = Backtesting(config)
|
||||
backtesting.start()
|
||||
"""
|
||||
def __init__(self, config: Dict[str, Any]) -> None:
|
||||
self.logging = Logger(name=__name__)
|
||||
self.logger = self.logging.get_logger()
|
||||
|
||||
def generate_text_table(
|
||||
data: Dict[str, Dict], results: DataFrame, stake_currency, ticker_interval) -> str:
|
||||
"""
|
||||
Generates and returns a text table for the given backtest data and the results dataframe
|
||||
:return: pretty printed table with tabulate as str
|
||||
"""
|
||||
floatfmt = ('s', 'd', '.2f', '.8f', '.1f')
|
||||
tabular_data = []
|
||||
headers = ['pair', 'buy count', 'avg profit %',
|
||||
'total profit ' + stake_currency, 'avg duration', 'profit', 'loss']
|
||||
for pair in data:
|
||||
result = results[results.currency == pair]
|
||||
self.config = config
|
||||
self.analyze = None
|
||||
self.ticker_interval = None
|
||||
self.tickerdata_to_dataframe = None
|
||||
self.populate_buy_trend = None
|
||||
self.populate_sell_trend = None
|
||||
self._init()
|
||||
|
||||
def _init(self) -> None:
|
||||
"""
|
||||
Init objects required for backtesting
|
||||
:return: None
|
||||
"""
|
||||
self.analyze = Analyze(self.config)
|
||||
self.ticker_interval = self.analyze.strategy.ticker_interval
|
||||
self.tickerdata_to_dataframe = self.analyze.tickerdata_to_dataframe
|
||||
self.populate_buy_trend = self.analyze.populate_buy_trend
|
||||
self.populate_sell_trend = self.analyze.populate_sell_trend
|
||||
exchange._API = Bittrex({'key': '', 'secret': ''})
|
||||
|
||||
@staticmethod
|
||||
def get_timeframe(data: Dict[str, DataFrame]) -> Tuple[arrow.Arrow, arrow.Arrow]:
|
||||
"""
|
||||
Get the maximum timeframe for the given backtest data
|
||||
:param data: dictionary with preprocessed backtesting data
|
||||
:return: tuple containing min_date, max_date
|
||||
"""
|
||||
all_dates = Series([])
|
||||
for pair_data in data.values():
|
||||
all_dates = all_dates.append(pair_data['date'])
|
||||
all_dates.sort_values(inplace=True)
|
||||
return arrow.get(all_dates.iloc[0]), arrow.get(all_dates.iloc[-1])
|
||||
|
||||
def _generate_text_table(self, data: Dict[str, Dict], results: DataFrame) -> str:
|
||||
"""
|
||||
Generates and returns a text table for the given backtest data and the results dataframe
|
||||
:return: pretty printed table with tabulate as str
|
||||
"""
|
||||
stake_currency = self.config.get('stake_currency')
|
||||
ticker_interval = self.ticker_interval
|
||||
|
||||
floatfmt = ('s', 'd', '.2f', '.8f', '.1f')
|
||||
tabular_data = []
|
||||
headers = ['pair', 'buy count', 'avg profit %',
|
||||
'total profit ' + stake_currency, 'avg duration', 'profit', 'loss']
|
||||
for pair in data:
|
||||
result = results[results.currency == pair]
|
||||
tabular_data.append([
|
||||
pair,
|
||||
len(result.index),
|
||||
result.profit_percent.mean() * 100.0,
|
||||
result.profit_BTC.sum(),
|
||||
result.duration.mean() * ticker_interval,
|
||||
len(result[result.profit_BTC > 0]),
|
||||
len(result[result.profit_BTC < 0])
|
||||
])
|
||||
|
||||
# Append Total
|
||||
tabular_data.append([
|
||||
pair,
|
||||
len(result.index),
|
||||
result.profit_percent.mean() * 100.0,
|
||||
result.profit_BTC.sum(),
|
||||
result.duration.mean() * ticker_interval,
|
||||
len(result[result.profit_BTC > 0]),
|
||||
len(result[result.profit_BTC < 0])
|
||||
'TOTAL',
|
||||
len(results.index),
|
||||
results.profit_percent.mean() * 100.0,
|
||||
results.profit_BTC.sum(),
|
||||
results.duration.mean() * ticker_interval,
|
||||
len(results[results.profit_BTC > 0]),
|
||||
len(results[results.profit_BTC < 0])
|
||||
])
|
||||
return tabulate(tabular_data, headers=headers, floatfmt=floatfmt)
|
||||
|
||||
# Append Total
|
||||
tabular_data.append([
|
||||
'TOTAL',
|
||||
len(results.index),
|
||||
results.profit_percent.mean() * 100.0,
|
||||
results.profit_BTC.sum(),
|
||||
results.duration.mean() * ticker_interval,
|
||||
len(results[results.profit_BTC > 0]),
|
||||
len(results[results.profit_BTC < 0])
|
||||
])
|
||||
return tabulate(tabular_data, headers=headers, floatfmt=floatfmt)
|
||||
def _get_sell_trade_entry(self, pair, row, buy_subset, ticker, trade_count_lock, args):
|
||||
stake_amount = args['stake_amount']
|
||||
max_open_trades = args.get('max_open_trades', 0)
|
||||
trade = Trade(
|
||||
open_rate=row.close,
|
||||
open_date=row.date,
|
||||
stake_amount=stake_amount,
|
||||
amount=stake_amount / row.open,
|
||||
fee=exchange.get_fee()
|
||||
)
|
||||
|
||||
|
||||
def get_sell_trade_entry(pair, row, buy_subset, ticker, trade_count_lock, args):
|
||||
stake_amount = args['stake_amount']
|
||||
max_open_trades = args.get('max_open_trades', 0)
|
||||
trade = Trade(open_rate=row.close,
|
||||
open_date=row.date,
|
||||
stake_amount=stake_amount,
|
||||
amount=stake_amount / row.open,
|
||||
fee=exchange.get_fee()
|
||||
)
|
||||
|
||||
# calculate win/lose forwards from buy point
|
||||
sell_subset = ticker[ticker.date > row.date][['close', 'date', 'sell']]
|
||||
for row2 in sell_subset.itertuples(index=True):
|
||||
if max_open_trades > 0:
|
||||
# Increase trade_count_lock for every iteration
|
||||
trade_count_lock[row2.date] = trade_count_lock.get(row2.date, 0) + 1
|
||||
|
||||
# Buy is on if, in the buy_subset, there is a row that matches the date
|
||||
# of the sell event
|
||||
buy_signal = not buy_subset[buy_subset.date == row2.date].empty
|
||||
if(should_sell(trade, row2.close, row2.date, buy_signal, row2.sell)):
|
||||
return row2, (pair,
|
||||
trade.calc_profit_percent(rate=row2.close),
|
||||
trade.calc_profit(rate=row2.close),
|
||||
row2.Index - row.Index
|
||||
), row2.date
|
||||
return None
|
||||
|
||||
|
||||
def backtest(args) -> DataFrame:
|
||||
"""
|
||||
Implements backtesting functionality
|
||||
:param args: a dict containing:
|
||||
stake_amount: btc amount to use for each trade
|
||||
processed: a processed dictionary with format {pair, data}
|
||||
max_open_trades: maximum number of concurrent trades (default: 0, disabled)
|
||||
realistic: do we try to simulate realistic trades? (default: True)
|
||||
sell_profit_only: sell if profit only
|
||||
use_sell_signal: act on sell-signal
|
||||
stoploss: use stoploss
|
||||
:return: DataFrame
|
||||
"""
|
||||
processed = args['processed']
|
||||
max_open_trades = args.get('max_open_trades', 0)
|
||||
realistic = args.get('realistic', True)
|
||||
record = args.get('record', None)
|
||||
records = []
|
||||
trades = []
|
||||
trade_count_lock: dict = {}
|
||||
exchange._API = Bittrex({'key': '', 'secret': ''})
|
||||
for pair, pair_data in processed.items():
|
||||
pair_data['buy'], pair_data['sell'] = 0, 0
|
||||
ticker = populate_sell_trend(populate_buy_trend(pair_data))
|
||||
# for each buy point
|
||||
lock_pair_until = None
|
||||
headers = ['buy', 'open', 'close', 'date', 'sell']
|
||||
buy_subset = ticker[(ticker.buy == 1) & (ticker.sell == 0)][headers]
|
||||
for row in buy_subset.itertuples(index=True):
|
||||
if realistic:
|
||||
if lock_pair_until is not None and row.date <= lock_pair_until:
|
||||
continue
|
||||
# calculate win/lose forwards from buy point
|
||||
sell_subset = ticker[ticker.date > row.date][['close', 'date', 'sell']]
|
||||
for row2 in sell_subset.itertuples(index=True):
|
||||
if max_open_trades > 0:
|
||||
# Check if max_open_trades has already been reached for the given date
|
||||
if not trade_count_lock.get(row.date, 0) < max_open_trades:
|
||||
continue
|
||||
# Increase trade_count_lock for every iteration
|
||||
trade_count_lock[row2.date] = trade_count_lock.get(row2.date, 0) + 1
|
||||
|
||||
if max_open_trades > 0:
|
||||
# Increase lock
|
||||
trade_count_lock[row.date] = trade_count_lock.get(row.date, 0) + 1
|
||||
# Buy is on if, in the buy_subset, there is a row that matches the date
|
||||
# of the sell event
|
||||
buy_signal = not buy_subset[buy_subset.date == row2.date].empty
|
||||
if(
|
||||
self.analyze.should_sell(
|
||||
trade=trade,
|
||||
rate=row2.close,
|
||||
date=row2.date,
|
||||
buy=buy_signal,
|
||||
sell=row2.sell
|
||||
)
|
||||
):
|
||||
return \
|
||||
row2, \
|
||||
(
|
||||
pair,
|
||||
trade.calc_profit_percent(rate=row2.close),
|
||||
trade.calc_profit(rate=row2.close),
|
||||
row2.Index - row.Index
|
||||
),\
|
||||
row2.date
|
||||
return None
|
||||
|
||||
ret = get_sell_trade_entry(pair, row, buy_subset, ticker,
|
||||
trade_count_lock, args)
|
||||
if ret:
|
||||
row2, trade_entry, next_date = ret
|
||||
lock_pair_until = next_date
|
||||
trades.append(trade_entry)
|
||||
if record:
|
||||
# Note, need to be json.dump friendly
|
||||
# record a tuple of pair, current_profit_percent,
|
||||
# entry-date, duration
|
||||
records.append((pair, trade_entry[1],
|
||||
row.date.strftime('%s'),
|
||||
row2.date.strftime('%s'),
|
||||
row.Index, trade_entry[3]))
|
||||
# For now export inside backtest(), maybe change so that backtest()
|
||||
# returns a tuple like: (dataframe, records, logs, etc)
|
||||
if record and record.find('trades') >= 0:
|
||||
logger.info('Dumping backtest results')
|
||||
misc.file_dump_json('backtest-result.json', records)
|
||||
labels = ['currency', 'profit_percent', 'profit_BTC', 'duration']
|
||||
return DataFrame.from_records(trades, columns=labels)
|
||||
def backtest(self, args) -> DataFrame:
|
||||
"""
|
||||
Implements backtesting functionality
|
||||
|
||||
NOTE: This method is used by Hyperopt at each iteration. Please keep it optimized.
|
||||
Try to keep the code clean, but note that some accessors can be slower than plain functions.
|
||||
Avoid logging in this method.
|
||||
|
||||
:param args: a dict containing:
|
||||
stake_amount: btc amount to use for each trade
|
||||
processed: a processed dictionary with format {pair, data}
|
||||
max_open_trades: maximum number of concurrent trades (default: 0, disabled)
|
||||
realistic: do we try to simulate realistic trades? (default: True)
|
||||
sell_profit_only: sell if profit only
|
||||
use_sell_signal: act on sell-signal
|
||||
stoploss: use stoploss
|
||||
:return: DataFrame
|
||||
"""
|
||||
processed = args['processed']
|
||||
max_open_trades = args.get('max_open_trades', 0)
|
||||
realistic = args.get('realistic', True)
|
||||
record = args.get('record', None)
|
||||
records = []
|
||||
trades = []
|
||||
trade_count_lock = {}
|
||||
for pair, pair_data in processed.items():
|
||||
pair_data['buy'], pair_data['sell'] = 0, 0
|
||||
ticker = self.populate_sell_trend(
|
||||
self.populate_buy_trend(pair_data)
|
||||
)
|
||||
# for each buy point
|
||||
lock_pair_until = None
|
||||
headers = ['buy', 'open', 'close', 'date', 'sell']
|
||||
buy_subset = ticker[(ticker.buy == 1) & (ticker.sell == 0)][headers]
|
||||
for row in buy_subset.itertuples(index=True):
|
||||
if realistic:
|
||||
if lock_pair_until is not None and row.date <= lock_pair_until:
|
||||
continue
|
||||
if max_open_trades > 0:
|
||||
# Check if max_open_trades has already been reached for the given date
|
||||
if not trade_count_lock.get(row.date, 0) < max_open_trades:
|
||||
continue
|
||||
|
||||
if max_open_trades > 0:
|
||||
# Increase lock
|
||||
trade_count_lock[row.date] = trade_count_lock.get(row.date, 0) + 1
|
||||
|
||||
ret = self._get_sell_trade_entry(
|
||||
pair=pair,
|
||||
row=row,
|
||||
buy_subset=buy_subset,
|
||||
ticker=ticker,
|
||||
trade_count_lock=trade_count_lock,
|
||||
args=args
|
||||
)
|
||||
|
||||
if ret:
|
||||
row2, trade_entry, next_date = ret
|
||||
lock_pair_until = next_date
|
||||
trades.append(trade_entry)
|
||||
if record:
|
||||
# Note, need to be json.dump friendly
|
||||
# record a tuple of pair, current_profit_percent,
|
||||
# entry-date, duration
|
||||
records.append((pair, trade_entry[1],
|
||||
row.date.strftime('%s'),
|
||||
row2.date.strftime('%s'),
|
||||
row.Index, trade_entry[3]))
|
||||
# For now export inside backtest(), maybe change so that backtest()
|
||||
# returns a tuple like: (dataframe, records, logs, etc)
|
||||
if record and record.find('trades') >= 0:
|
||||
self.logger.info('Dumping backtest results')
|
||||
file_dump_json('backtest-result.json', records)
|
||||
labels = ['currency', 'profit_percent', 'profit_BTC', 'duration']
|
||||
return DataFrame.from_records(trades, columns=labels)
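# Illustrative direct call (not part of this diff), mirroring the unit tests added
# below; assumes a Backtesting instance 'backtesting', a config dict and ticker
# data already loaded into 'data' via optimize.load_data().
results = backtesting.backtest(
    {
        'stake_amount': config['stake_amount'],
        'processed': backtesting.tickerdata_to_dataframe(data),
        'max_open_trades': 10,
        'realistic': True
    }
)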
|
||||
|
||||
def start(self) -> None:
|
||||
"""
|
||||
Run a backtesting end-to-end
|
||||
:return: None
|
||||
"""
|
||||
data = {}
|
||||
pairs = self.config['exchange']['pair_whitelist']
|
||||
|
||||
if self.config.get('live'):
|
||||
self.logger.info('Downloading data for all pairs in whitelist ...')
|
||||
for pair in pairs:
|
||||
data[pair] = exchange.get_ticker_history(pair, self.ticker_interval)
|
||||
else:
|
||||
self.logger.info('Using local backtesting data (using whitelist in given config) ...')
|
||||
self.logger.info('Using stake_currency: %s ...', self.config['stake_currency'])
|
||||
self.logger.info('Using stake_amount: %s ...', self.config['stake_amount'])
|
||||
|
||||
timerange = Arguments.parse_timerange(self.config.get('timerange'))
|
||||
data = optimize.load_data(
|
||||
self.config['datadir'],
|
||||
pairs=pairs,
|
||||
ticker_interval=self.ticker_interval,
|
||||
refresh_pairs=self.config.get('refresh_pairs', False),
|
||||
timerange=timerange
|
||||
)
|
||||
|
||||
max_open_trades = self.config.get('max_open_trades', 0)
|
||||
|
||||
preprocessed = self.tickerdata_to_dataframe(data)
|
||||
# Print timeframe
|
||||
min_date, max_date = self.get_timeframe(preprocessed)
|
||||
self.logger.info(
|
||||
'Measuring data from %s up to %s (%s days)..',
|
||||
min_date.isoformat(),
|
||||
max_date.isoformat(),
|
||||
(max_date - min_date).days
|
||||
)
|
||||
|
||||
# Execute backtest and print results
|
||||
sell_profit_only = self.config.get('experimental', {}).get('sell_profit_only', False)
|
||||
use_sell_signal = self.config.get('experimental', {}).get('use_sell_signal', False)
|
||||
results = self.backtest(
|
||||
{
|
||||
'stake_amount': self.config.get('stake_amount'),
|
||||
'processed': preprocessed,
|
||||
'max_open_trades': max_open_trades,
|
||||
'realistic': self.config.get('realistic_simulation', False),
|
||||
'sell_profit_only': sell_profit_only,
|
||||
'use_sell_signal': use_sell_signal,
|
||||
'stoploss': self.analyze.strategy.stoploss,
|
||||
'record': self.config.get('export')
|
||||
}
|
||||
)
|
||||
|
||||
self.logging.set_format('%(message)s')
|
||||
self.logger.info(
|
||||
'\n==================================== '
|
||||
'BACKTESTING REPORT'
|
||||
' ====================================\n'
|
||||
'%s',
|
||||
self._generate_text_table(
|
||||
data,
|
||||
results
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def start(args):
|
||||
def setup_configuration(args) -> Dict[str, Any]:
|
||||
"""
|
||||
Prepare the configuration for the backtesting
|
||||
:param args: Cli args from Arguments()
|
||||
:return: Configuration
|
||||
"""
|
||||
configuration = Configuration(args)
|
||||
config = configuration.get_config()
|
||||
|
||||
# Ensure we do not use Exchange credentials
|
||||
config['exchange']['key'] = ''
|
||||
config['exchange']['secret'] = ''
|
||||
|
||||
return config
|
||||
|
||||
|
||||
def start(args) -> None:
|
||||
"""
|
||||
Start Backtesting script
|
||||
:param args: Cli args from Arguments()
|
||||
:return: None
|
||||
"""
|
||||
|
||||
# Initialize logger
|
||||
logging.basicConfig(
|
||||
level=args.loglevel,
|
||||
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
|
||||
)
|
||||
logger = Logger(name=__name__).get_logger()
|
||||
logger.info('Starting freqtrade in Backtesting mode')
|
||||
|
||||
exchange._API = Bittrex({'key': '', 'secret': ''})
|
||||
# Initialize configuration
|
||||
config = setup_configuration(args)
|
||||
|
||||
logger.info('Using config: %s ...', args.config)
|
||||
config = misc.load_config(args.config)
|
||||
|
||||
# If -i/--ticker-interval is used we override the configuration parameter
|
||||
# (that will override the strategy configuration)
|
||||
if args.ticker_interval:
|
||||
config.update({'ticker_interval': args.ticker_interval})
|
||||
|
||||
# init the strategy to use
|
||||
config.update({'strategy': args.strategy})
|
||||
strategy = Strategy()
|
||||
strategy.init(config)
|
||||
|
||||
logger.info('Using ticker_interval: %d ...', strategy.ticker_interval)
|
||||
|
||||
data = {}
|
||||
pairs = config['exchange']['pair_whitelist']
|
||||
if args.live:
|
||||
logger.info('Downloading data for all pairs in whitelist ...')
|
||||
for pair in pairs:
|
||||
data[pair] = exchange.get_ticker_history(pair, strategy.ticker_interval)
|
||||
else:
|
||||
logger.info('Using local backtesting data (using whitelist in given config) ...')
|
||||
logger.info('Using stake_currency: %s ...', config['stake_currency'])
|
||||
logger.info('Using stake_amount: %s ...', config['stake_amount'])
|
||||
|
||||
timerange = misc.parse_timerange(args.timerange)
|
||||
data = optimize.load_data(args.datadir,
|
||||
pairs=pairs,
|
||||
ticker_interval=strategy.ticker_interval,
|
||||
refresh_pairs=args.refresh_pairs,
|
||||
timerange=timerange)
|
||||
max_open_trades = 0
|
||||
if args.realistic_simulation:
|
||||
logger.info('Using max_open_trades: %s ...', config['max_open_trades'])
|
||||
max_open_trades = config['max_open_trades']
|
||||
|
||||
# Monkey patch config
|
||||
from freqtrade import main
|
||||
main._CONF = config
|
||||
|
||||
preprocessed = optimize.tickerdata_to_dataframe(data)
|
||||
# Print timeframe
|
||||
min_date, max_date = get_timeframe(preprocessed)
|
||||
logger.info('Measuring data from %s up to %s (%s days)..',
|
||||
min_date.isoformat(),
|
||||
max_date.isoformat(),
|
||||
(max_date-min_date).days)
|
||||
# Execute backtest and print results
|
||||
sell_profit_only = config.get('experimental', {}).get('sell_profit_only', False)
|
||||
use_sell_signal = config.get('experimental', {}).get('use_sell_signal', False)
|
||||
results = backtest({'stake_amount': config['stake_amount'],
|
||||
'processed': preprocessed,
|
||||
'max_open_trades': max_open_trades,
|
||||
'realistic': args.realistic_simulation,
|
||||
'sell_profit_only': sell_profit_only,
|
||||
'use_sell_signal': use_sell_signal,
|
||||
'stoploss': strategy.stoploss,
|
||||
'record': args.export
|
||||
})
|
||||
logger.info(
|
||||
'\n==================================== BACKTESTING REPORT ====================================\n%s', # noqa
|
||||
generate_text_table(data, results, config['stake_currency'], strategy.ticker_interval)
|
||||
)
|
||||
# Initialize backtesting object
|
||||
backtesting = Backtesting(config)
|
||||
backtesting.start()
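# Illustrative only (not part of this diff): the unit tests added below can be run with
#
#     pytest freqtrade/tests/optimize/test_backtesting.py
#
# (the test file path is assumed from the repository layout used by this commit).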
|
||||
|
@ -269,13 +269,6 @@ def result():
|
||||
return Analyze.parse_ticker_dataframe(json.load(data_file))
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def default_strategy():
|
||||
strategy = Strategy()
|
||||
strategy.init({'strategy': 'default_strategy'})
|
||||
return strategy
|
||||
|
||||
|
||||
# FIX:
|
||||
# Create an fixture/function
|
||||
# that inserts a trade of some type and open-status
|
||||
|
@ -1,14 +1,24 @@
|
||||
# pragma pylint: disable=missing-docstring, W0212, line-too-long, C0103
|
||||
# pragma pylint: disable=missing-docstring, W0212, line-too-long, C0103, unused-argument
|
||||
|
||||
import logging
|
||||
import json
|
||||
import math
|
||||
from typing import List
|
||||
from copy import deepcopy
|
||||
from unittest.mock import MagicMock
|
||||
import pandas as pd
|
||||
from freqtrade import exchange, optimize
|
||||
from freqtrade.exchange import Bittrex
|
||||
from freqtrade.optimize import preprocess
|
||||
from freqtrade.optimize.backtesting import backtest, generate_text_table, get_timeframe
|
||||
import freqtrade.optimize.backtesting as backtesting
|
||||
from freqtrade import optimize
|
||||
from freqtrade.optimize.backtesting import Backtesting, start, setup_configuration
|
||||
from freqtrade.arguments import Arguments
|
||||
from freqtrade.analyze import Analyze
|
||||
import freqtrade.tests.conftest as tt # test tools
|
||||
|
||||
|
||||
# Avoid re-initializing the same object over and over
|
||||
_BACKTESTING = Backtesting(tt.default_conf())
|
||||
|
||||
|
||||
def get_args(args) -> List[str]:
|
||||
return Arguments(args, '').get_parsed_arg()
|
||||
|
||||
|
||||
def trim_dictlist(dict_list, num):
|
||||
@ -18,60 +28,6 @@ def trim_dictlist(dict_list, num):
|
||||
return new
|
||||
|
||||
|
||||
def test_generate_text_table():
|
||||
results = pd.DataFrame(
|
||||
{
|
||||
'currency': ['BTC_ETH', 'BTC_ETH'],
|
||||
'profit_percent': [0.1, 0.2],
|
||||
'profit_BTC': [0.2, 0.4],
|
||||
'duration': [10, 30],
|
||||
'profit': [2, 0],
|
||||
'loss': [0, 0]
|
||||
}
|
||||
)
|
||||
print(generate_text_table({'BTC_ETH': {}}, results, 'BTC', 5))
|
||||
assert generate_text_table({'BTC_ETH': {}}, results, 'BTC', 5) == (
|
||||
'pair buy count avg profit % total profit BTC avg duration profit loss\n' # noqa
|
||||
'------- ----------- -------------- ------------------ -------------- -------- ------\n' # noqa
|
||||
'BTC_ETH 2 15.00 0.60000000 100.0 2 0\n' # noqa
|
||||
'TOTAL 2 15.00 0.60000000 100.0 2 0') # noqa
|
||||
|
||||
|
||||
def test_get_timeframe(default_strategy):
|
||||
data = preprocess(optimize.load_data(
|
||||
None, ticker_interval=1, pairs=['BTC_UNITEST']))
|
||||
min_date, max_date = get_timeframe(data)
|
||||
assert min_date.isoformat() == '2017-11-04T23:02:00+00:00'
|
||||
assert max_date.isoformat() == '2017-11-14T22:59:00+00:00'
|
||||
|
||||
|
||||
def test_backtest(default_strategy, default_conf, mocker):
|
||||
mocker.patch.dict('freqtrade.main._CONF', default_conf)
|
||||
exchange._API = Bittrex({'key': '', 'secret': ''})
|
||||
|
||||
data = optimize.load_data(None, ticker_interval=5, pairs=['BTC_ETH'])
|
||||
data = trim_dictlist(data, -200)
|
||||
results = backtest({'stake_amount': default_conf['stake_amount'],
|
||||
'processed': optimize.preprocess(data),
|
||||
'max_open_trades': 10,
|
||||
'realistic': True})
|
||||
assert not results.empty
|
||||
|
||||
|
||||
def test_backtest_1min_ticker_interval(default_strategy, default_conf, mocker):
|
||||
mocker.patch.dict('freqtrade.main._CONF', default_conf)
|
||||
exchange._API = Bittrex({'key': '', 'secret': ''})
|
||||
|
||||
# Run a backtest on the existing 1-min ticker_interval testdata
|
||||
data = optimize.load_data(None, ticker_interval=1, pairs=['BTC_UNITEST'])
|
||||
data = trim_dictlist(data, -200)
|
||||
results = backtest({'stake_amount': default_conf['stake_amount'],
|
||||
'processed': optimize.preprocess(data),
|
||||
'max_open_trades': 1,
|
||||
'realistic': True})
|
||||
assert not results.empty
|
||||
|
||||
|
||||
def load_data_test(what):
|
||||
timerange = ((None, 'line'), None, -100)
|
||||
data = optimize.load_data(None, ticker_interval=1, pairs=['BTC_UNITEST'], timerange=timerange)
|
||||
@ -115,79 +71,324 @@ def load_data_test(what):
|
||||
return data
|
||||
|
||||
|
||||
def simple_backtest(config, contour, num_results):
|
||||
def simple_backtest(config, contour, num_results) -> None:
|
||||
backtesting = _BACKTESTING
|
||||
|
||||
data = load_data_test(contour)
|
||||
processed = optimize.preprocess(data)
|
||||
processed = backtesting.tickerdata_to_dataframe(data)
|
||||
assert isinstance(processed, dict)
|
||||
results = backtest({'stake_amount': config['stake_amount'],
|
||||
'processed': processed,
|
||||
'max_open_trades': 1,
|
||||
'realistic': True})
|
||||
results = backtesting.backtest(
|
||||
{
|
||||
'stake_amount': config['stake_amount'],
|
||||
'processed': processed,
|
||||
'max_open_trades': 1,
|
||||
'realistic': True
|
||||
}
|
||||
)
|
||||
# results :: <class 'pandas.core.frame.DataFrame'>
|
||||
assert len(results) == num_results
|
||||
|
||||
|
||||
# Test backtest on offline data
|
||||
# loaded by freqtrade/optimize/__init__.py::load_data()
|
||||
|
||||
|
||||
def test_backtest2(default_conf, mocker, default_strategy):
|
||||
mocker.patch.dict('freqtrade.main._CONF', default_conf)
|
||||
data = optimize.load_data(None, ticker_interval=5, pairs=['BTC_ETH'])
|
||||
data = trim_dictlist(data, -200)
|
||||
results = backtest({'stake_amount': default_conf['stake_amount'],
|
||||
'processed': optimize.preprocess(data),
|
||||
'max_open_trades': 10,
|
||||
'realistic': True})
|
||||
assert not results.empty
|
||||
|
||||
|
||||
def test_processed(default_conf, mocker, default_strategy):
|
||||
mocker.patch.dict('freqtrade.main._CONF', default_conf)
|
||||
dict_of_tickerrows = load_data_test('raise')
|
||||
dataframes = optimize.preprocess(dict_of_tickerrows)
|
||||
dataframe = dataframes['BTC_UNITEST']
|
||||
cols = dataframe.columns
|
||||
# assert the dataframe got some of the indicator columns
|
||||
for col in ['close', 'high', 'low', 'open', 'date',
|
||||
'ema50', 'ao', 'macd', 'plus_dm']:
|
||||
assert col in cols
|
||||
|
||||
|
||||
def test_backtest_pricecontours(default_conf, mocker, default_strategy):
|
||||
mocker.patch.dict('freqtrade.main._CONF', default_conf)
|
||||
tests = [['raise', 17], ['lower', 0], ['sine', 17]]
|
||||
for [contour, numres] in tests:
|
||||
simple_backtest(default_conf, contour, numres)
|
||||
|
||||
|
||||
def mocked_load_data(datadir, pairs=[], ticker_interval=0, refresh_pairs=False, timerange=None):
|
||||
tickerdata = optimize.load_tickerdata_file(datadir, 'BTC_UNITEST', 1, timerange=timerange)
|
||||
pairdata = {'BTC_UNITEST': tickerdata}
|
||||
return pairdata
|
||||
|
||||
|
||||
def test_backtest_start(default_conf, mocker, caplog):
|
||||
caplog.set_level(logging.INFO)
|
||||
default_conf['exchange']['pair_whitelist'] = ['BTC_UNITEST']
|
||||
mocker.patch.dict('freqtrade.main._CONF', default_conf)
|
||||
mocker.patch('freqtrade.misc.load_config', new=lambda s: default_conf)
|
||||
mocker.patch.multiple('freqtrade.optimize',
|
||||
load_data=mocked_load_data)
|
||||
args = MagicMock()
|
||||
args.ticker_interval = 1
|
||||
args.level = 10
|
||||
args.live = False
|
||||
args.datadir = None
|
||||
args.export = None
|
||||
args.timerange = '-100' # needed due to MagicMock malleability
|
||||
backtesting.start(args)
|
||||
# Unit tests
|
||||
def test_setup_configuration_without_arguments(mocker, default_conf, caplog) -> None:
|
||||
"""
|
||||
Test setup_configuration() function
|
||||
"""
|
||||
mocker.patch('freqtrade.configuration.open', mocker.mock_open(
|
||||
read_data=json.dumps(default_conf)
|
||||
))
|
||||
|
||||
args = [
|
||||
'--config', 'config.json',
|
||||
'--strategy', 'default_strategy',
|
||||
'backtesting'
|
||||
]
|
||||
|
||||
config = setup_configuration(get_args(args))
|
||||
assert 'max_open_trades' in config
|
||||
assert 'stake_currency' in config
|
||||
assert 'stake_amount' in config
|
||||
assert 'exchange' in config
|
||||
assert 'pair_whitelist' in config['exchange']
|
||||
assert 'datadir' in config
|
||||
assert tt.log_has(
|
||||
'Parameter --datadir detected: {} ...'.format(config['datadir']),
|
||||
caplog.record_tuples
|
||||
)
|
||||
assert 'ticker_interval' in config
|
||||
assert not tt.log_has('Parameter -i/--ticker-interval detected ...', caplog.record_tuples)
|
||||
|
||||
assert 'live' not in config
|
||||
assert not tt.log_has('Parameter -l/--live detected ...', caplog.record_tuples)
|
||||
|
||||
assert 'realistic_simulation' not in config
|
||||
assert not tt.log_has('Parameter --realistic-simulation detected ...', caplog.record_tuples)
|
||||
|
||||
assert 'refresh_pairs' not in config
|
||||
assert not tt.log_has('Parameter -r/--refresh-pairs-cached detected ...', caplog.record_tuples)
|
||||
|
||||
assert 'timerange' not in config
|
||||
assert 'export' not in config
|
||||
|
||||
|
||||
def test_setup_configuration_with_arguments(mocker, default_conf, caplog) -> None:
|
||||
"""
|
||||
Test setup_configuration() function
|
||||
"""
|
||||
mocker.patch('freqtrade.configuration.open', mocker.mock_open(
|
||||
read_data=json.dumps(default_conf)
|
||||
))
|
||||
|
||||
args = [
|
||||
'--config', 'config.json',
|
||||
'--strategy', 'default_strategy',
|
||||
'--datadir', '/foo/bar',
|
||||
'backtesting',
|
||||
'--ticker-interval', '1',
|
||||
'--live',
|
||||
'--realistic-simulation',
|
||||
'--refresh-pairs-cached',
|
||||
'--timerange', ':100',
|
||||
'--export', '/bar/foo'
|
||||
]
|
||||
|
||||
config = setup_configuration(get_args(args))
|
||||
assert 'max_open_trades' in config
|
||||
assert 'stake_currency' in config
|
||||
assert 'stake_amount' in config
|
||||
assert 'exchange' in config
|
||||
assert 'pair_whitelist' in config['exchange']
|
||||
assert 'datadir' in config
|
||||
assert tt.log_has(
|
||||
'Parameter --datadir detected: {} ...'.format(config['datadir']),
|
||||
caplog.record_tuples
|
||||
)
|
||||
assert 'ticker_interval' in config
|
||||
assert tt.log_has('Parameter -i/--ticker-interval detected ...', caplog.record_tuples)
|
||||
assert tt.log_has(
|
||||
'Using ticker_interval: 1 ...',
|
||||
caplog.record_tuples
|
||||
)
|
||||
|
||||
assert 'live' in config
|
||||
assert tt.log_has('Parameter -l/--live detected ...', caplog.record_tuples)
|
||||
|
||||
assert 'realistic_simulation' in config
|
||||
assert tt.log_has('Parameter --realistic-simulation detected ...', caplog.record_tuples)
|
||||
assert tt.log_has('Using max_open_trades: 1 ...', caplog.record_tuples)
|
||||
|
||||
assert 'refresh_pairs' in config
|
||||
assert tt.log_has('Parameter -r/--refresh-pairs-cached detected ...', caplog.record_tuples)
|
||||
|
||||
import pprint
|
||||
pprint.pprint(caplog.record_tuples)
|
||||
pprint.pprint(config['timerange'])
|
||||
assert 'timerange' in config
|
||||
assert tt.log_has(
|
||||
'Parameter --timerange detected: {} ...'.format(config['timerange']),
|
||||
caplog.record_tuples
|
||||
)
|
||||
|
||||
assert 'export' in config
|
||||
assert tt.log_has(
|
||||
'Parameter --export detected: {} ...'.format(config['export']),
|
||||
caplog.record_tuples
|
||||
)
|
||||
|
||||
|
||||
def test_start(mocker, default_conf, caplog) -> None:
|
||||
"""
|
||||
Test start() function
|
||||
"""
|
||||
start_mock = MagicMock()
|
||||
mocker.patch('freqtrade.optimize.backtesting.Backtesting.start', start_mock)
|
||||
mocker.patch('freqtrade.configuration.open', mocker.mock_open(
|
||||
read_data=json.dumps(default_conf)
|
||||
))
|
||||
args = [
|
||||
'--config', 'config.json',
|
||||
'--strategy', 'default_strategy',
|
||||
'backtesting'
|
||||
]
|
||||
args = get_args(args)
|
||||
start(args)
|
||||
assert tt.log_has(
|
||||
'Starting freqtrade in Backtesting mode',
|
||||
caplog.record_tuples
|
||||
)
|
||||
assert start_mock.call_count == 1
|
||||
|
||||
|
||||
def test_backtesting__init__(mocker, default_conf) -> None:
|
||||
"""
|
||||
Test Backtesting.__init__() method
|
||||
"""
|
||||
init_mock = MagicMock()
|
||||
mocker.patch('freqtrade.optimize.backtesting.Backtesting._init', init_mock)
|
||||
|
||||
backtesting = Backtesting(default_conf)
|
||||
assert backtesting.config == default_conf
|
||||
assert backtesting.analyze is None
|
||||
assert backtesting.ticker_interval is None
|
||||
assert backtesting.tickerdata_to_dataframe is None
|
||||
assert backtesting.populate_buy_trend is None
|
||||
assert backtesting.populate_sell_trend is None
|
||||
assert init_mock.call_count == 1
|
||||
|
||||
|
||||
def test_backtesting_init(default_conf) -> None:
|
||||
"""
|
||||
Test Backtesting._init() method
|
||||
"""
|
||||
backtesting = Backtesting(default_conf)
|
||||
assert backtesting.config == default_conf
|
||||
assert isinstance(backtesting.analyze, Analyze)
|
||||
assert backtesting.ticker_interval == 5
|
||||
assert callable(backtesting.tickerdata_to_dataframe)
|
||||
assert callable(backtesting.populate_buy_trend)
|
||||
assert callable(backtesting.populate_sell_trend)
|
||||
|
||||
|
||||
def test_get_timeframe() -> None:
|
||||
"""
|
||||
Test Backtesting.get_timeframe() method
|
||||
"""
|
||||
backtesting = _BACKTESTING
|
||||
|
||||
data = backtesting.tickerdata_to_dataframe(
|
||||
optimize.load_data(
|
||||
None,
|
||||
ticker_interval=1,
|
||||
pairs=['BTC_UNITEST']
|
||||
)
|
||||
)
|
||||
min_date, max_date = backtesting.get_timeframe(data)
|
||||
assert min_date.isoformat() == '2017-11-04T23:02:00+00:00'
|
||||
assert max_date.isoformat() == '2017-11-14T22:59:00+00:00'
|
||||
|
||||
|
||||
def test_generate_text_table():
|
||||
"""
|
||||
Test Backtesting.generate_text_table() method
|
||||
"""
|
||||
backtesting = _BACKTESTING
|
||||
|
||||
results = pd.DataFrame(
|
||||
{
|
||||
'currency': ['BTC_ETH', 'BTC_ETH'],
|
||||
'profit_percent': [0.1, 0.2],
|
||||
'profit_BTC': [0.2, 0.4],
|
||||
'duration': [10, 30],
|
||||
'profit': [2, 0],
|
||||
'loss': [0, 0]
|
||||
}
|
||||
)
|
||||
|
||||
result_str = (
|
||||
'pair buy count avg profit % '
|
||||
'total profit BTC avg duration profit loss\n'
|
||||
'------- ----------- -------------- '
|
||||
'------------------ -------------- -------- ------\n'
|
||||
'BTC_ETH 2 15.00 '
|
||||
'0.60000000 100.0 2 0\n'
|
||||
'TOTAL 2 15.00 '
|
||||
'0.60000000 100.0 2 0'
|
||||
)
|
||||
|
||||
assert backtesting._generate_text_table(data={'BTC_ETH': {}}, results=results) == result_str
|
||||
|
||||
|
||||
def test_backtesting_start(default_conf, mocker, caplog) -> None:
|
||||
"""
|
||||
Test Backtesting.start() method
|
||||
"""
|
||||
mocker.patch.multiple('freqtrade.optimize', load_data=mocked_load_data)
|
||||
mocker.patch('freqtrade.exchange.get_ticker_history', MagicMock)
|
||||
|
||||
conf = deepcopy(default_conf)
|
||||
conf['exchange']['pair_whitelist'] = ['BTC_UNITEST']
|
||||
conf['ticker_interval'] = 1
|
||||
conf['live'] = False
|
||||
conf['datadir'] = None
|
||||
conf['export'] = None
|
||||
conf['timerange'] = '-100'
|
||||
|
||||
backtesting = Backtesting(conf)
|
||||
backtesting.start()
|
||||
# check the logs; they will contain the backtest result
|
||||
exists = ['Using max_open_trades: 1 ...',
|
||||
'Using stake_amount: 0.001 ...',
|
||||
'Measuring data from 2017-11-14T21:17:00+00:00 '
|
||||
'up to 2017-11-14T22:59:00+00:00 (0 days)..']
|
||||
exists = [
|
||||
'Using local backtesting data (using whitelist in given config) ...',
|
||||
'Using stake_currency: BTC ...',
|
||||
'Using stake_amount: 0.001 ...',
|
||||
'Measuring data from 2017-11-14T21:17:00+00:00 '
|
||||
'up to 2017-11-14T22:59:00+00:00 (0 days)..'
|
||||
]
|
||||
for line in exists:
|
||||
assert ('freqtrade.optimize.backtesting',
|
||||
logging.INFO,
|
||||
line) in caplog.record_tuples
|
||||
assert tt.log_has(line, caplog.record_tuples)
|
||||
|
||||
|
||||
def test_backtest(default_conf) -> None:
|
||||
"""
|
||||
Test Backtesting.backtest() method
|
||||
"""
|
||||
backtesting = _BACKTESTING
|
||||
|
||||
data = optimize.load_data(None, ticker_interval=5, pairs=['BTC_ETH'])
|
||||
data = trim_dictlist(data, -200)
|
||||
results = backtesting.backtest(
|
||||
{
|
||||
'stake_amount': default_conf['stake_amount'],
|
||||
'processed': backtesting.tickerdata_to_dataframe(data),
|
||||
'max_open_trades': 10,
|
||||
'realistic': True
|
||||
}
|
||||
)
|
||||
assert not results.empty
|
||||
|
||||
|
||||
def test_backtest_1min_ticker_interval(default_conf) -> None:
|
||||
"""
|
||||
Test Backtesting.backtest() method with 1 min ticker
|
||||
"""
|
||||
backtesting = _BACKTESTING
|
||||
|
||||
# Run a backtest on the existing 1-min ticker_interval testdata
|
||||
data = optimize.load_data(None, ticker_interval=1, pairs=['BTC_UNITEST'])
|
||||
data = trim_dictlist(data, -200)
|
||||
results = backtesting.backtest(
|
||||
{
|
||||
'stake_amount': default_conf['stake_amount'],
|
||||
'processed': backtesting.tickerdata_to_dataframe(data),
|
||||
'max_open_trades': 1,
|
||||
'realistic': True
|
||||
}
|
||||
)
|
||||
assert not results.empty
|
||||
|
||||
|
||||
def test_processed() -> None:
|
||||
"""
|
||||
Test Backtesting.backtest() method with offline data
|
||||
"""
|
||||
backtesting = _BACKTESTING
|
||||
|
||||
dict_of_tickerrows = load_data_test('raise')
|
||||
dataframes = backtesting.tickerdata_to_dataframe(dict_of_tickerrows)
|
||||
dataframe = dataframes['BTC_UNITEST']
|
||||
cols = dataframe.columns
|
||||
# assert the dataframe got some of the indicator columns
|
||||
for col in ['close', 'high', 'low', 'open', 'date',
|
||||
'ema50', 'ao', 'macd', 'plus_dm']:
|
||||
assert col in cols
|
||||
|
||||
|
||||
def test_backtest_pricecontours(default_conf) -> None:
|
||||
tests = [['raise', 17], ['lower', 0], ['sine', 17]]
|
||||
for [contour, numres] in tests:
|
||||
simple_backtest(default_conf, contour, numres)
|
||||
|
@ -5,10 +5,11 @@ import json
|
||||
import logging
|
||||
import uuid
|
||||
from shutil import copyfile
|
||||
from freqtrade import exchange, optimize
|
||||
from freqtrade.exchange import Bittrex
|
||||
from freqtrade import optimize
|
||||
from freqtrade.analyze import Analyze
|
||||
from freqtrade.optimize.__init__ import make_testdata_path, download_pairs,\
|
||||
download_backtesting_testdata, load_tickerdata_file, trim_tickerlist, file_dump_json
|
||||
download_backtesting_testdata, load_tickerdata_file, trim_tickerlist
|
||||
from freqtrade.misc import file_dump_json
|
||||
|
||||
# Change this if modifying BTC_UNITEST testdatafile
|
||||
_BTC_UNITTEST_LENGTH = 13681
|
||||
@ -45,12 +46,11 @@ def _clean_test_file(file: str) -> None:
|
||||
os.rename(file_swp, file)
|
||||
|
||||
|
||||
def test_load_data_30min_ticker(default_conf, ticker_history, mocker, caplog):
|
||||
caplog.set_level(logging.INFO)
|
||||
def test_load_data_30min_ticker(ticker_history, mocker, caplog) -> None:
|
||||
"""
|
||||
Test load_data() with 30 min ticker
|
||||
"""
|
||||
mocker.patch('freqtrade.optimize.get_ticker_history', return_value=ticker_history)
|
||||
mocker.patch.dict('freqtrade.main._CONF', default_conf)
|
||||
|
||||
exchange._API = Bittrex({'key': '', 'secret': ''})
|
||||
|
||||
file = 'freqtrade/tests/testdata/BTC_UNITTEST-30.json'
|
||||
_backup_file(file, copy_file=True)
|
||||
@ -62,12 +62,11 @@ def test_load_data_30min_ticker(default_conf, ticker_history, mocker, caplog):
|
||||
_clean_test_file(file)
|
||||
|
||||
|
||||
def test_load_data_5min_ticker(default_conf, ticker_history, mocker, caplog):
|
||||
caplog.set_level(logging.INFO)
|
||||
def test_load_data_5min_ticker(ticker_history, mocker, caplog) -> None:
|
||||
"""
|
||||
Test load_data() with 5 min ticker
|
||||
"""
|
||||
mocker.patch('freqtrade.optimize.get_ticker_history', return_value=ticker_history)
|
||||
mocker.patch.dict('freqtrade.main._CONF', default_conf)
|
||||
|
||||
exchange._API = Bittrex({'key': '', 'secret': ''})
|
||||
|
||||
file = 'freqtrade/tests/testdata/BTC_ETH-5.json'
|
||||
_backup_file(file, copy_file=True)
|
||||
@ -79,12 +78,11 @@ def test_load_data_5min_ticker(default_conf, ticker_history, mocker, caplog):
|
||||
_clean_test_file(file)
|
||||
|
||||
|
||||
def test_load_data_1min_ticker(default_conf, ticker_history, mocker, caplog):
|
||||
caplog.set_level(logging.INFO)
|
||||
def test_load_data_1min_ticker(ticker_history, mocker, caplog) -> None:
|
||||
"""
|
||||
Test load_data() with 1 min ticker
|
||||
"""
|
||||
mocker.patch('freqtrade.optimize.get_ticker_history', return_value=ticker_history)
|
||||
mocker.patch.dict('freqtrade.main._CONF', default_conf)
|
||||
|
||||
exchange._API = Bittrex({'key': '', 'secret': ''})
|
||||
|
||||
file = 'freqtrade/tests/testdata/BTC_ETH-1.json'
|
||||
_backup_file(file, copy_file=True)
|
||||
@ -96,12 +94,11 @@ def test_load_data_1min_ticker(default_conf, ticker_history, mocker, caplog):
|
||||
_clean_test_file(file)
|
||||
|
||||
|
||||
def test_load_data_with_new_pair_1min(default_conf, ticker_history, mocker, caplog):
|
||||
caplog.set_level(logging.INFO)
|
||||
def test_load_data_with_new_pair_1min(ticker_history, mocker, caplog) -> None:
|
||||
"""
|
||||
Test load_data() with 1 min ticker
|
||||
"""
|
||||
mocker.patch('freqtrade.optimize.get_ticker_history', return_value=ticker_history)
|
||||
mocker.patch.dict('freqtrade.main._CONF', default_conf)
|
||||
|
||||
exchange._API = Bittrex({'key': '', 'secret': ''})
|
||||
|
||||
file = 'freqtrade/tests/testdata/BTC_MEME-1.json'
|
||||
_backup_file(file)
|
||||
@ -113,14 +110,12 @@ def test_load_data_with_new_pair_1min(default_conf, ticker_history, mocker, capl
|
||||
_clean_test_file(file)
|
||||
|
||||
|
||||
def test_testdata_path():
|
||||
def test_testdata_path() -> None:
|
||||
assert os.path.join('freqtrade', 'tests', 'testdata') in make_testdata_path(None)
|
||||
|
||||
|
||||
def test_download_pairs(default_conf, ticker_history, mocker):
|
||||
def test_download_pairs(ticker_history, mocker) -> None:
|
||||
mocker.patch('freqtrade.optimize.__init__.get_ticker_history', return_value=ticker_history)
|
||||
mocker.patch.dict('freqtrade.main._CONF', default_conf)
|
||||
exchange._API = Bittrex({'key': '', 'secret': ''})
|
||||
|
||||
file1_1 = 'freqtrade/tests/testdata/BTC_MEME-1.json'
|
||||
file1_5 = 'freqtrade/tests/testdata/BTC_MEME-5.json'
|
||||
@ -157,13 +152,10 @@ def test_download_pairs(default_conf, ticker_history, mocker):
|
||||
_clean_test_file(file2_5)
|
||||
|
||||
|
||||
def test_download_pairs_exception(default_conf, ticker_history, mocker, caplog):
|
||||
caplog.set_level(logging.INFO)
|
||||
def test_download_pairs_exception(ticker_history, mocker, caplog) -> None:
|
||||
mocker.patch('freqtrade.optimize.__init__.get_ticker_history', return_value=ticker_history)
|
||||
mocker.patch('freqtrade.optimize.__init__.download_backtesting_testdata',
|
||||
side_effect=BaseException('File Error'))
|
||||
mocker.patch.dict('freqtrade.main._CONF', default_conf)
|
||||
exchange._API = Bittrex({'key': '', 'secret': ''})
|
||||
|
||||
file1_1 = 'freqtrade/tests/testdata/BTC_MEME-1.json'
|
||||
file1_5 = 'freqtrade/tests/testdata/BTC_MEME-5.json'
|
||||
@ -179,10 +171,8 @@ def test_download_pairs_exception(default_conf, ticker_history, mocker, caplog):
|
||||
'Failed to download the pair: "BTC-MEME", Interval: 1 min') in caplog.record_tuples
|
||||
|
||||
|
||||
def test_download_backtesting_testdata(default_conf, ticker_history, mocker):
|
||||
def test_download_backtesting_testdata(ticker_history, mocker) -> None:
|
||||
mocker.patch('freqtrade.optimize.__init__.get_ticker_history', return_value=ticker_history)
|
||||
mocker.patch.dict('freqtrade.main._CONF', default_conf)
|
||||
exchange._API = Bittrex({'key': '', 'secret': ''})
|
||||
|
||||
# Download a 1 min ticker file
|
||||
file1 = 'freqtrade/tests/testdata/BTC_XEL-1.json'
|
||||
@ -200,7 +190,7 @@ def test_download_backtesting_testdata(default_conf, ticker_history, mocker):
|
||||
_clean_test_file(file2)
|
||||
|
||||
|
||||
def test_download_backtesting_testdata2(mocker):
|
||||
def test_download_backtesting_testdata2(mocker) -> None:
|
||||
tick = [{'T': 'bar'}, {'T': 'foo'}]
|
||||
mocker.patch('freqtrade.misc.file_dump_json', return_value=None)
|
||||
mocker.patch('freqtrade.optimize.__init__.get_ticker_history', return_value=tick)
|
||||
@ -208,7 +198,7 @@ def test_download_backtesting_testdata2(mocker):
|
||||
assert download_backtesting_testdata(None, pair="BTC-UNITEST", interval=3)
|
||||
|
||||
|
||||
def test_load_tickerdata_file():
|
||||
def test_load_tickerdata_file() -> None:
|
||||
# 7 does not exist in either format.
|
||||
assert not load_tickerdata_file(None, 'BTC_UNITEST', 7)
|
||||
# 1 exists only as a .json
|
||||
@ -219,22 +209,28 @@ def test_load_tickerdata_file():
|
||||
assert _BTC_UNITTEST_LENGTH == len(tickerdata)
|
||||
|
||||
|
||||
def test_init(default_conf, mocker):
|
||||
def test_init(default_conf, mocker) -> None:
|
||||
conf = {'exchange': {'pair_whitelist': []}}
|
||||
mocker.patch('freqtrade.optimize.hyperopt_optimize_conf', return_value=conf)
|
||||
assert {} == optimize.load_data('', pairs=[], refresh_pairs=True,
|
||||
ticker_interval=int(default_conf['ticker_interval']))
|
||||
assert {} == optimize.load_data(
|
||||
'',
|
||||
pairs=[],
|
||||
refresh_pairs=True,
|
||||
ticker_interval=int(default_conf['ticker_interval'])
|
||||
)
|
||||
|
||||
|
||||
def test_tickerdata_to_dataframe():
|
||||
def test_tickerdata_to_dataframe(default_conf) -> None:
|
||||
analyze = Analyze(default_conf)
|
||||
|
||||
timerange = ((None, 'line'), None, -100)
|
||||
tick = load_tickerdata_file(None, 'BTC_UNITEST', 1, timerange=timerange)
|
||||
tickerlist = {'BTC_UNITEST': tick}
|
||||
data = optimize.tickerdata_to_dataframe(tickerlist)
|
||||
data = analyze.tickerdata_to_dataframe(tickerlist)
|
||||
assert len(data['BTC_UNITEST']) == 100
|
||||
|
||||
|
||||
def test_trim_tickerlist():
|
||||
def test_trim_tickerlist() -> None:
|
||||
with open('freqtrade/tests/testdata/BTC_ETH-1.json') as data_file:
|
||||
ticker_list = json.load(data_file)
|
||||
ticker_list_len = len(ticker_list)
|
||||
@ -279,8 +275,11 @@ def test_trim_tickerlist():
|
||||
assert ticker_list_len == ticker_len
|
||||
|
||||
|
||||
def test_file_dump_json():
|
||||
|
||||
def test_file_dump_json() -> None:
|
||||
"""
|
||||
Test file_dump_json()
|
||||
:return: None
|
||||
"""
|
||||
file = 'freqtrade/tests/testdata/test_{id}.json'.format(id=str(uuid.uuid4()))
|
||||
data = {'bar': 'foo'}
|
||||
|
||||
|
@ -1,5 +1,4 @@
|
||||
# pragma pylint: disable=protected-access, invalid-name, missing-docstring
|
||||
|
||||
# pragma pylint: disable=protected-access, invalid-name
|
||||
"""
|
||||
Unit test file for configuration.py
|
||||
"""
|
||||
@ -18,7 +17,6 @@ import freqtrade.tests.conftest as tt # test tools
|
||||
def test_configuration_object() -> None:
|
||||
"""
|
||||
Test that the Configuration object has the mandatory methods
|
||||
:return: None
|
||||
"""
|
||||
assert hasattr(Configuration, '_load_config')
|
||||
assert hasattr(Configuration, '_load_config_file')
|
||||
@ -30,12 +28,11 @@ def test_configuration_object() -> None:
|
||||
def test_load_config_invalid_pair(default_conf, mocker) -> None:
|
||||
"""
|
||||
Test the configuration validator with an invalid PAIR format
|
||||
:param default_conf: Configuration already read from a file (JSON format)
|
||||
:return: None
|
||||
"""
|
||||
mocker.patch.multiple(
|
||||
'freqtrade.configuration.Configuration',
|
||||
_load_config=MagicMock(return_value=[])
|
||||
_load_config=MagicMock(return_value=[]),
|
||||
show_info=MagicMock
|
||||
)
|
||||
conf = deepcopy(default_conf)
|
||||
conf['exchange']['pair_whitelist'].append('BTC-ETH')
|
||||
@ -48,12 +45,11 @@ def test_load_config_invalid_pair(default_conf, mocker) -> None:
|
||||
def test_load_config_missing_attributes(default_conf, mocker) -> None:
|
||||
"""
|
||||
Test the configuration validator with a missing attribute
|
||||
:param default_conf: Configuration already read from a file (JSON format)
|
||||
:return: None
|
||||
"""
|
||||
mocker.patch.multiple(
|
||||
'freqtrade.configuration.Configuration',
|
||||
_load_config=MagicMock(return_value=[])
|
||||
_load_config=MagicMock(return_value=[]),
|
||||
show_info=MagicMock
|
||||
)
|
||||
conf = deepcopy(default_conf)
|
||||
conf.pop('exchange')
|
||||
@ -65,12 +61,12 @@ def test_load_config_missing_attributes(default_conf, mocker) -> None:

def test_load_config_file(default_conf, mocker, caplog) -> None:
    """
    Test _load_config_file() method
    :return:
    Test Configuration._load_config_file() method
    """
    mocker.patch.multiple(
        'freqtrade.configuration.Configuration',
        _load_config=MagicMock(return_value=[])
        _load_config=MagicMock(return_value=[]),
        show_info=MagicMock
    )
    file_mock = mocker.patch('freqtrade.configuration.open', mocker.mock_open(
        read_data=json.dumps(default_conf)
@ -85,6 +81,9 @@ def test_load_config_file(default_conf, mocker, caplog) -> None:


def test_load_config(default_conf, mocker) -> None:
    """
    Test Configuration._load_config() without any cli params
    """
    mocker.patch('freqtrade.configuration.open', mocker.mock_open(
        read_data=json.dumps(default_conf)
    ))
@ -100,6 +99,9 @@ def test_load_config(default_conf, mocker) -> None:


def test_load_config_with_params(default_conf, mocker) -> None:
    """
    Test Configuration._load_config() with cli params used
    """
    mocker.patch('freqtrade.configuration.open', mocker.mock_open(
        read_data=json.dumps(default_conf)
    ))
@ -123,6 +125,9 @@ def test_load_config_with_params(default_conf, mocker) -> None:


def test_show_info(default_conf, mocker, caplog) -> None:
    """
    Test Configuration.show_info()
    """
    mocker.patch('freqtrade.configuration.open', mocker.mock_open(
        read_data=json.dumps(default_conf)
    ))
@ -155,3 +160,118 @@ def test_show_info(default_conf, mocker, caplog) -> None:
        'Dry run is disabled. (--dry_run_db ignored)',
        caplog.record_tuples
    )


def test_setup_configuration_without_arguments(mocker, default_conf, caplog) -> None:
    """
    Test setup_configuration() function
    """
    mocker.patch('freqtrade.configuration.Configuration.show_info', MagicMock)
    mocker.patch('freqtrade.configuration.open', mocker.mock_open(
        read_data=json.dumps(default_conf)
    ))

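    # Only --config, --strategy and the backtesting subcommand are passed here, so the
    # optional flags (--live, --realistic-simulation, -r, --timerange, --export) must
    # stay out of the resulting config.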
    args = [
        '--config', 'config.json',
        '--strategy', 'default_strategy',
        'backtesting'
    ]

    args = Arguments(args, '').get_parsed_arg()

    configuration = Configuration(args)
    config = configuration.get_config()
    assert 'max_open_trades' in config
    assert 'stake_currency' in config
    assert 'stake_amount' in config
    assert 'exchange' in config
    assert 'pair_whitelist' in config['exchange']
    assert 'datadir' in config
    assert tt.log_has(
        'Parameter --datadir detected: {} ...'.format(config['datadir']),
        caplog.record_tuples
    )
    assert 'ticker_interval' in config
    assert not tt.log_has('Parameter -i/--ticker-interval detected ...', caplog.record_tuples)

    assert 'live' not in config
    assert not tt.log_has('Parameter -l/--live detected ...', caplog.record_tuples)

    assert 'realistic_simulation' not in config
    assert not tt.log_has('Parameter --realistic-simulation detected ...', caplog.record_tuples)

    assert 'refresh_pairs' not in config
    assert not tt.log_has('Parameter -r/--refresh-pairs-cached detected ...', caplog.record_tuples)

    assert 'timerange' not in config
    assert 'export' not in config


def test_setup_configuration_with_arguments(mocker, default_conf, caplog) -> None:
    """
    Test setup_configuration() function
    """
    mocker.patch('freqtrade.configuration.Configuration.show_info', MagicMock)
    mocker.patch('freqtrade.configuration.open', mocker.mock_open(
        read_data=json.dumps(default_conf)
    ))

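    # Every optional backtesting flag is supplied, so each value should be copied into
    # the config and a matching "Parameter ... detected" entry should be logged.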
    args = [
        '--config', 'config.json',
        '--strategy', 'default_strategy',
        '--datadir', '/foo/bar',
        'backtesting',
        '--ticker-interval', '1',
        '--live',
        '--realistic-simulation',
        '--refresh-pairs-cached',
        '--timerange', ':100',
        '--export', '/bar/foo'
    ]

    args = Arguments(args, '').get_parsed_arg()

    configuration = Configuration(args)
    config = configuration.get_config()
    assert 'max_open_trades' in config
    assert 'stake_currency' in config
    assert 'stake_amount' in config
    assert 'exchange' in config
    assert 'pair_whitelist' in config['exchange']
    assert 'datadir' in config
    assert tt.log_has(
        'Parameter --datadir detected: {} ...'.format(config['datadir']),
        caplog.record_tuples
    )
    assert 'ticker_interval' in config
    assert tt.log_has('Parameter -i/--ticker-interval detected ...', caplog.record_tuples)
    assert tt.log_has(
        'Using ticker_interval: 1 ...',
        caplog.record_tuples
    )

    assert 'live' in config
    assert tt.log_has('Parameter -l/--live detected ...', caplog.record_tuples)

    assert 'realistic_simulation' in config
    assert tt.log_has('Parameter --realistic-simulation detected ...', caplog.record_tuples)
    assert tt.log_has('Using max_open_trades: 1 ...', caplog.record_tuples)

    assert 'refresh_pairs' in config
    assert tt.log_has('Parameter -r/--refresh-pairs-cached detected ...', caplog.record_tuples)

    assert 'timerange' in config
    assert tt.log_has(
        'Parameter --timerange detected: {} ...'.format(config['timerange']),
        caplog.record_tuples
    )

    assert 'export' in config
    assert tt.log_has(
        'Parameter --export detected: {} ...'.format(config['export']),
        caplog.record_tuples
    )


@ -26,11 +26,12 @@ def test_logger_object() -> None:

def test_get_logger() -> None:
    """
    Test logger.get_logger()
    Test Logger.get_logger() and Logger._init_logger()
    :return: None
    """
    logger = Logger(name='Foo', level=logging.WARNING)
    logger = Logger(name='get_logger', level=logging.WARNING)
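    # get_logger() should hand back the exact logging object created by _init_logger().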
    get_logger = logger.get_logger()
    assert logger.logger is get_logger
    assert get_logger is not None
    assert hasattr(get_logger, 'debug')
    assert hasattr(get_logger, 'info')
@ -39,15 +40,57 @@ def test_get_logger() -> None:
    assert hasattr(get_logger, 'exception')


def test_set_name() -> None:
    """
    Test Logger.set_name()
    :return: None
    """
    logger = Logger(name='set_name')
    assert logger.name == 'set_name'

    logger.set_name('set_name_new')
    assert logger.name == 'set_name_new'


def test_set_level() -> None:
    """
    Test Logger.set_level()
    :return: None
    """
    logger = Logger(name='Foo', level=logging.WARNING)
    assert logger.level == logging.WARNING
    assert logger.get_logger().level == logging.WARNING

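    # set_level() should update both the wrapper's level attribute and the underlying logging.Logger.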
    logger.set_level(logging.INFO)
    assert logger.level == logging.INFO
    assert logger.get_logger().level == logging.INFO


def test_sending_msg(caplog) -> None:
    """
    Test sending a logging message
    :return: None
    """
    logger = Logger(name='FooBar', level=logging.WARNING).get_logger()
    logger = Logger(name='sending_msg', level=logging.WARNING).get_logger()

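    # The logger is created at WARNING level, so the INFO record below must be dropped
    # while the WARNING record is expected to reach caplog.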
    logger.info('I am an INFO message')
    assert ('FooBar', logging.INFO, 'I am an INFO message') not in caplog.record_tuples
    assert ('sending_msg', logging.INFO, 'I am an INFO message') not in caplog.record_tuples

    logger.warning('I am a WARNING message')
    assert ('FooBar', logging.WARNING, 'I am a WARNING message') in caplog.record_tuples
    assert ('sending_msg', logging.WARNING, 'I am a WARNING message') in caplog.record_tuples


def test_set_format(caplog) -> None:
    """
    Test Logger.set_format()
    :return: None
    """
    log = Logger(name='set_format')
    logger = log.get_logger()

    logger.info('I am the first message')
    assert ('set_format', logging.INFO, 'I am the first message') in caplog.record_tuples

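    # propagate=True presumably keeps records flowing up to the root logger, so caplog
    # can still capture them after the handler format is replaced.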
    log.set_format(log_format='%(message)s', propagate=True)
    logger.info('I am the second message')
    assert ('set_format', logging.INFO, 'I am the second message') in caplog.record_tuples