Merge pull request #2430 from freqtrade/startup_period_bt

Add Startup period for strategies
hroff-1902 2019-10-28 23:33:30 +03:00 committed by GitHub
commit 5254059fe4
18 changed files with 321 additions and 77 deletions

View File

@ -72,6 +72,8 @@ The exported trades can be used for [further analysis](#further-backtest-result-
freqtrade backtesting --export trades --export-filename=backtest_samplestrategy.json
```
Please also read about the [strategy startup period](strategy-customization.md#strategy-startup-period).
#### Supplying custom fee value
Sometimes your account has certain fee rebates (fee reductions starting with a certain account size or monthly volume), which are not visible to ccxt.

View File

@ -117,6 +117,37 @@ def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame
Look into the [user_data/strategies/sample_strategy.py](https://github.com/freqtrade/freqtrade/blob/develop/user_data/strategies/sample_strategy.py).
Then uncomment indicators you need.
### Strategy startup period
Most indicators have an unstable startup period, in which they are either not available or the calculation is incorrect. This can lead to inconsistencies, since Freqtrade does not know how long this unstable period lasts.
To account for this, the strategy can be assigned the `startup_candle_count` attribute.
This should be set to the maximum number of candles that the strategy requires to calculate stable indicators.
In this example strategy, this should be set to 100 (`startup_candle_count = 100`), since the longest needed history is 100 candles.
``` python
dataframe['ema100'] = ta.EMA(dataframe, timeperiod=100)
```
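For orientation, a trimmed-down strategy sketch that declares the attribute might look as follows (hypothetical class name; required attributes such as `minimal_roi`, `stoploss` and the buy/sell methods are omitted here):

``` python
import talib.abstract as ta
from freqtrade.strategy.interface import IStrategy


class Ema100Strategy(IStrategy):
    # The EMA below needs 100 candles of history before its values are reliable
    startup_candle_count: int = 100

    def populate_indicators(self, dataframe, metadata: dict):
        dataframe['ema100'] = ta.EMA(dataframe, timeperiod=100)
        return dataframe
```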
By letting the bot know how much history is needed, backtest trades can start right at the beginning of the specified timerange during backtesting and hyperopt.
!!! Warning
    `startup_candle_count` should be below `ohlcv_candle_limit` (which is 500 for most exchanges) - since only this many candles will be available during Dry-Run/Live Trade operations.
#### Example
Let's try to backtest 1 month (January 2019) of 5m candles, using an example strategy with EMA100, as above.
``` bash
freqtrade backtesting --timerange 20190101-20190201 --ticker-interval 5m
```
Assuming `startup_candle_count` is set to 100, backtesting knows it needs 100 candles to generate valid buy signals. It will load data from `20190101 - (100 * 5m)` - which is ~2018-12-31 15:40:00.
If this data is available, indicators will be calculated with this extended timerange. The unstable startup period (up to 2019-01-01 00:00:00) will then be removed before backtesting starts.
!!! Note
    If data for the startup period is not available, the timerange will be adjusted to account for this startup period - so backtesting would start at 2019-01-01 08:20:00.
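To double-check the numbers in this example yourself, here is the plain arithmetic (standard-library Python only, not freqtrade code):

``` python
from datetime import datetime, timedelta

requested_start = datetime(2019, 1, 1)
startup = 100 * timedelta(minutes=5)   # startup_candle_count * timeframe

print(requested_start - startup)       # 2018-12-31 15:40:00 - where data loading begins
print(requested_start + startup)       # 2019-01-01 08:20:00 - fallback backtest start if no extra data exists
```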
### Buy signal rules
Edit the method `populate_buy_trend()` in your strategy file to update your buy strategy.

View File

@ -1,11 +1,14 @@
"""
This module contains the argument manager class
"""
import logging
import re
from typing import Optional

import arrow

logger = logging.getLogger(__name__)


class TimeRange:
    """
@ -27,6 +30,34 @@ class TimeRange:
        return (self.starttype == other.starttype and self.stoptype == other.stoptype
                and self.startts == other.startts and self.stopts == other.stopts)
    def subtract_start(self, seconds) -> None:
        """
        Subtracts <seconds> from startts if startts is set.
        :param seconds: Seconds to subtract from starttime
        :return: None (Modifies the object in place)
        """
        if self.startts:
            self.startts = self.startts - seconds

    def adjust_start_if_necessary(self, ticker_interval_secs: int, startup_candles: int,
                                  min_date: arrow.Arrow) -> None:
        """
        Adjust startts by <startup_candles> candles.
        Applies only if no startup-candles have been available.
        :param ticker_interval_secs: Ticker interval in seconds e.g. `timeframe_to_seconds('5m')`
        :param startup_candles: Number of candles to move start-date forward
        :param min_date: Minimum data date loaded. Key criterion to decide whether the start-time
                         has to be moved
        :return: None (Modifies the object in place)
        """
        if (not self.starttype or (startup_candles
                                   and min_date.timestamp >= self.startts)):
            # If no startts was defined, or backtest-data starts at the defined backtest-date
            logger.warning("Moving start-date by %s candles to account for startup time.",
                           startup_candles)
            self.startts = (min_date.timestamp + ticker_interval_secs * startup_candles)
            self.starttype = 'date'
    @staticmethod
    def parse_timerange(text: Optional[str]):
        """

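For a quick feel of the two new helpers, here is a small usage sketch (the timestamps mirror the unit tests further down; 300 seconds corresponds to 5m candles):

``` python
import arrow
from freqtrade.configuration import TimeRange

# Widen the window backwards by 20 five-minute candles
tr = TimeRange('date', 'date', 1510694100, 1510780500)
tr.subtract_start(20 * 300)
assert tr.startts == 1510694100 - 20 * 300

# Here the loaded data starts exactly at the requested date, so no startup
# candles were available - the start is pushed forward by 20 candles instead.
tr = TimeRange('date', 'date', 1510694100, 1510780500)
tr.adjust_start_if_necessary(300, 20, arrow.Arrow(2017, 11, 14, 21, 15))
assert tr.startts == 1510694100 + 20 * 300
```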
View File

@ -8,17 +8,19 @@ Includes:
import logging
import operator
from copy import deepcopy
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple

import arrow
import pytz
from pandas import DataFrame

from freqtrade import OperationalException, misc
from freqtrade.configuration import TimeRange
from freqtrade.data.converter import parse_ticker_dataframe, trades_to_ohlcv
from freqtrade.exchange import Exchange, timeframe_to_minutes
from freqtrade.exchange import Exchange, timeframe_to_minutes, timeframe_to_seconds

logger = logging.getLogger(__name__)
@ -49,6 +51,19 @@ def trim_tickerlist(tickerlist: List[Dict], timerange: TimeRange) -> List[Dict]:
    return tickerlist[start_index:stop_index]
def trim_dataframe(df: DataFrame, timerange: TimeRange) -> DataFrame:
    """
    Trim dataframe based on given timerange
    """
    if timerange.starttype == 'date':
        start = datetime.fromtimestamp(timerange.startts, tz=pytz.utc)
        df = df.loc[df['date'] >= start, :]
    if timerange.stoptype == 'date':
        stop = datetime.fromtimestamp(timerange.stopts, tz=pytz.utc)
        df = df.loc[df['date'] <= stop, :]
    return df
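As a usage sketch (condensed from `test_trim_dataframe` further down; the data directory and pair are placeholders for whatever 1m data you have downloaded):

``` python
from pathlib import Path

from freqtrade.configuration import TimeRange
from freqtrade.data.history import load_data, trim_dataframe

datadir = Path('user_data/data')   # assumption: directory containing downloaded 1m candles
df = load_data(datadir=datadir, ticker_interval='1m',
               pairs=['UNITTEST/BTC'])['UNITTEST/BTC']
start = int(df.iloc[0]['date'].timestamp())

# Drop the first 30 candles (30 minutes of 1m data) from the front
trimmed = trim_dataframe(df, TimeRange('date', None, start + 1800, 0))
assert len(trimmed) == len(df) - 30
```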
def load_tickerdata_file(datadir: Path, pair: str, ticker_interval: str,
                         timerange: Optional[TimeRange] = None) -> Optional[list]:
    """
@ -113,7 +128,8 @@ def load_pair_history(pair: str,
                      refresh_pairs: bool = False,
                      exchange: Optional[Exchange] = None,
                      fill_up_missing: bool = True,
                      drop_incomplete: bool = True
                      drop_incomplete: bool = True,
                      startup_candles: int = 0,
                      ) -> DataFrame:
    """
    Loads cached ticker history for the given pair.
@ -126,9 +142,15 @@ def load_pair_history(pair: str,
    :param exchange: Exchange object (needed when using "refresh_pairs")
    :param fill_up_missing: Fill missing values with "No action"-candles
    :param drop_incomplete: Drop last candle assuming it may be incomplete.
    :param startup_candles: Additional candles to load at the start of the period
    :return: DataFrame with ohlcv data
    """
    timerange_startup = deepcopy(timerange)
    if startup_candles > 0 and timerange_startup:
        logger.info('Using indicator startup period: %s ...', startup_candles)
        timerange_startup.subtract_start(timeframe_to_seconds(ticker_interval) * startup_candles)
    # The user forced the refresh of pairs
    if refresh_pairs:
        download_pair_history(datadir=datadir,
@ -137,11 +159,11 @@ def load_pair_history(pair: str,
                              ticker_interval=ticker_interval,
                              timerange=timerange)

    pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange)
    pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange_startup)
    if pairdata:
        if timerange:
        if timerange_startup:
            _validate_pairdata(pair, pairdata, timerange)
            _validate_pairdata(pair, pairdata, timerange_startup)
        return parse_ticker_dataframe(pairdata, ticker_interval, pair=pair,
                                      fill_missing=fill_up_missing,
                                      drop_incomplete=drop_incomplete)
@ -160,10 +182,22 @@ def load_data(datadir: Path,
              exchange: Optional[Exchange] = None,
              timerange: Optional[TimeRange] = None,
              fill_up_missing: bool = True,
              startup_candles: int = 0,
              fail_without_data: bool = False
              ) -> Dict[str, DataFrame]:
    """
    Loads ticker history data for a list of pairs
    :return: dict(<pair>:<tickerlist>)
    :param datadir: Path to the data storage location.
    :param ticker_interval: Ticker-interval (e.g. "5m")
    :param pairs: List of pairs to load
    :param refresh_pairs: Refresh pairs from exchange.
                          (Note: Requires exchange to be passed as well.)
    :param exchange: Exchange object (needed when using "refresh_pairs")
    :param timerange: Limit data to be loaded to this timerange
    :param fill_up_missing: Fill missing values with "No action"-candles
    :param startup_candles: Additional candles to load at the start of the period
    :param fail_without_data: Raise OperationalException if no data is found.
    :return: dict(<pair>:<Dataframe>)
    TODO: refresh_pairs is still used by edge to keep the data uptodate.
        This should be replaced in the future. Instead, writing the current candles to disk
        from dataprovider should be implemented, as this would avoid loading ohlcv data twice.
@ -176,9 +210,13 @@ def load_data(datadir: Path,
                                datadir=datadir, timerange=timerange,
                                refresh_pairs=refresh_pairs,
                                exchange=exchange,
                                fill_up_missing=fill_up_missing)
                                fill_up_missing=fill_up_missing,
                                startup_candles=startup_candles)
        if hist is not None:
            result[pair] = hist

    if fail_without_data and not result:
        raise OperationalException("No data found. Terminating.")
    return result

View File

@ -100,7 +100,8 @@ class Edge:
            ticker_interval=self.strategy.ticker_interval,
            refresh_pairs=self._refresh_pairs,
            exchange=self.exchange,
            timerange=self._timerange
            timerange=self._timerange,
            startup_candles=self.strategy.startup_candle_count,
        )

        if not data:

View File

@ -228,6 +228,7 @@ class Exchange:
        self.validate_pairs(config['exchange']['pair_whitelist'])
        self.validate_ordertypes(config.get('order_types', {}))
        self.validate_order_time_in_force(config.get('order_time_in_force', {}))
        self.validate_required_startup_candles(config.get('startup_candle_count', 0))
        # Converts the interval provided in minutes in config to seconds
        self.markets_refresh_interval: int = exchange_config.get(
@ -443,6 +444,16 @@ class Exchange:
            raise OperationalException(
                f'Time in force policies are not supported for {self.name} yet.')

    def validate_required_startup_candles(self, startup_candles) -> None:
        """
        Checks if required startup_candles is more than ohlcv_candle_limit.
        Requires a grace-period of 5 candles - so a startup-period of up to 495 is allowed by default.
        """
        if startup_candles + 5 > self._ft_has['ohlcv_candle_limit']:
            raise OperationalException(
                f"This strategy requires {startup_candles} candles to start. "
                f"{self.name} only provides {self._ft_has['ohlcv_candle_limit']}.")
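With the default `ohlcv_candle_limit` of 500, the check above behaves like this simplified stand-in (illustration only, not the real implementation):

``` python
def startup_period_ok(startup_candles: int, ohlcv_candle_limit: int = 500) -> bool:
    # keep a grace period of 5 candles below the exchange limit
    return startup_candles + 5 <= ohlcv_candle_limit

assert startup_period_ok(20)        # typical strategy value - fine
assert startup_period_ok(495)       # largest value that still passes with the default limit
assert not startup_period_ok(600)   # the real check raises OperationalException here
```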
    def exchange_has(self, endpoint: str) -> bool:
        """
        Checks if exchange implements a specific API endpoint.

View File

@ -15,7 +15,7 @@ from freqtrade import OperationalException
from freqtrade.configuration import TimeRange
from freqtrade.data import history
from freqtrade.data.dataprovider import DataProvider
from freqtrade.exchange import timeframe_to_minutes
from freqtrade.exchange import timeframe_to_minutes, timeframe_to_seconds
from freqtrade.misc import file_dump_json
from freqtrade.persistence import Trade
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
@ -90,6 +90,8 @@ class Backtesting:
        self.ticker_interval = str(self.config.get('ticker_interval'))
        self.ticker_interval_mins = timeframe_to_minutes(self.ticker_interval)

        # Get maximum required startup period
        self.required_startup = max([strat.startup_candle_count for strat in self.strategylist])

        # Load one (first) strategy
        self._set_strategy(self.strategylist[0])
@ -103,6 +105,31 @@ class Backtesting:
        # And the regular "stoploss" function would not apply to that case
        self.strategy.order_types['stoploss_on_exchange'] = False

    def load_bt_data(self):
        timerange = TimeRange.parse_timerange(None if self.config.get(
            'timerange') is None else str(self.config.get('timerange')))

        data = history.load_data(
            datadir=Path(self.config['datadir']),
            pairs=self.config['exchange']['pair_whitelist'],
            ticker_interval=self.ticker_interval,
            timerange=timerange,
            startup_candles=self.required_startup,
            fail_without_data=True,
        )

        min_date, max_date = history.get_timeframe(data)

        logger.info(
            'Loading data from %s up to %s (%s days)..',
            min_date.isoformat(), max_date.isoformat(), (max_date - min_date).days
        )
        # Adjust startts forward if not enough data is available
        timerange.adjust_start_if_necessary(timeframe_to_seconds(self.ticker_interval),
                                            self.required_startup, min_date)
        return data, timerange
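A minimal way to exercise the new helper from a script could look like this (assuming `config` is a fully parsed freqtrade configuration dict):

``` python
from freqtrade.optimize.backtesting import Backtesting

backtesting = Backtesting(config)             # `config` is assumed to be a valid bot configuration
data, timerange = backtesting.load_bt_data()  # candles including the startup period + adjusted timerange
```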
    def _generate_text_table(self, data: Dict[str, Dict], results: DataFrame,
                             skip_nan: bool = False) -> str:
        """
@ -412,39 +439,18 @@ class Backtesting:
        :return: None
        """
        data: Dict[str, Any] = {}
        pairs = self.config['exchange']['pair_whitelist']
        logger.info('Using stake_currency: %s ...', self.config['stake_currency'])
        logger.info('Using stake_amount: %s ...', self.config['stake_amount'])

        timerange = TimeRange.parse_timerange(None if self.config.get(
            'timerange') is None else str(self.config.get('timerange')))
        data = history.load_data(
            datadir=Path(self.config['datadir']),
            pairs=pairs,
            ticker_interval=self.ticker_interval,
            timerange=timerange,
        )

        if not data:
            logger.critical("No data found. Terminating.")
            return

        # Use max_open_trades in backtesting, except --disable-max-market-positions is set
        if self.config.get('use_max_market_positions', True):
            max_open_trades = self.config['max_open_trades']
        else:
            logger.info('Ignoring max_open_trades (--disable-max-market-positions was used) ...')
            max_open_trades = 0

        data, timerange = self.load_bt_data()

        all_results = {}

        min_date, max_date = history.get_timeframe(data)
        logger.info(
            'Backtesting with data from %s up to %s (%s days)..',
            min_date.isoformat(),
            max_date.isoformat(),
            (max_date - min_date).days
        )

        for strat in self.strategylist:
            logger.info("Running backtesting for Strategy %s", strat.get_strategy_name())
            self._set_strategy(strat)
@ -452,6 +458,15 @@ class Backtesting:
            # need to reprocess data every time to populate signals
            preprocessed = self.strategy.tickerdata_to_dataframe(data)

            # Trim startup period from analyzed dataframe
            for pair, df in preprocessed.items():
                preprocessed[pair] = history.trim_dataframe(df, timerange)
            min_date, max_date = history.get_timeframe(preprocessed)

            logger.info(
                'Backtesting with data from %s up to %s (%s days)..',
                min_date.isoformat(), max_date.isoformat(), (max_date - min_date).days
            )
            # Execute backtest and print results
            all_results[self.strategy.get_strategy_name()] = self.backtest(
                {

View File

@ -22,8 +22,7 @@ from pandas import DataFrame
from skopt import Optimizer
from skopt.space import Dimension

from freqtrade.configuration import TimeRange
from freqtrade.data.history import load_data, get_timeframe
from freqtrade.data.history import get_timeframe, trim_dataframe
from freqtrade.misc import round_dict
from freqtrade.optimize.backtesting import Backtesting
# Import IHyperOpt and IHyperOptLoss to allow unpickling classes from these modules # Import IHyperOpt and IHyperOptLoss to allow unpickling classes from these modules
@ -379,30 +378,19 @@ class Hyperopt:
        )

    def start(self) -> None:
        timerange = TimeRange.parse_timerange(None if self.config.get(
            'timerange') is None else str(self.config.get('timerange')))
        data = load_data(
            datadir=Path(self.config['datadir']),
            pairs=self.config['exchange']['pair_whitelist'],
            ticker_interval=self.backtesting.ticker_interval,
            timerange=timerange
        )
        if not data:
            logger.critical("No data found. Terminating.")
            return
        data, timerange = self.backtesting.load_bt_data()

        preprocessed = self.backtesting.strategy.tickerdata_to_dataframe(data)

        # Trim startup period from analyzed dataframe
        for pair, df in preprocessed.items():
            preprocessed[pair] = trim_dataframe(df, timerange)

        min_date, max_date = get_timeframe(data)
        logger.info(
            'Hyperopting with data from %s up to %s (%s days)..',
            min_date.isoformat(),
            max_date.isoformat(),
            (max_date - min_date).days
            min_date.isoformat(), max_date.isoformat(), (max_date - min_date).days
        )
        preprocessed = self.backtesting.strategy.tickerdata_to_dataframe(data)
        dump(preprocessed, self.tickerdata_pickle)
        # We don't need exchange instance anymore while running hyperopt

View File

@ -57,6 +57,7 @@ class StrategyResolver(IResolver):
            ("order_time_in_force", None, False),
            ("stake_currency", None, False),
            ("stake_amount", None, False),
            ("startup_candle_count", None, False),
            ("use_sell_signal", True, True),
            ("sell_profit_only", False, True),
            ("ignore_roi_if_buy_signal", False, True),

View File

@ -39,6 +39,9 @@ class DefaultStrategy(IStrategy):
        'stoploss_on_exchange': False
    }

    # Number of candles the strategy requires before producing valid signals
    startup_candle_count: int = 20

    # Optional time in force for orders
    order_time_in_force = {
        'buy': 'gtc',
@ -105,9 +108,6 @@ class DefaultStrategy(IStrategy):
        # EMA - Exponential Moving Average
        dataframe['ema10'] = ta.EMA(dataframe, timeperiod=10)

        # SMA - Simple Moving Average
        dataframe['sma'] = ta.SMA(dataframe, timeperiod=40)

        return dataframe

    def populate_buy_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:

View File

@ -103,6 +103,9 @@ class IStrategy(ABC):
    # run "populate_indicators" only for new candle
    process_only_new_candles: bool = False

    # Count of candles the strategy requires before producing valid signals
    startup_candle_count: int = 0

    # Class level variables (intentional) containing
    # the dataprovider (dp) (access to other candles, historic data, ...)
    # and wallets - access to the current balance.
@ -421,6 +424,7 @@ class IStrategy(ABC):
    def tickerdata_to_dataframe(self, tickerdata: Dict[str, List]) -> Dict[str, DataFrame]:
        """
        Creates a dataframe and populates indicators for given ticker data
        Used by optimize operations only, not during dry / live runs.
        """
        return {pair: self.advise_indicators(pair_data, {'pair': pair})
                for pair, pair_data in tickerdata.items()}

View File

@ -95,6 +95,23 @@ def test_load_data_1min_ticker(ticker_history, mocker, caplog, testdatadir) -> N
    _clean_test_file(file)
def test_load_data_startup_candles(mocker, caplog, default_conf, testdatadir) -> None:
    ltfmock = mocker.patch('freqtrade.data.history.load_tickerdata_file',
                           MagicMock(return_value=None))
    timerange = TimeRange('date', None, 1510639620, 0)
    history.load_pair_history(pair='UNITTEST/BTC', ticker_interval='1m',
                              datadir=testdatadir, timerange=timerange,
                              startup_candles=20,
                              )
    assert log_has(
        'Using indicator startup period: 20 ...', caplog
    )
    assert ltfmock.call_count == 1
    assert ltfmock.call_args_list[0][1]['timerange'] != timerange
    # startts is 20 minutes earlier
    assert ltfmock.call_args_list[0][1]['timerange'].startts == timerange.startts - 20 * 60
def test_load_data_with_new_pair_1min(ticker_history_list, mocker, caplog,
                                      default_conf, testdatadir) -> None:
    """
@ -427,6 +444,46 @@ def test_trim_tickerlist(testdatadir) -> None:
    assert not ticker
def test_trim_dataframe(testdatadir) -> None:
    data = history.load_data(
        datadir=testdatadir,
        ticker_interval='1m',
        pairs=['UNITTEST/BTC']
    )['UNITTEST/BTC']
    min_date = int(data.iloc[0]['date'].timestamp())
    max_date = int(data.iloc[-1]['date'].timestamp())
    data_modify = data.copy()

    # Remove first 30 minutes (1800 s)
    tr = TimeRange('date', None, min_date + 1800, 0)
    data_modify = history.trim_dataframe(data_modify, tr)
    assert not data_modify.equals(data)
    assert len(data_modify) < len(data)
    assert len(data_modify) == len(data) - 30
    assert all(data_modify.iloc[-1] == data.iloc[-1])
    assert all(data_modify.iloc[0] == data.iloc[30])

    data_modify = data.copy()
    # Remove last 30 minutes (1800 s)
    tr = TimeRange(None, 'date', 0, max_date - 1800)
    data_modify = history.trim_dataframe(data_modify, tr)
    assert not data_modify.equals(data)
    assert len(data_modify) < len(data)
    assert len(data_modify) == len(data) - 30
    assert all(data_modify.iloc[0] == data.iloc[0])
    assert all(data_modify.iloc[-1] == data.iloc[-31])

    data_modify = data.copy()
    # Remove first 25 and last 30 minutes (1800 s)
    tr = TimeRange('date', 'date', min_date + 1500, max_date - 1800)
    data_modify = history.trim_dataframe(data_modify, tr)
    assert not data_modify.equals(data)
    assert len(data_modify) < len(data)
    assert len(data_modify) == len(data) - 55
    # first row matches 25th original row
    assert all(data_modify.iloc[0] == data.iloc[25])
def test_file_dump_json_tofile(testdatadir) -> None:
    file = testdatadir / 'test_{id}.json'.format(id=str(uuid.uuid4()))
    data = {'bar': 'foo'}

View File

@ -256,7 +256,7 @@ def test_edge_heartbeat_calculate(mocker, edge_conf):
def mocked_load_data(datadir, pairs=[], ticker_interval='0m', refresh_pairs=False,
                     timerange=None, exchange=None):
                     timerange=None, exchange=None, *args, **kwargs):
    hz = 0.1
    base = 0.001

View File

@ -523,6 +523,24 @@ def test_validate_order_types_not_in_config(default_conf, mocker):
        Exchange(conf)
def test_validate_required_startup_candles(default_conf, mocker, caplog):
    api_mock = MagicMock()
    mocker.patch('freqtrade.exchange.Exchange.name', PropertyMock(return_value='Binance'))
    mocker.patch('freqtrade.exchange.Exchange._init_ccxt', api_mock)
    mocker.patch('freqtrade.exchange.Exchange.validate_timeframes', MagicMock())
    mocker.patch('freqtrade.exchange.Exchange._load_async_markets', MagicMock())
    mocker.patch('freqtrade.exchange.Exchange.validate_pairs', MagicMock())

    default_conf['startup_candle_count'] = 20
    ex = Exchange(default_conf)
    assert ex
    default_conf['startup_candle_count'] = 600

    with pytest.raises(OperationalException, match=r'This strategy requires 600.*'):
        Exchange(default_conf)
def test_exchange_has(default_conf, mocker):
    exchange = get_patched_exchange(mocker, default_conf)
    assert not exchange.exchange_has('ASDFASDF')

View File

@ -117,7 +117,7 @@ def simple_backtest(config, contour, num_results, mocker, testdatadir) -> None:
def mocked_load_data(datadir, pairs=[], ticker_interval='0m', refresh_pairs=False,
                     timerange=None, exchange=None, live=False):
                     timerange=None, exchange=None, live=False, *args, **kwargs):
    tickerdata = history.load_tickerdata_file(datadir, 'UNITTEST/BTC', '1m', timerange=timerange)
    pairdata = {'UNITTEST/BTC': parse_ticker_dataframe(tickerdata, '1m', pair="UNITTEST/BTC",
                                                       fill_missing=True)}
@ -494,7 +494,7 @@ def test_backtesting_start_no_data(default_conf, mocker, caplog, testdatadir) ->
    def get_timeframe(input1):
        return Arrow(2017, 11, 14, 21, 17), Arrow(2017, 11, 14, 22, 59)

    mocker.patch('freqtrade.data.history.load_data', MagicMock(return_value={}))
    mocker.patch('freqtrade.data.history.load_pair_history', MagicMock(return_value=None))
    mocker.patch('freqtrade.data.history.get_timeframe', get_timeframe)
    mocker.patch('freqtrade.exchange.Exchange.refresh_latest_ohlcv', MagicMock())
    patch_exchange(mocker)
@ -511,10 +511,8 @@ def test_backtesting_start_no_data(default_conf, mocker, caplog, testdatadir) ->
    default_conf['timerange'] = '20180101-20180102'
    backtesting = Backtesting(default_conf)
    with pytest.raises(OperationalException, match='No data found. Terminating.'):
        backtesting.start()
    # check the logs, that will contain the backtest result
    assert log_has('No data found. Terminating.', caplog)


def test_backtest(default_conf, fee, mocker, testdatadir) -> None:
@ -838,6 +836,8 @@ def test_backtest_start_timerange(default_conf, mocker, caplog, testdatadir):
        f'Using data directory: {testdatadir} ...',
        'Using stake_currency: BTC ...',
        'Using stake_amount: 0.001 ...',
        'Loading data from 2017-11-14T20:57:00+00:00 '
        'up to 2017-11-14T22:58:00+00:00 (0 days)..',
        'Backtesting with data from 2017-11-14T21:17:00+00:00 '
        'up to 2017-11-14T22:58:00+00:00 (0 days)..',
        'Parameter --enable-position-stacking detected ...'
@ -892,6 +892,8 @@ def test_backtest_start_multi_strat(default_conf, mocker, caplog, testdatadir):
        f'Using data directory: {testdatadir} ...',
        'Using stake_currency: BTC ...',
        'Using stake_amount: 0.001 ...',
        'Loading data from 2017-11-14T20:57:00+00:00 '
        'up to 2017-11-14T22:58:00+00:00 (0 days)..',
        'Backtesting with data from 2017-11-14T21:17:00+00:00 '
        'up to 2017-11-14T22:58:00+00:00 (0 days)..',
        'Parameter --enable-position-stacking detected ...',

View File

@ -228,7 +228,7 @@ def test_start(mocker, default_conf, caplog) -> None:
def test_start_no_data(mocker, default_conf, caplog) -> None:
    patched_configuration_load_config_file(mocker, default_conf)
    mocker.patch('freqtrade.optimize.hyperopt.load_data', MagicMock(return_value={}))
    mocker.patch('freqtrade.data.history.load_pair_history', MagicMock(return_value=None))
    mocker.patch(
        'freqtrade.optimize.hyperopt.get_timeframe',
        MagicMock(return_value=(datetime(2017, 12, 10), datetime(2017, 12, 13)))
@ -242,10 +242,9 @@ def test_start_no_data(mocker, default_conf, caplog) -> None:
        '--epochs', '5'
    ]
    args = get_args(args)

    with pytest.raises(OperationalException, match='No data found. Terminating.'):
        start_hyperopt(args)
    assert log_has('No data found. Terminating.', caplog)


def test_start_filelock(mocker, default_conf, caplog) -> None:
    start_mock = MagicMock(side_effect=Timeout(Hyperopt.get_lock_filename(default_conf)))
@ -393,7 +392,8 @@ def test_roi_table_generation(hyperopt) -> None:
def test_start_calls_optimizer(mocker, default_conf, caplog, capsys) -> None:
    dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
    mocker.patch('freqtrade.optimize.hyperopt.load_data', MagicMock())
    mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
                 MagicMock(return_value=(MagicMock(), None)))
    mocker.patch(
        'freqtrade.optimize.hyperopt.get_timeframe',
        MagicMock(return_value=(datetime(2017, 12, 10), datetime(2017, 12, 13)))
@ -608,7 +608,8 @@ def test_continue_hyperopt(mocker, default_conf, caplog):
def test_print_json_spaces_all(mocker, default_conf, caplog, capsys) -> None:
    dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
    mocker.patch('freqtrade.optimize.hyperopt.load_data', MagicMock())
    mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
                 MagicMock(return_value=(MagicMock(), None)))
    mocker.patch(
        'freqtrade.optimize.hyperopt.get_timeframe',
        MagicMock(return_value=(datetime(2017, 12, 10), datetime(2017, 12, 13)))
@ -645,7 +646,8 @@ def test_print_json_spaces_all(mocker, default_conf, caplog, capsys) -> None:
def test_print_json_spaces_roi_stoploss(mocker, default_conf, caplog, capsys) -> None:
    dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
    mocker.patch('freqtrade.optimize.hyperopt.load_data', MagicMock())
    mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
                 MagicMock(return_value=(MagicMock(), None)))
    mocker.patch(
        'freqtrade.optimize.hyperopt.get_timeframe',
        MagicMock(return_value=(datetime(2017, 12, 10), datetime(2017, 12, 13)))
@ -682,7 +684,8 @@ def test_print_json_spaces_roi_stoploss(mocker, default_conf, caplog, capsys) ->
def test_simplified_interface_roi_stoploss(mocker, default_conf, caplog, capsys) -> None:
    dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
    mocker.patch('freqtrade.optimize.hyperopt.load_data', MagicMock())
    mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
                 MagicMock(return_value=(MagicMock(), None)))
    mocker.patch(
        'freqtrade.optimize.hyperopt.get_timeframe',
        MagicMock(return_value=(datetime(2017, 12, 10), datetime(2017, 12, 13)))
@ -728,7 +731,8 @@ def test_simplified_interface_roi_stoploss(mocker, default_conf, caplog, capsys)
def test_simplified_interface_all_failed(mocker, default_conf, caplog, capsys) -> None:
    mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
    mocker.patch('freqtrade.optimize.hyperopt.load_data', MagicMock())
    mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
                 MagicMock(return_value=(MagicMock(), None)))
    mocker.patch(
        'freqtrade.optimize.hyperopt.get_timeframe',
        MagicMock(return_value=(datetime(2017, 12, 10), datetime(2017, 12, 13)))
@ -757,7 +761,8 @@ def test_simplified_interface_all_failed(mocker, default_conf, caplog, capsys) -
def test_simplified_interface_buy(mocker, default_conf, caplog, capsys) -> None:
    dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
    mocker.patch('freqtrade.optimize.hyperopt.load_data', MagicMock())
    mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
                 MagicMock(return_value=(MagicMock(), None)))
    mocker.patch(
        'freqtrade.optimize.hyperopt.get_timeframe',
        MagicMock(return_value=(datetime(2017, 12, 10), datetime(2017, 12, 13)))
@ -802,7 +807,8 @@ def test_simplified_interface_buy(mocker, default_conf, caplog, capsys) -> None:
def test_simplified_interface_sell(mocker, default_conf, caplog, capsys) -> None:
    dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
    mocker.patch('freqtrade.optimize.hyperopt.load_data', MagicMock())
    mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
                 MagicMock(return_value=(MagicMock(), None)))
    mocker.patch(
        'freqtrade.optimize.hyperopt.get_timeframe',
        MagicMock(return_value=(datetime(2017, 12, 10), datetime(2017, 12, 13)))
@ -853,7 +859,8 @@ def test_simplified_interface_sell(mocker, default_conf, caplog, capsys) -> None
])
def test_simplified_interface_failed(mocker, default_conf, caplog, capsys, method, space) -> None:
    mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
    mocker.patch('freqtrade.optimize.hyperopt.load_data', MagicMock())
    mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
                 MagicMock(return_value=(MagicMock(), None)))
    mocker.patch(
        'freqtrade.optimize.hyperopt.get_timeframe',
        MagicMock(return_value=(datetime(2017, 12, 10), datetime(2017, 12, 13)))

View File

@ -1,10 +1,11 @@
# pragma pylint: disable=missing-docstring, C0103
import arrow
import pytest

from freqtrade.configuration import TimeRange


def test_parse_timerange_incorrect() -> None:
def test_parse_timerange_incorrect():
    assert TimeRange('date', None, 1274486400, 0) == TimeRange.parse_timerange('20100522-')
    assert TimeRange(None, 'date', 0, 1274486400) == TimeRange.parse_timerange('-20100522')
@ -28,3 +29,37 @@ def test_parse_timerange_incorrect() -> None:
    with pytest.raises(Exception, match=r'Incorrect syntax.*'):
        TimeRange.parse_timerange('-')
def test_subtract_start():
    x = TimeRange('date', 'date', 1274486400, 1438214400)
    x.subtract_start(300)
    assert x.startts == 1274486400 - 300

    # Do nothing if no startdate exists
    x = TimeRange(None, 'date', 0, 1438214400)
    x.subtract_start(300)
    assert not x.startts

    x = TimeRange('date', None, 1274486400, 0)
    x.subtract_start(300)
    assert x.startts == 1274486400 - 300


def test_adjust_start_if_necessary():
    min_date = arrow.Arrow(2017, 11, 14, 21, 15, 00)
    x = TimeRange('date', 'date', 1510694100, 1510780500)
    # Adjust by 20 candles - min_date == startts
    x.adjust_start_if_necessary(300, 20, min_date)
    assert x.startts == 1510694100 + (20 * 300)

    x = TimeRange('date', 'date', 1510700100, 1510780500)
    # Do nothing, startup is set and different min_date
    x.adjust_start_if_necessary(300, 20, min_date)
    assert x.startts == 1510694100 + (20 * 300)

    x = TimeRange(None, 'date', 0, 1510780500)
    # Adjust by 20 candles = 20 * 5m
    x.adjust_start_if_necessary(300, 20, min_date)
    assert x.startts == 1510694100 + (20 * 300)

View File

@ -59,6 +59,9 @@ class SampleStrategy(IStrategy):
    sell_profit_only = False
    ignore_roi_if_buy_signal = False

    # Number of candles the strategy requires before producing valid signals
    startup_candle_count: int = 20

    # Optional order type mapping.
    order_types = {
        'buy': 'limit',