Merge pull request #1413 from freqtrade/feat/data_helpers
Feat/data helpers
commit 1dbcab0b09
8  freqtrade/data/__init__.py  (new file)
@@ -0,0 +1,8 @@
"""
Module to handle data operations for freqtrade
"""

# limit what's imported when using `from freqtrade.data import *`
__all__ = [
    'converter'
]
freqtrade/data/converter.py
@@ -1,5 +1,5 @@
"""
Functions to analyze ticker data with indicators and produce buy and sell signals
Functions to convert data from one format to another
"""
import logging
import pandas as pd
@@ -10,7 +10,7 @@ logger = logging.getLogger(__name__)

def parse_ticker_dataframe(ticker: list) -> DataFrame:
    """
    Analyses the trend for the given ticker history
    Converts a ticker-list (format ccxt.fetch_ohlcv) to a Dataframe
    :param ticker: ticker list, as returned by exchange.async_get_candle_history
    :return: DataFrame
    """
@@ -32,6 +32,7 @@ def parse_ticker_dataframe(ticker: list) -> DataFrame:
        'volume': 'max',
    })
    frame.drop(frame.tail(1).index, inplace=True)  # eliminate partial candle
    logger.debug('Dropping last candle')
    return frame
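A minimal usage sketch of the converter, assuming the ccxt-style OHLCV row layout named in the docstring above; the sample rows match the test data used later in this PR. Illustrative only, not part of the commit.

```python
# Hypothetical usage sketch -- not part of the diff.
from freqtrade.data.converter import parse_ticker_dataframe

# Rows as returned by ccxt fetch_ohlcv: [timestamp_ms, open, high, low, close, volume]
ticks = [
    [1509836520000, 0.00162008, 0.00162008, 0.00162008, 0.00162008, 108.14853839],
    [1509836580000, 0.00161, 0.00161, 0.00161, 0.00161, 82.390199],
    [1509836640000, 0.00161, 0.00161, 0.00161, 0.00161, 10.0],
]

frame = parse_ticker_dataframe(ticks)
# The last (potentially partial) candle is dropped, so 3 input rows yield 2 dataframe rows.
print(len(frame))
```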
251  freqtrade/data/history.py  (new file)
@@ -0,0 +1,251 @@
"""
Handle historic data (ohlcv).
includes:
* load data for a pair (or a list of pairs) from disk
* download data from exchange and store to disk
"""

import gzip

import logging
from pathlib import Path
from typing import Optional, List, Dict, Tuple, Any

import arrow
from pandas import DataFrame
import ujson

from freqtrade import misc, constants, OperationalException
from freqtrade.data.converter import parse_ticker_dataframe
from freqtrade.exchange import Exchange
from freqtrade.arguments import TimeRange

logger = logging.getLogger(__name__)


def json_load(data):
    """
    load data with ujson
    Use this to have a consistent experience,
    otherwise "precise_float" needs to be passed to all load operations
    """
    return ujson.load(data, precise_float=True)


def trim_tickerlist(tickerlist: List[Dict], timerange: TimeRange) -> List[Dict]:
    """
    Trim tickerlist based on given timerange
    """
    if not tickerlist:
        return tickerlist

    start_index = 0
    stop_index = len(tickerlist)

    if timerange.starttype == 'line':
        stop_index = timerange.startts
    if timerange.starttype == 'index':
        start_index = timerange.startts
    elif timerange.starttype == 'date':
        while (start_index < len(tickerlist) and
               tickerlist[start_index][0] < timerange.startts * 1000):
            start_index += 1

    if timerange.stoptype == 'line':
        start_index = len(tickerlist) + timerange.stopts
    if timerange.stoptype == 'index':
        stop_index = timerange.stopts
    elif timerange.stoptype == 'date':
        while (stop_index > 0 and
               tickerlist[stop_index-1][0] > timerange.stopts * 1000):
            stop_index -= 1

    if start_index > stop_index:
        raise ValueError(f'The timerange [{timerange.startts},{timerange.stopts}] is incorrect')

    return tickerlist[start_index:stop_index]
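To illustrate how trim_tickerlist interprets the TimeRange variants handled above, a small sketch (illustrative, not part of the commit); it assumes the TimeRange(starttype, stoptype, startts, stopts) constructor used throughout this PR.

```python
# Hypothetical sketch -- trimming a tickerlist with the TimeRange variants handled above.
from freqtrade.arguments import TimeRange
from freqtrade.data.history import trim_tickerlist

# 100 one-minute candles, made-up timestamps
ticks = [[1510000000000 + i * 60_000, 1, 1, 1, 1, 10] for i in range(100)]

last_five = trim_tickerlist(ticks, TimeRange(None, 'line', 0, -5))       # keep the last 5 rows
window = trim_tickerlist(ticks, TimeRange('index', 'index', 5, 10))      # rows 5..9
since = trim_tickerlist(ticks, TimeRange('date', None, 1510000600, 0))   # from a unix timestamp onwards
assert len(last_five) == 5 and len(window) == 5
```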
def load_tickerdata_file(
        datadir: Optional[Path], pair: str,
        ticker_interval: str,
        timerange: Optional[TimeRange] = None) -> Optional[list]:
    """
    Load a pair from file, either .json.gz or .json
    :return: tickerlist or None if unsuccessful
    """
    path = make_testdata_path(datadir)
    pair_s = pair.replace('/', '_')
    file = path.joinpath(f'{pair_s}-{ticker_interval}.json')
    gzipfile = file.with_suffix(file.suffix + '.gz')

    # Try gzip file first, otherwise regular json file.
    if gzipfile.is_file():
        logger.debug('Loading ticker data from file %s', gzipfile)
        with gzip.open(gzipfile) as tickerdata:
            pairdata = json_load(tickerdata)
    elif file.is_file():
        logger.debug('Loading ticker data from file %s', file)
        with open(file) as tickerdata:
            pairdata = json_load(tickerdata)
    else:
        return None

    if timerange:
        pairdata = trim_tickerlist(pairdata, timerange)
    return pairdata


def load_pair_history(pair: str,
                      ticker_interval: str,
                      datadir: Optional[Path],
                      timerange: TimeRange = TimeRange(None, None, 0, 0),
                      refresh_pairs: bool = False,
                      exchange: Optional[Exchange] = None,
                      ) -> DataFrame:
    """
    Loads cached ticker history for the given pair.
    :return: DataFrame with ohlcv data
    """

    pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange)
    # If the user forces a refresh of the pair data
    if refresh_pairs:
        if not exchange:
            raise OperationalException("Exchange needs to be initialized when "
                                       "calling load_data with refresh_pairs=True")

        logger.info('Download data for all pairs and store them in %s', datadir)
        download_pair_history(datadir=datadir,
                              exchange=exchange,
                              pair=pair,
                              tick_interval=ticker_interval,
                              timerange=timerange)

    if pairdata:
        if timerange.starttype == 'date' and pairdata[0][0] > timerange.startts * 1000:
            logger.warning('Missing data at start for pair %s, data starts at %s',
                           pair, arrow.get(pairdata[0][0] // 1000).strftime('%Y-%m-%d %H:%M:%S'))
        if timerange.stoptype == 'date' and pairdata[-1][0] < timerange.stopts * 1000:
            logger.warning('Missing data at end for pair %s, data ends at %s',
                           pair,
                           arrow.get(pairdata[-1][0] // 1000).strftime('%Y-%m-%d %H:%M:%S'))
        return parse_ticker_dataframe(pairdata)
    else:
        logger.warning('No data for pair: "%s", Interval: %s. '
                       'Use --refresh-pairs-cached to download the data',
                       pair, ticker_interval)
        return None


def load_data(datadir: Optional[Path],
              ticker_interval: str,
              pairs: List[str],
              refresh_pairs: bool = False,
              exchange: Optional[Exchange] = None,
              timerange: TimeRange = TimeRange(None, None, 0, 0)) -> Dict[str, DataFrame]:
    """
    Loads ticker history data for a list of pairs with the given parameters
    :return: dict(<pair>:<dataframe>)
    """
    result = {}

    for pair in pairs:
        hist = load_pair_history(pair=pair, ticker_interval=ticker_interval,
                                 datadir=datadir, timerange=timerange,
                                 refresh_pairs=refresh_pairs,
                                 exchange=exchange)
        if hist is not None:
            result[pair] = hist
    return result
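A hedged sketch of the two new entry points; the pair names and data directory are illustrative only, not part of the commit.

```python
# Hypothetical usage of the new freqtrade.data.history helpers.
from pathlib import Path

from freqtrade.data.history import load_data, load_pair_history

# Load cached history for one pair as a DataFrame (None if no file is found).
df = load_pair_history(pair='ETH/BTC', ticker_interval='5m',
                       datadir=Path('user_data/data'))  # illustrative directory

# Or load several pairs at once: returns {pair: DataFrame}, silently skipping missing pairs.
data = load_data(datadir=Path('user_data/data'), ticker_interval='5m',
                 pairs=['ETH/BTC', 'XRP/BTC'])
for pair, frame in data.items():
    print(pair, len(frame))
```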

def make_testdata_path(datadir: Optional[Path]) -> Path:
    """Return the path where testdata files are stored"""
    return datadir or (Path(__file__).parent.parent / "tests" / "testdata").resolve()


def load_cached_data_for_updating(filename: Path, tick_interval: str,
                                  timerange: Optional[TimeRange]) -> Tuple[List[Any],
                                                                           Optional[int]]:
    """
    Load cached data and choose what part of the data should be updated
    """

    since_ms = None

    # user sets timerange, so find the start time
    if timerange:
        if timerange.starttype == 'date':
            since_ms = timerange.startts * 1000
        elif timerange.stoptype == 'line':
            num_minutes = timerange.stopts * constants.TICKER_INTERVAL_MINUTES[tick_interval]
            since_ms = arrow.utcnow().shift(minutes=num_minutes).timestamp * 1000

    # read the cached file
    if filename.is_file():
        with open(filename, "rt") as file:
            data = json_load(file)
            # remove the last item, it could be an incomplete candle
            if data:
                data.pop()
    else:
        data = []

    if data:
        if since_ms and since_ms < data[0][0]:
            # Earlier data than existing data requested, redownload all
            data = []
        else:
            # a part of the data was already downloaded, so download only the missing part
            since_ms = data[-1][0] + 1

    return (data, since_ms)
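A short walk-through of the caching decision implemented above, with made-up timestamps (illustrative only, not part of the commit).

```python
# Hypothetical walk-through of the caching decision above (timestamps are made up).
cached = [[1_500_000_000_000, 1, 1, 1, 1, 10],
          [1_500_000_060_000, 1, 1, 1, 1, 10],
          [1_500_000_120_000, 1, 1, 1, 1, 10]]  # last candle may be partial

cached.pop()                      # the possibly-partial last candle is always dropped
since_ms = cached[-1][0] + 1      # case 1: resume 1 ms after the newest remaining candle

requested_start_ms = 1_499_999_000_000
if requested_start_ms < cached[0][0]:
    cached = []                   # case 2: start lies before the cache -> redownload everything
```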
def download_pair_history(datadir: Optional[Path],
                          exchange: Exchange,
                          pair: str,
                          tick_interval: str = '5m',
                          timerange: Optional[TimeRange] = None) -> bool:
    """
    Download the latest ticker intervals from the exchange for the pair passed in parameters
    The data is downloaded starting from the last correct ticker interval data that
    exists in the cache. If timerange starts earlier than the data in the cache,
    the full data will be redownloaded

    Based on @Rybolov's work: https://github.com/rybolov/freqtrade-data
    :param pair: pair to download
    :param tick_interval: ticker interval
    :param timerange: range of time to download
    :return: bool with success state

    """
    try:
        path = make_testdata_path(datadir)
        filepair = pair.replace("/", "_")
        filename = path.joinpath(f'{filepair}-{tick_interval}.json')

        logger.info('Download the pair: "%s", Interval: %s', pair, tick_interval)

        data, since_ms = load_cached_data_for_updating(filename, tick_interval, timerange)

        logger.debug("Current Start: %s", misc.format_ms_time(data[1][0]) if data else 'None')
        logger.debug("Current End: %s", misc.format_ms_time(data[-1][0]) if data else 'None')

        # Default since_ms to 30 days if nothing is given
        new_data = exchange.get_history(pair=pair, tick_interval=tick_interval,
                                        since_ms=since_ms if since_ms
                                        else
                                        int(arrow.utcnow().shift(days=-30).float_timestamp) * 1000)
        data.extend(new_data)

        logger.debug("New Start: %s", misc.format_ms_time(data[0][0]))
        logger.debug("New End: %s", misc.format_ms_time(data[-1][0]))

        misc.file_dump_json(filename, data)
        return True
    except BaseException:
        logger.info('Failed to download the pair: "%s", Interval: %s',
                    pair, tick_interval)
        return False
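Illustrative only (not part of the commit): refreshing and reloading a single pair with the new helpers; the Exchange instance is assumed to be configured elsewhere.

```python
# Hypothetical helper wrapping the new download/load functions.
from pathlib import Path

from freqtrade.data.history import download_pair_history, load_pair_history


def refresh_pair(exchange, datadir: Path, pair: str = 'ETH/BTC') -> bool:
    """Re-download and reload one pair; `exchange` is a configured freqtrade Exchange (assumed)."""
    ok = download_pair_history(datadir=datadir, exchange=exchange,
                               pair=pair, tick_interval='5m')
    if ok:
        load_pair_history(pair=pair, ticker_interval='5m', datadir=datadir)
    return ok
```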
freqtrade/edge/__init__.py
@@ -1,17 +1,19 @@
# pragma pylint: disable=W0603
""" Edge positioning package """
import logging
from pathlib import Path
from typing import Any, Dict, NamedTuple
import arrow

import arrow
import numpy as np
import utils_find_1st as utf1st
from pandas import DataFrame

import freqtrade.optimize as optimize
from freqtrade import constants, OperationalException
from freqtrade.arguments import Arguments
from freqtrade.arguments import TimeRange
from freqtrade.data import history
from freqtrade.optimize import get_timeframe
from freqtrade.strategy.interface import SellType

@@ -47,7 +49,7 @@ class Edge():
        self.strategy = strategy
        self.ticker_interval = self.strategy.ticker_interval
        self.tickerdata_to_dataframe = self.strategy.tickerdata_to_dataframe
        self.get_timeframe = optimize.get_timeframe
        self.get_timeframe = get_timeframe
        self.advise_sell = self.strategy.advise_sell
        self.advise_buy = self.strategy.advise_buy

@@ -97,8 +99,8 @@ class Edge():
        logger.info('Using stake_currency: %s ...', self.config['stake_currency'])
        logger.info('Using local backtesting data (using whitelist in given config) ...')

        data = optimize.load_data(
            self.config['datadir'],
        data = history.load_data(
            datadir=Path(self.config['datadir']) if self.config.get('datadir') else None,
            pairs=pairs,
            ticker_interval=self.ticker_interval,
            refresh_pairs=self._refresh_pairs,
freqtrade/exchange/__init__.py
@@ -14,7 +14,7 @@ import ccxt.async_support as ccxt_async
from pandas import DataFrame

from freqtrade import constants, OperationalException, DependencyException, TemporaryError
from freqtrade.exchange.exchange_helpers import parse_ticker_dataframe
from freqtrade.data.converter import parse_ticker_dataframe

logger = logging.getLogger(__name__)
freqtrade/freqtradebot.py
@@ -14,15 +14,15 @@ from requests.exceptions import RequestException

from freqtrade import (DependencyException, OperationalException,
                       TemporaryError, __version__, constants, persistence)
from freqtrade.exchange import Exchange
from freqtrade.wallets import Wallets
from freqtrade.data.converter import order_book_to_dataframe
from freqtrade.edge import Edge
from freqtrade.exchange import Exchange
from freqtrade.persistence import Trade
from freqtrade.rpc import RPCManager, RPCMessageType
from freqtrade.resolvers import StrategyResolver, PairListResolver
from freqtrade.state import State
from freqtrade.strategy.interface import SellType, IStrategy
from freqtrade.exchange.exchange_helpers import order_book_to_dataframe
from freqtrade.wallets import Wallets


logger = logging.getLogger(__name__)
freqtrade/optimize/__init__.py
@@ -1,69 +1,18 @@
# pragma pylint: disable=missing-docstring

import gzip
try:
    import ujson as json
    _UJSON = True
except ImportError:
    # see mypy/issues/1153
    import json  # type: ignore
    _UJSON = False
import logging
import os
from datetime import datetime
from typing import Optional, List, Dict, Tuple, Any
from typing import Dict, Tuple
import operator

import arrow
from pandas import DataFrame

from freqtrade import misc, constants, OperationalException
from freqtrade.exchange import Exchange
from freqtrade.arguments import TimeRange
from freqtrade.optimize.default_hyperopt import DefaultHyperOpts  # noqa: F401

logger = logging.getLogger(__name__)


def json_load(data):
    """Try to load data with ujson"""
    if _UJSON:
        return json.load(data, precise_float=True)
    else:
        return json.load(data)


def trim_tickerlist(tickerlist: List[Dict], timerange: TimeRange) -> List[Dict]:
    if not tickerlist:
        return tickerlist

    start_index = 0
    stop_index = len(tickerlist)

    if timerange.starttype == 'line':
        stop_index = timerange.startts
    if timerange.starttype == 'index':
        start_index = timerange.startts
    elif timerange.starttype == 'date':
        while (start_index < len(tickerlist) and
               tickerlist[start_index][0] < timerange.startts * 1000):
            start_index += 1

    if timerange.stoptype == 'line':
        start_index = len(tickerlist) + timerange.stopts
    if timerange.stoptype == 'index':
        stop_index = timerange.stopts
    elif timerange.stoptype == 'date':
        while (stop_index > 0 and
               tickerlist[stop_index-1][0] > timerange.stopts * 1000):
            stop_index -= 1

    if start_index > stop_index:
        raise ValueError(f'The timerange [{timerange.startts},{timerange.stopts}] is incorrect')

    return tickerlist[start_index:stop_index]


def get_timeframe(data: Dict[str, DataFrame]) -> Tuple[arrow.Arrow, arrow.Arrow]:
    """
    Get the maximum timeframe for the given backtest data
@@ -98,197 +47,3 @@ def validate_backtest_data(data: Dict[str, DataFrame], min_date: datetime,
            logger.warning("%s has missing frames: expected %s, got %s, that's %s missing values",
                           pair, expected_frames, dflen, expected_frames - dflen)
    return found_missing


def load_tickerdata_file(
        datadir: str, pair: str,
        ticker_interval: str,
        timerange: Optional[TimeRange] = None) -> Optional[List[Dict]]:
    """
    Load a pair from file,
    :return: dict OR empty if unsuccessful
    """
    path = make_testdata_path(datadir)
    pair_s = pair.replace('/', '_')
    file = os.path.join(path, f'{pair_s}-{ticker_interval}.json')
    gzipfile = file + '.gz'

    # If the file does not exist we download it when None is returned.
    # If file exists, read the file, load the json
    if os.path.isfile(gzipfile):
        logger.debug('Loading ticker data from file %s', gzipfile)
        with gzip.open(gzipfile) as tickerdata:
            pairdata = json.load(tickerdata)
    elif os.path.isfile(file):
        logger.debug('Loading ticker data from file %s', file)
        with open(file) as tickerdata:
            pairdata = json.load(tickerdata)
    else:
        return None

    if timerange:
        pairdata = trim_tickerlist(pairdata, timerange)
    return pairdata


def load_data(datadir: str,
              ticker_interval: str,
              pairs: List[str],
              refresh_pairs: Optional[bool] = False,
              exchange: Optional[Exchange] = None,
              timerange: TimeRange = TimeRange(None, None, 0, 0)) -> Dict[str, List]:
    """
    Loads ticker history data for the given parameters
    :return: dict
    """
    result = {}

    # If the user forces a refresh of the pair data
    if refresh_pairs:
        logger.info('Download data for all pairs and store them in %s', datadir)
        if not exchange:
            raise OperationalException("Exchange needs to be initialized when "
                                       "calling load_data with refresh_pairs=True")
        download_pairs(datadir, exchange, pairs, ticker_interval, timerange=timerange)

    for pair in pairs:
        pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange)
        if pairdata:
            if timerange.starttype == 'date' and pairdata[0][0] > timerange.startts * 1000:
                logger.warning('Missing data at start for pair %s, data starts at %s',
                               pair,
                               arrow.get(pairdata[0][0] // 1000).strftime('%Y-%m-%d %H:%M:%S'))
            if timerange.stoptype == 'date' and pairdata[-1][0] < timerange.stopts * 1000:
                logger.warning('Missing data at end for pair %s, data ends at %s',
                               pair,
                               arrow.get(pairdata[-1][0] // 1000).strftime('%Y-%m-%d %H:%M:%S'))
            result[pair] = pairdata
        else:
            logger.warning(
                'No data for pair: "%s", Interval: %s. '
                'Use --refresh-pairs-cached to download the data',
                pair,
                ticker_interval
            )

    return result


def make_testdata_path(datadir: str) -> str:
    """Return the path where testdata files are stored"""
    return datadir or os.path.abspath(
        os.path.join(
            os.path.dirname(__file__), '..', 'tests', 'testdata'
        )
    )


def download_pairs(datadir, exchange: Exchange, pairs: List[str],
                   ticker_interval: str,
                   timerange: TimeRange = TimeRange(None, None, 0, 0)) -> bool:
    """For each pair passed in parameters, download the ticker intervals"""
    for pair in pairs:
        try:
            download_backtesting_testdata(datadir,
                                          exchange=exchange,
                                          pair=pair,
                                          tick_interval=ticker_interval,
                                          timerange=timerange)
        except BaseException:
            logger.info(
                'Failed to download the pair: "%s", Interval: %s',
                pair,
                ticker_interval
            )
            return False
    return True


def load_cached_data_for_updating(filename: str,
                                  tick_interval: str,
                                  timerange: Optional[TimeRange]) -> Tuple[
                                      List[Any],
                                      Optional[int]]:
    """
    Load cached data and choose what part of the data should be updated
    """

    since_ms = None

    # user sets timerange, so find the start time
    if timerange:
        if timerange.starttype == 'date':
            since_ms = timerange.startts * 1000
        elif timerange.stoptype == 'line':
            num_minutes = timerange.stopts * constants.TICKER_INTERVAL_MINUTES[tick_interval]
            since_ms = arrow.utcnow().shift(minutes=num_minutes).timestamp * 1000

    # read the cached file
    if os.path.isfile(filename):
        with open(filename, "rt") as file:
            data = json_load(file)
            # remove the last item, because we are not sure if it is correct
            # it could have been fetched while the candle was still incomplete
            if data:
                data.pop()
    else:
        data = []

    if data:
        if since_ms and since_ms < data[0][0]:
            # the data is requested for an earlier period than the cache holds,
            # so fully redownload all the data
            data = []
        else:
            # a part of the data was already downloaded, so
            # download only the missing data
            since_ms = data[-1][0] + 1

    return (data, since_ms)


def download_backtesting_testdata(datadir: str,
                                  exchange: Exchange,
                                  pair: str,
                                  tick_interval: str = '5m',
                                  timerange: Optional[TimeRange] = None) -> None:

    """
    Download the latest ticker intervals from the exchange for the pair passed in parameters
    The data is downloaded starting from the last correct ticker interval data that
    exists in the cache. If timerange starts earlier than the data in the cache,
    the full data will be redownloaded

    Based on @Rybolov's work: https://github.com/rybolov/freqtrade-data
    :param pair: pair to download
    :param tick_interval: ticker interval
    :param timerange: range of time to download
    :return: None

    """
    path = make_testdata_path(datadir)
    filepair = pair.replace("/", "_")
    filename = os.path.join(path, f'{filepair}-{tick_interval}.json')

    logger.info(
        'Download the pair: "%s", Interval: %s',
        pair,
        tick_interval
    )

    data, since_ms = load_cached_data_for_updating(filename, tick_interval, timerange)

    logger.debug("Current Start: %s", misc.format_ms_time(data[1][0]) if data else 'None')
    logger.debug("Current End: %s", misc.format_ms_time(data[-1][0]) if data else 'None')

    # Default since_ms to 30 days if nothing is given
    new_data = exchange.get_history(pair=pair, tick_interval=tick_interval,
                                    since_ms=since_ms if since_ms
                                    else
                                    int(arrow.utcnow().shift(days=-30).float_timestamp) * 1000)
    data.extend(new_data)

    logger.debug("New Start: %s", misc.format_ms_time(data[0][0]))
    logger.debug("New End: %s", misc.format_ms_time(data[-1][0]))

    misc.file_dump_json(filename, data)
freqtrade/optimize/backtesting.py
@@ -18,6 +18,7 @@ from freqtrade import DependencyException, constants
from freqtrade.arguments import Arguments
from freqtrade.configuration import Configuration
from freqtrade.exchange import Exchange
from freqtrade.data import history
from freqtrade.misc import file_dump_json
from freqtrade.persistence import Trade
from freqtrade.resolvers import StrategyResolver
@@ -368,8 +369,8 @@ class Backtesting(object):

        timerange = Arguments.parse_timerange(None if self.config.get(
            'timerange') is None else str(self.config.get('timerange')))
        data = optimize.load_data(
            self.config['datadir'],
        data = history.load_data(
            datadir=Path(self.config['datadir']) if self.config.get('datadir') else None,
            pairs=pairs,
            ticker_interval=self.ticker_interval,
            refresh_pairs=self.config.get('refresh_pairs', False),
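The updated call sites (Edge above, Backtesting here, Hyperopt below) all turn the configured datadir string into a Path, or pass None to fall back to the bundled testdata directory. A sketch of that pattern with illustrative config values (not part of the commit):

```python
# Sketch of the datadir handling used at the updated call sites (config values are illustrative).
from pathlib import Path
from typing import Optional


def resolve_datadir(config: dict) -> Optional[Path]:
    # None makes history.load_data() fall back to make_testdata_path()'s default.
    return Path(config['datadir']) if config.get('datadir') else None


print(resolve_datadir({'datadir': 'user_data/data'}))  # user_data/data
print(resolve_datadir({}))                             # None -> freqtrade/tests/testdata
```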
freqtrade/optimize/hyperopt.py
@@ -5,11 +5,12 @@ This module contains the hyperopt logic
"""

import logging
import multiprocessing
from argparse import Namespace
import os
import sys
from argparse import Namespace
from pathlib import Path
from math import exp
import multiprocessing
from operator import itemgetter
from typing import Any, Dict, List

@@ -20,7 +21,8 @@ from skopt.space import Dimension

from freqtrade.arguments import Arguments
from freqtrade.configuration import Configuration
from freqtrade.optimize import load_data, get_timeframe
from freqtrade.data.history import load_data
from freqtrade.optimize import get_timeframe
from freqtrade.optimize.backtesting import Backtesting
from freqtrade.resolvers import HyperOptResolver

@@ -239,7 +241,7 @@ class Hyperopt(Backtesting):
        timerange = Arguments.parse_timerange(None if self.config.get(
            'timerange') is None else str(self.config.get('timerange')))
        data = load_data(
            datadir=str(self.config.get('datadir')),
            datadir=Path(self.config['datadir']) if self.config.get('datadir') else None,
            pairs=self.config['exchange']['pair_whitelist'],
            ticker_interval=self.ticker_interval,
            timerange=timerange
freqtrade/strategy/interface.py
@@ -13,7 +13,6 @@ import arrow
from pandas import DataFrame

from freqtrade import constants
from freqtrade.exchange.exchange_helpers import parse_ticker_dataframe
from freqtrade.persistence import Trade

logger = logging.getLogger(__name__)
@@ -332,7 +331,7 @@ class IStrategy(ABC):
        """
        Creates a dataframe and populates indicators for given ticker data
        """
        return {pair: self.advise_indicators(parse_ticker_dataframe(pair_data), {'pair': pair})
        return {pair: self.advise_indicators(pair_data, {'pair': pair})
                for pair, pair_data in tickerdata.items()}

    def advise_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
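The change above moves parsing out of the strategy: tickerdata_to_dataframe now receives values that are already DataFrames, because history.load_data applies parse_ticker_dataframe before returning. A small sketch of what callers pass after this PR (illustrative only, not part of the commit):

```python
# Sketch only: what callers of tickerdata_to_dataframe pass before vs. after this PR.
from freqtrade.data.converter import parse_ticker_dataframe

raw = {'UNITTEST/BTC': [[1509836520000, 0.00162008, 0.00162008, 0.00162008, 0.00162008, 108.1]]}

# Before: raw ccxt-style lists were passed and parsed inside the strategy.
# After: the caller parses first (history.load_data already returns DataFrames).
parsed = {pair: parse_ticker_dataframe(ticks) for pair, ticks in raw.items()}
# strategy.tickerdata_to_dataframe(parsed)  # `strategy` is an IStrategy instance (assumed)
```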
freqtrade/tests/conftest.py
@@ -11,7 +11,7 @@ import pytest
from telegram import Chat, Message, Update

from freqtrade import constants
from freqtrade.exchange.exchange_helpers import parse_ticker_dataframe
from freqtrade.data.converter import parse_ticker_dataframe
from freqtrade.exchange import Exchange
from freqtrade.edge import Edge, PairInfo
from freqtrade.freqtradebot import FreqtradeBot
0  freqtrade/tests/data/__init__.py  (new file)
@@ -1,7 +1,7 @@
# pragma pylint: disable=missing-docstring, C0103
import logging

from freqtrade.exchange.exchange_helpers import parse_ticker_dataframe
from freqtrade.data.converter import parse_ticker_dataframe
from freqtrade.tests.conftest import log_has
475  freqtrade/tests/data/test_history.py  (new file)
@ -0,0 +1,475 @@
|
||||
# pragma pylint: disable=missing-docstring, protected-access, C0103
|
||||
|
||||
import json
|
||||
import os
|
||||
from pathlib import Path
|
||||
import uuid
|
||||
from shutil import copyfile
|
||||
|
||||
import arrow
|
||||
from pandas import DataFrame
|
||||
import pytest
|
||||
|
||||
from freqtrade import OperationalException
|
||||
from freqtrade.arguments import TimeRange
|
||||
from freqtrade.data import history
|
||||
from freqtrade.data.history import (download_pair_history,
|
||||
load_cached_data_for_updating,
|
||||
load_tickerdata_file,
|
||||
make_testdata_path,
|
||||
trim_tickerlist)
|
||||
from freqtrade.misc import file_dump_json
|
||||
from freqtrade.tests.conftest import get_patched_exchange, log_has
|
||||
|
||||
# Change this if modifying UNITTEST/BTC testdatafile
|
||||
_BTC_UNITTEST_LENGTH = 13681
|
||||
|
||||
|
||||
def _backup_file(file: str, copy_file: bool = False) -> None:
|
||||
"""
|
||||
Backup existing file to avoid deleting the user file
|
||||
:param file: complete path to the file
|
||||
:param copy_file: keep a copy of the file in place after backing it up
|
||||
:return: None
|
||||
"""
|
||||
file_swp = file + '.swp'
|
||||
if os.path.isfile(file):
|
||||
os.rename(file, file_swp)
|
||||
|
||||
if copy_file:
|
||||
copyfile(file_swp, file)
|
||||
|
||||
|
||||
def _clean_test_file(file: str) -> None:
|
||||
"""
|
||||
Restore the backed-up user file and remove the file created by the test
|
||||
:param file: complete path to the file
|
||||
:return: None
|
||||
"""
|
||||
file_swp = file + '.swp'
|
||||
# 1. Delete file from the test
|
||||
if os.path.isfile(file):
|
||||
os.remove(file)
|
||||
|
||||
# 2. Rollback to the initial file
|
||||
if os.path.isfile(file_swp):
|
||||
os.rename(file_swp, file)
|
||||
|
||||
|
||||
def test_load_data_30min_ticker(mocker, caplog, default_conf) -> None:
|
||||
ld = history.load_pair_history(pair='UNITTEST/BTC', ticker_interval='30m', datadir=None)
|
||||
assert isinstance(ld, DataFrame)
|
||||
assert not log_has('Download the pair: "UNITTEST/BTC", Interval: 30m', caplog.record_tuples)
|
||||
|
||||
|
||||
def test_load_data_7min_ticker(mocker, caplog, default_conf) -> None:
|
||||
ld = history.load_pair_history(pair='UNITTEST/BTC', ticker_interval='7m', datadir=None)
|
||||
assert not isinstance(ld, DataFrame)
|
||||
assert ld is None
|
||||
assert log_has(
|
||||
'No data for pair: "UNITTEST/BTC", Interval: 7m. '
|
||||
'Use --refresh-pairs-cached to download the data', caplog.record_tuples)
|
||||
|
||||
|
||||
def test_load_data_1min_ticker(ticker_history, mocker, caplog) -> None:
|
||||
mocker.patch('freqtrade.exchange.Exchange.get_history', return_value=ticker_history)
|
||||
file = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'UNITTEST_BTC-1m.json')
|
||||
_backup_file(file, copy_file=True)
|
||||
history.load_data(datadir=None, ticker_interval='1m', pairs=['UNITTEST/BTC'])
|
||||
assert os.path.isfile(file) is True
|
||||
assert not log_has('Download the pair: "UNITTEST/BTC", Interval: 1m', caplog.record_tuples)
|
||||
_clean_test_file(file)
|
||||
|
||||
|
||||
def test_load_data_with_new_pair_1min(ticker_history_list, mocker, caplog, default_conf) -> None:
|
||||
"""
|
||||
Test load_pair_history() with 1 min ticker
|
||||
"""
|
||||
mocker.patch('freqtrade.exchange.Exchange.get_history', return_value=ticker_history_list)
|
||||
exchange = get_patched_exchange(mocker, default_conf)
|
||||
file = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'MEME_BTC-1m.json')
|
||||
|
||||
_backup_file(file)
|
||||
# do not download a new pair if refresh_pairs isn't set
|
||||
history.load_pair_history(datadir=None,
|
||||
ticker_interval='1m',
|
||||
refresh_pairs=False,
|
||||
pair='MEME/BTC')
|
||||
assert os.path.isfile(file) is False
|
||||
assert log_has('No data for pair: "MEME/BTC", Interval: 1m. '
|
||||
'Use --refresh-pairs-cached to download the data',
|
||||
caplog.record_tuples)
|
||||
|
||||
# download a new pair if refresh_pairs is set
|
||||
history.load_pair_history(datadir=None,
|
||||
ticker_interval='1m',
|
||||
refresh_pairs=True,
|
||||
exchange=exchange,
|
||||
pair='MEME/BTC')
|
||||
assert os.path.isfile(file) is True
|
||||
assert log_has('Download the pair: "MEME/BTC", Interval: 1m', caplog.record_tuples)
|
||||
with pytest.raises(OperationalException, match=r'Exchange needs to be initialized when.*'):
|
||||
history.load_pair_history(datadir=None,
|
||||
ticker_interval='1m',
|
||||
refresh_pairs=True,
|
||||
exchange=None,
|
||||
pair='MEME/BTC')
|
||||
_clean_test_file(file)
|
||||
|
||||
|
||||
def test_testdata_path() -> None:
|
||||
assert str(Path('freqtrade') / 'tests' / 'testdata') in str(make_testdata_path(None))
|
||||
|
||||
|
||||
def test_load_cached_data_for_updating(mocker) -> None:
|
||||
datadir = Path(__file__).parent.parent.joinpath('testdata')
|
||||
|
||||
test_data = None
|
||||
test_filename = datadir.joinpath('UNITTEST_BTC-1m.json')
|
||||
with open(test_filename, "rt") as file:
|
||||
test_data = json.load(file)
|
||||
|
||||
# change now time to test 'line' cases
|
||||
# now = last cached item + 1 hour
|
||||
now_ts = test_data[-1][0] / 1000 + 60 * 60
|
||||
mocker.patch('arrow.utcnow', return_value=arrow.get(now_ts))
|
||||
|
||||
# timeframe starts earlier than the cached data
|
||||
# should fully update data
|
||||
timerange = TimeRange('date', None, test_data[0][0] / 1000 - 1, 0)
|
||||
data, start_ts = load_cached_data_for_updating(test_filename,
|
||||
'1m',
|
||||
timerange)
|
||||
assert data == []
|
||||
assert start_ts == test_data[0][0] - 1000
|
||||
|
||||
# same with 'line' timeframe
|
||||
num_lines = (test_data[-1][0] - test_data[1][0]) / 1000 / 60 + 120
|
||||
data, start_ts = load_cached_data_for_updating(test_filename,
|
||||
'1m',
|
||||
TimeRange(None, 'line', 0, -num_lines))
|
||||
assert data == []
|
||||
assert start_ts < test_data[0][0] - 1
|
||||
|
||||
# timeframe starts in the center of the cached data
|
||||
# should return the cached data w/o the last item
|
||||
timerange = TimeRange('date', None, test_data[0][0] / 1000 + 1, 0)
|
||||
data, start_ts = load_cached_data_for_updating(test_filename,
|
||||
'1m',
|
||||
timerange)
|
||||
assert data == test_data[:-1]
|
||||
assert test_data[-2][0] < start_ts < test_data[-1][0]
|
||||
|
||||
# same with 'line' timeframe
|
||||
num_lines = (test_data[-1][0] - test_data[1][0]) / 1000 / 60 + 30
|
||||
timerange = TimeRange(None, 'line', 0, -num_lines)
|
||||
data, start_ts = load_cached_data_for_updating(test_filename,
|
||||
'1m',
|
||||
timerange)
|
||||
assert data == test_data[:-1]
|
||||
assert test_data[-2][0] < start_ts < test_data[-1][0]
|
||||
|
||||
# timeframe starts after the cached data
|
||||
# should return the cached data w/o the last item
|
||||
timerange = TimeRange('date', None, test_data[-1][0] / 1000 + 1, 0)
|
||||
data, start_ts = load_cached_data_for_updating(test_filename,
|
||||
'1m',
|
||||
timerange)
|
||||
assert data == test_data[:-1]
|
||||
assert test_data[-2][0] < start_ts < test_data[-1][0]
|
||||
|
||||
# same with 'line' timeframe
|
||||
num_lines = 30
|
||||
timerange = TimeRange(None, 'line', 0, -num_lines)
|
||||
data, start_ts = load_cached_data_for_updating(test_filename,
|
||||
'1m',
|
||||
timerange)
|
||||
assert data == test_data[:-1]
|
||||
assert test_data[-2][0] < start_ts < test_data[-1][0]
|
||||
|
||||
# no timeframe is set
|
||||
# should return the cached data w/o the last item
|
||||
num_lines = 30
|
||||
timerange = TimeRange(None, 'line', 0, -num_lines)
|
||||
data, start_ts = load_cached_data_for_updating(test_filename,
|
||||
'1m',
|
||||
timerange)
|
||||
assert data == test_data[:-1]
|
||||
assert test_data[-2][0] < start_ts < test_data[-1][0]
|
||||
|
||||
# no datafile exist
|
||||
# should return timestamp start time
|
||||
timerange = TimeRange('date', None, now_ts - 10000, 0)
|
||||
data, start_ts = load_cached_data_for_updating(test_filename.with_name('unexist'),
|
||||
'1m',
|
||||
timerange)
|
||||
assert data == []
|
||||
assert start_ts == (now_ts - 10000) * 1000
|
||||
|
||||
# same with 'line' timeframe
|
||||
num_lines = 30
|
||||
timerange = TimeRange(None, 'line', 0, -num_lines)
|
||||
data, start_ts = load_cached_data_for_updating(test_filename.with_name('unexist'),
|
||||
'1m',
|
||||
timerange)
|
||||
assert data == []
|
||||
assert start_ts == (now_ts - num_lines * 60) * 1000
|
||||
|
||||
# no datafile exist, no timeframe is set
|
||||
# should return an empty array and None
|
||||
data, start_ts = load_cached_data_for_updating(test_filename.with_name('unexist'),
|
||||
'1m',
|
||||
None)
|
||||
assert data == []
|
||||
assert start_ts is None
|
||||
|
||||
|
||||
def test_download_pair_history(ticker_history_list, mocker, default_conf) -> None:
|
||||
mocker.patch('freqtrade.exchange.Exchange.get_history', return_value=ticker_history_list)
|
||||
exchange = get_patched_exchange(mocker, default_conf)
|
||||
file1_1 = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'MEME_BTC-1m.json')
|
||||
file1_5 = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'MEME_BTC-5m.json')
|
||||
file2_1 = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'CFI_BTC-1m.json')
|
||||
file2_5 = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'CFI_BTC-5m.json')
|
||||
|
||||
_backup_file(file1_1)
|
||||
_backup_file(file1_5)
|
||||
_backup_file(file2_1)
|
||||
_backup_file(file2_5)
|
||||
|
||||
assert os.path.isfile(file1_1) is False
|
||||
assert os.path.isfile(file2_1) is False
|
||||
|
||||
assert download_pair_history(datadir=None, exchange=exchange,
|
||||
pair='MEME/BTC',
|
||||
tick_interval='1m')
|
||||
assert download_pair_history(datadir=None, exchange=exchange,
|
||||
pair='CFI/BTC',
|
||||
tick_interval='1m')
|
||||
assert not exchange._pairs_last_refresh_time
|
||||
assert os.path.isfile(file1_1) is True
|
||||
assert os.path.isfile(file2_1) is True
|
||||
|
||||
# clean files freshly downloaded
|
||||
_clean_test_file(file1_1)
|
||||
_clean_test_file(file2_1)
|
||||
|
||||
assert os.path.isfile(file1_5) is False
|
||||
assert os.path.isfile(file2_5) is False
|
||||
|
||||
assert download_pair_history(datadir=None, exchange=exchange,
|
||||
pair='MEME/BTC',
|
||||
tick_interval='5m')
|
||||
assert download_pair_history(datadir=None, exchange=exchange,
|
||||
pair='CFI/BTC',
|
||||
tick_interval='5m')
|
||||
assert not exchange._pairs_last_refresh_time
|
||||
assert os.path.isfile(file1_5) is True
|
||||
assert os.path.isfile(file2_5) is True
|
||||
|
||||
# clean files freshly downloaded
|
||||
_clean_test_file(file1_5)
|
||||
_clean_test_file(file2_5)
|
||||
|
||||
|
||||
def test_download_pair_history2(mocker, default_conf) -> None:
|
||||
tick = [
|
||||
[1509836520000, 0.00162008, 0.00162008, 0.00162008, 0.00162008, 108.14853839],
|
||||
[1509836580000, 0.00161, 0.00161, 0.00161, 0.00161, 82.390199]
|
||||
]
|
||||
json_dump_mock = mocker.patch('freqtrade.misc.file_dump_json', return_value=None)
|
||||
mocker.patch('freqtrade.exchange.Exchange.get_history', return_value=tick)
|
||||
exchange = get_patched_exchange(mocker, default_conf)
|
||||
download_pair_history(None, exchange, pair="UNITTEST/BTC", tick_interval='1m')
|
||||
download_pair_history(None, exchange, pair="UNITTEST/BTC", tick_interval='3m')
|
||||
assert json_dump_mock.call_count == 2
|
||||
|
||||
|
||||
def test_download_backtesting_data_exception(ticker_history, mocker, caplog, default_conf) -> None:
|
||||
mocker.patch('freqtrade.exchange.Exchange.get_history',
|
||||
side_effect=BaseException('File Error'))
|
||||
|
||||
exchange = get_patched_exchange(mocker, default_conf)
|
||||
|
||||
file1_1 = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'MEME_BTC-1m.json')
|
||||
file1_5 = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'MEME_BTC-5m.json')
|
||||
_backup_file(file1_1)
|
||||
_backup_file(file1_5)
|
||||
|
||||
assert not download_pair_history(datadir=None, exchange=exchange,
|
||||
pair='MEME/BTC',
|
||||
tick_interval='1m')
|
||||
# clean files freshly downloaded
|
||||
_clean_test_file(file1_1)
|
||||
_clean_test_file(file1_5)
|
||||
assert log_has('Failed to download the pair: "MEME/BTC", Interval: 1m', caplog.record_tuples)
|
||||
|
||||
|
||||
def test_load_tickerdata_file() -> None:
|
||||
# 7 does not exist in either format.
|
||||
assert not load_tickerdata_file(None, 'UNITTEST/BTC', '7m')
|
||||
# 1 exists only as a .json
|
||||
tickerdata = load_tickerdata_file(None, 'UNITTEST/BTC', '1m')
|
||||
assert _BTC_UNITTEST_LENGTH == len(tickerdata)
|
||||
# 8 .json is empty and will fail if it's loaded. .json.gz is a copy of 1.json
|
||||
tickerdata = load_tickerdata_file(None, 'UNITTEST/BTC', '8m')
|
||||
assert _BTC_UNITTEST_LENGTH == len(tickerdata)
|
||||
|
||||
|
||||
def test_load_partial_missing(caplog) -> None:
|
||||
# Make sure we start fresh - test missing data at start
|
||||
start = arrow.get('2018-01-01T00:00:00')
|
||||
end = arrow.get('2018-01-11T00:00:00')
|
||||
tickerdata = history.load_data(None, '5m', ['UNITTEST/BTC'],
|
||||
refresh_pairs=False,
|
||||
timerange=TimeRange('date', 'date',
|
||||
start.timestamp, end.timestamp))
|
||||
# timedifference in 5 minutes
|
||||
td = ((end - start).total_seconds() // 60 // 5) + 1
|
||||
assert td != len(tickerdata['UNITTEST/BTC'])
|
||||
start_real = tickerdata['UNITTEST/BTC'].iloc[0, 0]
|
||||
assert log_has(f'Missing data at start for pair '
|
||||
f'UNITTEST/BTC, data starts at {start_real.strftime("%Y-%m-%d %H:%M:%S")}',
|
||||
caplog.record_tuples)
|
||||
# Make sure we start fresh - test missing data at end
|
||||
caplog.clear()
|
||||
start = arrow.get('2018-01-10T00:00:00')
|
||||
end = arrow.get('2018-02-20T00:00:00')
|
||||
tickerdata = history.load_data(datadir=None, ticker_interval='5m',
|
||||
pairs=['UNITTEST/BTC'], refresh_pairs=False,
|
||||
timerange=TimeRange('date', 'date',
|
||||
start.timestamp, end.timestamp))
|
||||
# timedifference in 5 minutes
|
||||
td = ((end - start).total_seconds() // 60 // 5) + 1
|
||||
assert td != len(tickerdata['UNITTEST/BTC'])
|
||||
# Shift endtime with +5 - as last candle is dropped (partial candle)
|
||||
end_real = arrow.get(tickerdata['UNITTEST/BTC'].iloc[-1, 0]).shift(minutes=5)
|
||||
assert log_has(f'Missing data at end for pair '
|
||||
f'UNITTEST/BTC, data ends at {end_real.strftime("%Y-%m-%d %H:%M:%S")}',
|
||||
caplog.record_tuples)
|
||||
|
||||
|
||||
def test_init(default_conf, mocker) -> None:
|
||||
exchange = get_patched_exchange(mocker, default_conf)
|
||||
assert {} == history.load_data(
|
||||
datadir='',
|
||||
exchange=exchange,
|
||||
pairs=[],
|
||||
refresh_pairs=True,
|
||||
ticker_interval=default_conf['ticker_interval']
|
||||
)
|
||||
|
||||
|
||||
def test_trim_tickerlist() -> None:
|
||||
file = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'UNITTEST_BTC-1m.json')
|
||||
with open(file) as data_file:
|
||||
ticker_list = json.load(data_file)
|
||||
ticker_list_len = len(ticker_list)
|
||||
|
||||
# Test the pattern ^(-\d+)$
|
||||
# This pattern uses the latest N elements
|
||||
timerange = TimeRange(None, 'line', 0, -5)
|
||||
ticker = trim_tickerlist(ticker_list, timerange)
|
||||
ticker_len = len(ticker)
|
||||
|
||||
assert ticker_len == 5
|
||||
assert ticker_list[0] is not ticker[0] # The first element should be different
|
||||
assert ticker_list[-1] is ticker[-1] # The last element must be the same
|
||||
|
||||
# Test the pattern ^(\d+)-$
|
||||
# This pattern keep X element from the end
|
||||
timerange = TimeRange('line', None, 5, 0)
|
||||
ticker = trim_tickerlist(ticker_list, timerange)
|
||||
ticker_len = len(ticker)
|
||||
|
||||
assert ticker_len == 5
|
||||
assert ticker_list[0] is ticker[0] # The first element must be the same
|
||||
assert ticker_list[-1] is not ticker[-1] # The last element should be different
|
||||
|
||||
# Test the pattern ^(\d+)-(\d+)$
|
||||
# This pattern extract a window
|
||||
timerange = TimeRange('index', 'index', 5, 10)
|
||||
ticker = trim_tickerlist(ticker_list, timerange)
|
||||
ticker_len = len(ticker)
|
||||
|
||||
assert ticker_len == 5
|
||||
assert ticker_list[0] is not ticker[0] # The first element should be different
|
||||
assert ticker_list[5] is ticker[0] # The list starts at the index 5
|
||||
assert ticker_list[9] is ticker[-1] # The list ends at the index 9 (5 elements)
|
||||
|
||||
# Test the pattern ^(\d{8})-(\d{8})$
|
||||
# This pattern extract a window between the dates
|
||||
timerange = TimeRange('date', 'date', ticker_list[5][0] / 1000, ticker_list[10][0] / 1000 - 1)
|
||||
ticker = trim_tickerlist(ticker_list, timerange)
|
||||
ticker_len = len(ticker)
|
||||
|
||||
assert ticker_len == 5
|
||||
assert ticker_list[0] is not ticker[0] # The first element should be different
|
||||
assert ticker_list[5] is ticker[0] # The list starts at the index 5
|
||||
assert ticker_list[9] is ticker[-1] # The list ends at the index 9 (5 elements)
|
||||
|
||||
# Test the pattern ^-(\d{8})$
|
||||
# This pattern extracts elements from the start to the date
|
||||
timerange = TimeRange(None, 'date', 0, ticker_list[10][0] / 1000 - 1)
|
||||
ticker = trim_tickerlist(ticker_list, timerange)
|
||||
ticker_len = len(ticker)
|
||||
|
||||
assert ticker_len == 10
|
||||
assert ticker_list[0] is ticker[0] # The start of the list is included
|
||||
assert ticker_list[9] is ticker[-1] # The element 10 is not included
|
||||
|
||||
# Test the pattern ^(\d{8})-$
|
||||
# This pattern extracts elements from the date to now
|
||||
timerange = TimeRange('date', None, ticker_list[10][0] / 1000 - 1, None)
|
||||
ticker = trim_tickerlist(ticker_list, timerange)
|
||||
ticker_len = len(ticker)
|
||||
|
||||
assert ticker_len == ticker_list_len - 10
|
||||
assert ticker_list[10] is ticker[0] # The first element is element #10
|
||||
assert ticker_list[-1] is ticker[-1] # The last element is the same
|
||||
|
||||
# Test a wrong pattern
|
||||
# This pattern must return the list unchanged
|
||||
timerange = TimeRange(None, None, None, 5)
|
||||
ticker = trim_tickerlist(ticker_list, timerange)
|
||||
ticker_len = len(ticker)
|
||||
|
||||
assert ticker_list_len == ticker_len
|
||||
|
||||
# Test invalid timerange (start after stop)
|
||||
timerange = TimeRange('index', 'index', 10, 5)
|
||||
with pytest.raises(ValueError, match=r'The timerange .* is incorrect'):
|
||||
trim_tickerlist(ticker_list, timerange)
|
||||
|
||||
assert ticker_list_len == ticker_len
|
||||
|
||||
# passing empty list
|
||||
timerange = TimeRange(None, None, None, 5)
|
||||
ticker = trim_tickerlist([], timerange)
|
||||
assert 0 == len(ticker)
|
||||
assert not ticker
|
||||
|
||||
|
||||
def test_file_dump_json() -> None:
|
||||
file = os.path.join(os.path.dirname(__file__), '..', 'testdata',
|
||||
'test_{id}.json'.format(id=str(uuid.uuid4())))
|
||||
data = {'bar': 'foo'}
|
||||
|
||||
# check the file we will create does not exist
|
||||
assert os.path.isfile(file) is False
|
||||
|
||||
# Create the Json file
|
||||
file_dump_json(file, data)
|
||||
|
||||
# Check the file was created
|
||||
assert os.path.isfile(file) is True
|
||||
|
||||
# Open the Json file created and test the data is in it
|
||||
with open(file) as data_file:
|
||||
json_from_file = json.load(data_file)
|
||||
|
||||
assert 'bar' in json_from_file
|
||||
assert json_from_file['bar'] == 'foo'
|
||||
|
||||
# Remove the file
|
||||
_clean_test_file(file)
|
@ -1,19 +1,22 @@
|
||||
# pragma pylint: disable=missing-docstring, C0103, C0330
|
||||
# pragma pylint: disable=protected-access, too-many-lines, invalid-name, too-many-arguments
|
||||
|
||||
import pytest
|
||||
import logging
|
||||
from freqtrade.tests.conftest import get_patched_freqtradebot
|
||||
from freqtrade.edge import Edge, PairInfo
|
||||
from pandas import DataFrame, to_datetime
|
||||
from freqtrade.strategy.interface import SellType
|
||||
from freqtrade.tests.optimize import (BTrade, BTContainer, _build_backtest_dataframe,
|
||||
_get_frame_time_from_offset)
|
||||
import math
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import arrow
|
||||
import numpy as np
|
||||
import math
|
||||
import pytest
|
||||
from pandas import DataFrame, to_datetime
|
||||
|
||||
from unittest.mock import MagicMock
|
||||
from freqtrade.data.converter import parse_ticker_dataframe
|
||||
from freqtrade.edge import Edge, PairInfo
|
||||
from freqtrade.strategy.interface import SellType
|
||||
from freqtrade.tests.conftest import get_patched_freqtradebot
|
||||
from freqtrade.tests.optimize import (BTContainer, BTrade,
|
||||
_build_backtest_dataframe,
|
||||
_get_frame_time_from_offset)
|
||||
|
||||
# Cases to be tested:
|
||||
# 1) Open trade should be removed from the end
|
||||
@ -278,7 +281,8 @@ def mocked_load_data(datadir, pairs=[], ticker_interval='0m', refresh_pairs=Fals
|
||||
123.45
|
||||
] for x in range(0, 500)]
|
||||
|
||||
pairdata = {'NEO/BTC': ETHBTC, 'LTC/BTC': LTCBTC}
|
||||
pairdata = {'NEO/BTC': parse_ticker_dataframe(ETHBTC),
|
||||
'LTC/BTC': parse_ticker_dataframe(LTCBTC)}
|
||||
return pairdata
|
||||
|
||||
|
||||
@ -286,7 +290,7 @@ def test_edge_process_downloaded_data(mocker, edge_conf):
|
||||
edge_conf['datadir'] = None
|
||||
freqtrade = get_patched_freqtradebot(mocker, edge_conf)
|
||||
mocker.patch('freqtrade.exchange.Exchange.get_fee', MagicMock(return_value=0.001))
|
||||
mocker.patch('freqtrade.optimize.load_data', mocked_load_data)
|
||||
mocker.patch('freqtrade.data.history.load_data', mocked_load_data)
|
||||
edge = Edge(edge_conf, freqtrade.exchange, freqtrade.strategy)
|
||||
|
||||
assert edge.calculate()
|
||||
|
@ -11,14 +11,16 @@ import pandas as pd
|
||||
import pytest
|
||||
from arrow import Arrow
|
||||
|
||||
from freqtrade import DependencyException, constants, optimize
|
||||
from freqtrade import DependencyException, constants
|
||||
from freqtrade.arguments import Arguments, TimeRange
|
||||
from freqtrade.data import history
|
||||
from freqtrade.data.converter import parse_ticker_dataframe
|
||||
from freqtrade.optimize import get_timeframe
|
||||
from freqtrade.optimize.backtesting import (Backtesting, setup_configuration,
|
||||
start)
|
||||
from freqtrade.tests.conftest import log_has, patch_exchange
|
||||
from freqtrade.strategy.interface import SellType
|
||||
from freqtrade.strategy.default_strategy import DefaultStrategy
|
||||
from freqtrade.strategy.interface import SellType
|
||||
from freqtrade.tests.conftest import log_has, patch_exchange
|
||||
|
||||
|
||||
def get_args(args) -> List[str]:
|
||||
@ -34,22 +36,13 @@ def trim_dictlist(dict_list, num):
|
||||
|
||||
def load_data_test(what):
|
||||
timerange = TimeRange(None, 'line', 0, -101)
|
||||
data = optimize.load_data(None, ticker_interval='1m',
|
||||
pairs=['UNITTEST/BTC'], timerange=timerange)
|
||||
pair = data['UNITTEST/BTC']
|
||||
pair = history.load_tickerdata_file(None, ticker_interval='1m',
|
||||
pair='UNITTEST/BTC', timerange=timerange)
|
||||
datalen = len(pair)
|
||||
# Depending on the what parameter we now adjust the
|
||||
# loaded data looks:
|
||||
# pair :: [[ 1509836520000, unix timestamp in ms
|
||||
# 0.00162008, open
|
||||
# 0.00162008, high
|
||||
# 0.00162008, low
|
||||
# 0.00162008, close
|
||||
# 108.14853839 base volume
|
||||
# ]]
|
||||
|
||||
base = 0.001
|
||||
if what == 'raise':
|
||||
return {'UNITTEST/BTC': [
|
||||
data = [
|
||||
[
|
||||
pair[x][0], # Keep old dates
|
||||
x * base, # But replace O,H,L,C
|
||||
@ -58,9 +51,9 @@ def load_data_test(what):
|
||||
x * base,
|
||||
pair[x][5], # Keep old volume
|
||||
] for x in range(0, datalen)
|
||||
]}
|
||||
]
|
||||
if what == 'lower':
|
||||
return {'UNITTEST/BTC': [
|
||||
data = [
|
||||
[
|
||||
pair[x][0], # Keep old dates
|
||||
1 - x * base, # But replace O,H,L,C
|
||||
@ -69,10 +62,10 @@ def load_data_test(what):
|
||||
1 - x * base,
|
||||
pair[x][5] # Keep old volume
|
||||
] for x in range(0, datalen)
|
||||
]}
|
||||
]
|
||||
if what == 'sine':
|
||||
hz = 0.1 # frequency
|
||||
return {'UNITTEST/BTC': [
|
||||
data = [
|
||||
[
|
||||
pair[x][0], # Keep old dates
|
||||
math.sin(x * hz) / 1000 + base, # But replace O,H,L,C
|
||||
@ -81,8 +74,8 @@ def load_data_test(what):
|
||||
math.sin(x * hz) / 1000 + base,
|
||||
pair[x][5] # Keep old volume
|
||||
] for x in range(0, datalen)
|
||||
]}
|
||||
return data
|
||||
]
|
||||
return {'UNITTEST/BTC': parse_ticker_dataframe(data)}
|
||||
|
||||
|
||||
def simple_backtest(config, contour, num_results, mocker) -> None:
|
||||
@ -110,21 +103,21 @@ def simple_backtest(config, contour, num_results, mocker) -> None:
|
||||
|
||||
def mocked_load_data(datadir, pairs=[], ticker_interval='0m', refresh_pairs=False,
|
||||
timerange=None, exchange=None):
|
||||
tickerdata = optimize.load_tickerdata_file(datadir, 'UNITTEST/BTC', '1m', timerange=timerange)
|
||||
pairdata = {'UNITTEST/BTC': tickerdata}
|
||||
tickerdata = history.load_tickerdata_file(datadir, 'UNITTEST/BTC', '1m', timerange=timerange)
|
||||
pairdata = {'UNITTEST/BTC': parse_ticker_dataframe(tickerdata)}
|
||||
return pairdata
|
||||
|
||||
|
||||
# used to mock ccxt.fetch_ohlcv
|
||||
def _load_pair_as_ticks(pair, tickfreq):
|
||||
ticks = optimize.load_data(None, ticker_interval=tickfreq, pairs=[pair])
|
||||
ticks = trim_dictlist(ticks, -201)
|
||||
return ticks[pair]
|
||||
ticks = history.load_tickerdata_file(None, ticker_interval=tickfreq, pair=pair)
|
||||
ticks = ticks[-201:]
|
||||
return ticks
|
||||
|
||||
|
||||
# FIX: fixturize this?
|
||||
def _make_backtest_conf(mocker, conf=None, pair='UNITTEST/BTC', record=None):
|
||||
data = optimize.load_data(None, ticker_interval='1m', pairs=[pair])
|
||||
data = history.load_data(datadir=None, ticker_interval='1m', pairs=[pair])
|
||||
data = trim_dictlist(data, -201)
|
||||
patch_exchange(mocker)
|
||||
backtesting = Backtesting(conf)
|
||||
@ -332,8 +325,8 @@ def test_backtesting_init(mocker, default_conf) -> None:
|
||||
def test_tickerdata_to_dataframe(default_conf, mocker) -> None:
|
||||
patch_exchange(mocker)
|
||||
timerange = TimeRange(None, 'line', 0, -100)
|
||||
tick = optimize.load_tickerdata_file(None, 'UNITTEST/BTC', '1m', timerange=timerange)
|
||||
tickerlist = {'UNITTEST/BTC': tick}
|
||||
tick = history.load_tickerdata_file(None, 'UNITTEST/BTC', '1m', timerange=timerange)
|
||||
tickerlist = {'UNITTEST/BTC': parse_ticker_dataframe(tick)}
|
||||
|
||||
backtesting = Backtesting(default_conf)
|
||||
data = backtesting.strategy.tickerdata_to_dataframe(tickerlist)
|
||||
@ -447,7 +440,7 @@ def test_backtesting_start(default_conf, mocker, caplog) -> None:
|
||||
def get_timeframe(input1):
|
||||
return Arrow(2017, 11, 14, 21, 17), Arrow(2017, 11, 14, 22, 59)
|
||||
|
||||
mocker.patch('freqtrade.optimize.load_data', mocked_load_data)
|
||||
mocker.patch('freqtrade.data.history.load_data', mocked_load_data)
|
||||
mocker.patch('freqtrade.optimize.get_timeframe', get_timeframe)
|
||||
mocker.patch('freqtrade.exchange.Exchange.refresh_tickers', MagicMock())
|
||||
patch_exchange(mocker)
|
||||
@ -482,7 +475,7 @@ def test_backtesting_start_no_data(default_conf, mocker, caplog) -> None:
|
||||
def get_timeframe(input1):
|
||||
return Arrow(2017, 11, 14, 21, 17), Arrow(2017, 11, 14, 22, 59)
|
||||
|
||||
mocker.patch('freqtrade.optimize.load_data', MagicMock(return_value={}))
|
||||
mocker.patch('freqtrade.data.history.load_data', MagicMock(return_value={}))
|
||||
mocker.patch('freqtrade.optimize.get_timeframe', get_timeframe)
|
||||
mocker.patch('freqtrade.exchange.Exchange.refresh_tickers', MagicMock())
|
||||
patch_exchange(mocker)
|
||||
@ -511,8 +504,9 @@ def test_backtest(default_conf, fee, mocker) -> None:
|
||||
patch_exchange(mocker)
|
||||
backtesting = Backtesting(default_conf)
|
||||
pair = 'UNITTEST/BTC'
|
||||
data = optimize.load_data(None, ticker_interval='5m', pairs=['UNITTEST/BTC'])
|
||||
data = trim_dictlist(data, -200)
|
||||
timerange = TimeRange(None, 'line', 0, -201)
|
||||
data = history.load_data(datadir=None, ticker_interval='5m', pairs=['UNITTEST/BTC'],
|
||||
timerange=timerange)
|
||||
data_processed = backtesting.strategy.tickerdata_to_dataframe(data)
|
||||
min_date, max_date = get_timeframe(data_processed)
|
||||
results = backtesting.backtest(
|
||||
@ -536,8 +530,8 @@ def test_backtest(default_conf, fee, mocker) -> None:
|
||||
Arrow(2018, 1, 30, 3, 30, 0).datetime],
|
||||
'close_time': [Arrow(2018, 1, 29, 22, 35, 0).datetime,
|
||||
Arrow(2018, 1, 30, 4, 15, 0).datetime],
|
||||
'open_index': [77, 183],
|
||||
'close_index': [124, 192],
|
||||
'open_index': [78, 184],
|
||||
'close_index': [125, 193],
|
||||
'trade_duration': [235, 45],
|
||||
'open_at_end': [False, False],
|
||||
'open_rate': [0.104445, 0.10302485],
|
||||
@ -563,9 +557,10 @@ def test_backtest_1min_ticker_interval(default_conf, fee, mocker) -> None:
|
||||
patch_exchange(mocker)
|
||||
backtesting = Backtesting(default_conf)
|
||||
|
||||
# Run backtesting for an existing 5min ticker_interval
|
||||
data = optimize.load_data(None, ticker_interval='1m', pairs=['UNITTEST/BTC'])
|
||||
data = trim_dictlist(data, -200)
|
||||
# Run backtesting for an existing 1min ticker_interval
|
||||
timerange = TimeRange(None, 'line', 0, -200)
|
||||
data = history.load_data(datadir=None, ticker_interval='1m', pairs=['UNITTEST/BTC'],
|
||||
timerange=timerange)
|
||||
processed = backtesting.strategy.tickerdata_to_dataframe(data)
|
||||
min_date, max_date = get_timeframe(processed)
|
||||
results = backtesting.backtest(
|
||||
@ -651,7 +646,7 @@ def test_backtest_alternate_buy_sell(default_conf, fee, mocker):
|
||||
# 200 candles in backtest data
|
||||
# won't buy on first (shifted by 1)
|
||||
# 100 buys signals
|
||||
assert len(results) == 99
|
||||
assert len(results) == 100
|
||||
# One trade was force-closed at the end
|
||||
assert len(results.loc[results.open_at_end]) == 0
|
||||
|
||||
@ -688,7 +683,7 @@ def test_backtest_multi_pair(default_conf, fee, mocker):
|
||||
mocker.patch('freqtrade.exchange.Exchange.get_fee', fee)
|
||||
patch_exchange(mocker)
|
||||
pairs = ['ADA/BTC', 'DASH/BTC', 'ETH/BTC', 'LTC/BTC', 'NXT/BTC']
|
||||
data = optimize.load_data(None, ticker_interval='5m', pairs=pairs)
|
||||
data = history.load_data(datadir=None, ticker_interval='5m', pairs=pairs)
|
||||
data = trim_dictlist(data, -500)
|
||||
# We need to enable sell-signal - otherwise it sells on ROI!!
|
||||
default_conf['experimental'] = {"use_sell_signal": True}
|
||||
@ -840,7 +835,7 @@ def test_backtest_start_live(default_conf, mocker, caplog):
|
||||
'Using stake_currency: BTC ...',
|
||||
'Using stake_amount: 0.001 ...',
|
||||
'Downloading data for all pairs in whitelist ...',
|
||||
'Measuring data from 2017-11-14T19:31:00+00:00 up to 2017-11-14T22:57:00+00:00 (0 days)..',
|
||||
'Measuring data from 2017-11-14T19:31:00+00:00 up to 2017-11-14T22:58:00+00:00 (0 days)..',
|
||||
'Parameter --enable-position-stacking detected ...'
|
||||
]
|
||||
|
||||
@ -899,7 +894,7 @@ def test_backtest_start_multi_strat(default_conf, mocker, caplog):
|
||||
'Using stake_currency: BTC ...',
|
||||
'Using stake_amount: 0.001 ...',
|
||||
'Downloading data for all pairs in whitelist ...',
|
||||
'Measuring data from 2017-11-14T19:31:00+00:00 up to 2017-11-14T22:57:00+00:00 (0 days)..',
|
||||
'Measuring data from 2017-11-14T19:31:00+00:00 up to 2017-11-14T22:58:00+00:00 (0 days)..',
|
||||
'Parameter --enable-position-stacking detected ...',
|
||||
'Running backtesting for Strategy DefaultStrategy',
|
||||
'Running backtesting for Strategy TestStrategy',
|
||||
|
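The recurring change in the hunks above is that freqtrade.optimize.load_data plus trim_dictlist is replaced by freqtrade.data.history.load_data, which takes the trimming as a TimeRange argument. A minimal sketch of the new call, using only names that appear in this diff (datadir=None points the helper at the bundled test data, as in these tests):

from freqtrade.arguments import TimeRange
from freqtrade.data import history

# The 'line'-type TimeRange replaces the old trim_dictlist(data, -200) step.
timerange = TimeRange(None, 'line', 0, -201)
data = history.load_data(datadir=None, ticker_interval='5m',
                         pairs=['UNITTEST/BTC'], timerange=timerange)
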
@ -6,7 +6,8 @@ from unittest.mock import MagicMock
import pandas as pd
import pytest

from freqtrade.optimize import load_tickerdata_file
from freqtrade.data.converter import parse_ticker_dataframe
from freqtrade.data.history import load_tickerdata_file
from freqtrade.optimize.hyperopt import Hyperopt, start
from freqtrade.resolvers import StrategyResolver
from freqtrade.tests.conftest import log_has, patch_exchange

@ -242,7 +243,7 @@ def test_has_space(hyperopt):

def test_populate_indicators(hyperopt) -> None:
tick = load_tickerdata_file(None, 'UNITTEST/BTC', '1m')
tickerlist = {'UNITTEST/BTC': tick}
tickerlist = {'UNITTEST/BTC': parse_ticker_dataframe(tick)}
dataframes = hyperopt.strategy.tickerdata_to_dataframe(tickerlist)
dataframe = hyperopt.custom_hyperopt.populate_indicators(dataframes['UNITTEST/BTC'],
{'pair': 'UNITTEST/BTC'})

@ -255,7 +256,7 @@ def test_populate_indicators(hyperopt) -> None:

def test_buy_strategy_generator(hyperopt) -> None:
tick = load_tickerdata_file(None, 'UNITTEST/BTC', '1m')
tickerlist = {'UNITTEST/BTC': tick}
tickerlist = {'UNITTEST/BTC': parse_ticker_dataframe(tick)}
dataframes = hyperopt.strategy.tickerdata_to_dataframe(tickerlist)
dataframe = hyperopt.custom_hyperopt.populate_indicators(dataframes['UNITTEST/BTC'],
{'pair': 'UNITTEST/BTC'})
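The same conversion step repeats across these hyperopt tests: the raw OHLCV list returned by load_tickerdata_file is now passed through parse_ticker_dataframe before tickerdata_to_dataframe is called. A minimal sketch of the pattern; 'hyperopt' stands for the fixture these tests already receive:

from freqtrade.data.converter import parse_ticker_dataframe
from freqtrade.data.history import load_tickerdata_file

tick = load_tickerdata_file(None, 'UNITTEST/BTC', '1m')
# Convert the ccxt-style OHLCV list to a DataFrame up front, as the updated
# tests do, before building the per-pair strategy dataframes.
tickerlist = {'UNITTEST/BTC': parse_ticker_dataframe(tick)}
dataframes = hyperopt.strategy.tickerdata_to_dataframe(tickerlist)
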
@ -1,475 +1,9 @@
# pragma pylint: disable=missing-docstring, protected-access, C0103

import json
import os
import uuid
from shutil import copyfile

import arrow

from freqtrade import optimize, constants
from freqtrade.arguments import TimeRange
from freqtrade.misc import file_dump_json
from freqtrade.optimize.__init__ import (download_backtesting_testdata,
download_pairs,
load_cached_data_for_updating,
load_tickerdata_file,
make_testdata_path, trim_tickerlist)
from freqtrade.data import history
from freqtrade.strategy.default_strategy import DefaultStrategy
from freqtrade.tests.conftest import get_patched_exchange, log_has, patch_exchange

# Change this if modifying UNITTEST/BTC testdatafile
_BTC_UNITTEST_LENGTH = 13681


def _backup_file(file: str, copy_file: bool = False) -> None:
"""
Backup existing file to avoid deleting the user file
:param file: complete path to the file
:param touch_file: create an empty file in replacement
:return: None
"""
file_swp = file + '.swp'
if os.path.isfile(file):
os.rename(file, file_swp)

if copy_file:
copyfile(file_swp, file)


def _clean_test_file(file: str) -> None:
"""
Backup existing file to avoid deleting the user file
:param file: complete path to the file
:return: None
"""
file_swp = file + '.swp'
# 1. Delete file from the test
if os.path.isfile(file):
os.remove(file)

# 2. Rollback to the initial file
if os.path.isfile(file_swp):
os.rename(file_swp, file)


def test_load_data_30min_ticker(ticker_history, mocker, caplog, default_conf) -> None:
mocker.patch('freqtrade.exchange.Exchange.get_history', return_value=ticker_history)
file = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'UNITTEST_BTC-30m.json')
_backup_file(file, copy_file=True)
ld = optimize.load_data(None, pairs=['UNITTEST/BTC'], ticker_interval='30m')
assert isinstance(ld, dict)
assert os.path.isfile(file) is True
assert not log_has('Download the pair: "UNITTEST/BTC", Interval: 30m', caplog.record_tuples)
_clean_test_file(file)


def test_load_data_5min_ticker(ticker_history, mocker, caplog, default_conf) -> None:
mocker.patch('freqtrade.exchange.Exchange.get_history', return_value=ticker_history)

file = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'UNITTEST_BTC-5m.json')
_backup_file(file, copy_file=True)
optimize.load_data(None, pairs=['UNITTEST/BTC'], ticker_interval='5m')
assert os.path.isfile(file) is True
assert not log_has('Download the pair: "UNITTEST/BTC", Interval: 5m', caplog.record_tuples)
_clean_test_file(file)


def test_load_data_1min_ticker(ticker_history, mocker, caplog) -> None:
mocker.patch('freqtrade.exchange.Exchange.get_history', return_value=ticker_history)
file = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'UNITTEST_BTC-1m.json')
_backup_file(file, copy_file=True)
optimize.load_data(None, ticker_interval='1m', pairs=['UNITTEST/BTC'])
assert os.path.isfile(file) is True
assert not log_has('Download the pair: "UNITTEST/BTC", Interval: 1m', caplog.record_tuples)
_clean_test_file(file)


def test_load_data_with_new_pair_1min(ticker_history_list, mocker, caplog, default_conf) -> None:
"""
Test load_data() with 1 min ticker
"""
mocker.patch('freqtrade.exchange.Exchange.get_history', return_value=ticker_history_list)
exchange = get_patched_exchange(mocker, default_conf)
file = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'MEME_BTC-1m.json')

_backup_file(file)
# do not download a new pair if refresh_pairs isn't set
optimize.load_data(None,
ticker_interval='1m',
refresh_pairs=False,
pairs=['MEME/BTC'])
assert os.path.isfile(file) is False
assert log_has('No data for pair: "MEME/BTC", Interval: 1m. '
'Use --refresh-pairs-cached to download the data',
caplog.record_tuples)

# download a new pair if refresh_pairs is set
optimize.load_data(None,
ticker_interval='1m',
refresh_pairs=True,
exchange=exchange,
pairs=['MEME/BTC'])
assert os.path.isfile(file) is True
assert log_has('Download the pair: "MEME/BTC", Interval: 1m', caplog.record_tuples)
_clean_test_file(file)


def test_testdata_path() -> None:
assert os.path.join('freqtrade', 'tests', 'testdata') in make_testdata_path(None)


def test_download_pairs(ticker_history_list, mocker, default_conf) -> None:
mocker.patch('freqtrade.exchange.Exchange.get_history', return_value=ticker_history_list)
exchange = get_patched_exchange(mocker, default_conf)
file1_1 = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'MEME_BTC-1m.json')
file1_5 = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'MEME_BTC-5m.json')
file2_1 = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'CFI_BTC-1m.json')
file2_5 = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'CFI_BTC-5m.json')

_backup_file(file1_1)
_backup_file(file1_5)
_backup_file(file2_1)
_backup_file(file2_5)

assert os.path.isfile(file1_1) is False
assert os.path.isfile(file2_1) is False

assert download_pairs(None, exchange,
pairs=['MEME/BTC', 'CFI/BTC'], ticker_interval='1m') is True

assert os.path.isfile(file1_1) is True
assert os.path.isfile(file2_1) is True

# clean files freshly downloaded
_clean_test_file(file1_1)
_clean_test_file(file2_1)

assert os.path.isfile(file1_5) is False
assert os.path.isfile(file2_5) is False

assert download_pairs(None, exchange,
pairs=['MEME/BTC', 'CFI/BTC'], ticker_interval='5m') is True

assert os.path.isfile(file1_5) is True
assert os.path.isfile(file2_5) is True

# clean files freshly downloaded
_clean_test_file(file1_5)
_clean_test_file(file2_5)

def test_load_cached_data_for_updating(mocker) -> None:
datadir = os.path.join(os.path.dirname(__file__), '..', 'testdata')

test_data = None
test_filename = os.path.join(datadir, 'UNITTEST_BTC-1m.json')
with open(test_filename, "rt") as file:
test_data = json.load(file)

# change now time to test 'line' cases
# now = last cached item + 1 hour
now_ts = test_data[-1][0] / 1000 + 60 * 60
mocker.patch('arrow.utcnow', return_value=arrow.get(now_ts))

# timeframe starts earlier than the cached data
# should fully update data
timerange = TimeRange('date', None, test_data[0][0] / 1000 - 1, 0)
data, start_ts = load_cached_data_for_updating(test_filename,
'1m',
timerange)
assert data == []
assert start_ts == test_data[0][0] - 1000

# same with 'line' timeframe
num_lines = (test_data[-1][0] - test_data[1][0]) / 1000 / 60 + 120
data, start_ts = load_cached_data_for_updating(test_filename,
'1m',
TimeRange(None, 'line', 0, -num_lines))
assert data == []
assert start_ts < test_data[0][0] - 1

# timeframe starts in the center of the cached data
# should return the cached data w/o the last item
timerange = TimeRange('date', None, test_data[0][0] / 1000 + 1, 0)
data, start_ts = load_cached_data_for_updating(test_filename,
'1m',
timerange)
assert data == test_data[:-1]
assert test_data[-2][0] < start_ts < test_data[-1][0]

# same with 'line' timeframe
num_lines = (test_data[-1][0] - test_data[1][0]) / 1000 / 60 + 30
timerange = TimeRange(None, 'line', 0, -num_lines)
data, start_ts = load_cached_data_for_updating(test_filename,
'1m',
timerange)
assert data == test_data[:-1]
assert test_data[-2][0] < start_ts < test_data[-1][0]

# timeframe starts after the cached data
# should return the cached data w/o the last item
timerange = TimeRange('date', None, test_data[-1][0] / 1000 + 1, 0)
data, start_ts = load_cached_data_for_updating(test_filename,
'1m',
timerange)
assert data == test_data[:-1]
assert test_data[-2][0] < start_ts < test_data[-1][0]

# same with 'line' timeframe
num_lines = 30
timerange = TimeRange(None, 'line', 0, -num_lines)
data, start_ts = load_cached_data_for_updating(test_filename,
'1m',
timerange)
assert data == test_data[:-1]
assert test_data[-2][0] < start_ts < test_data[-1][0]

# no timeframe is set
# should return the cached data w/o the last item
num_lines = 30
timerange = TimeRange(None, 'line', 0, -num_lines)
data, start_ts = load_cached_data_for_updating(test_filename,
'1m',
timerange)
assert data == test_data[:-1]
assert test_data[-2][0] < start_ts < test_data[-1][0]

# no datafile exists
# should return timestamp start time
timerange = TimeRange('date', None, now_ts - 10000, 0)
data, start_ts = load_cached_data_for_updating(test_filename + 'unexist',
'1m',
timerange)
assert data == []
assert start_ts == (now_ts - 10000) * 1000

# same with 'line' timeframe
num_lines = 30
timerange = TimeRange(None, 'line', 0, -num_lines)
data, start_ts = load_cached_data_for_updating(test_filename + 'unexist',
'1m',
timerange)
assert data == []
assert start_ts == (now_ts - num_lines * 60) * 1000

# no datafile exists, no timeframe is set
# should return an empty array and None
data, start_ts = load_cached_data_for_updating(test_filename + 'unexist',
'1m',
None)
assert data == []
assert start_ts is None

def test_download_pairs_exception(ticker_history, mocker, caplog, default_conf) -> None:
mocker.patch('freqtrade.exchange.Exchange.get_history', return_value=ticker_history)
mocker.patch('freqtrade.optimize.__init__.download_backtesting_testdata',
side_effect=BaseException('File Error'))
exchange = get_patched_exchange(mocker, default_conf)

file1_1 = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'MEME_BTC-1m.json')
file1_5 = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'MEME_BTC-5m.json')
_backup_file(file1_1)
_backup_file(file1_5)

download_pairs(None, exchange, pairs=['MEME/BTC'], ticker_interval='1m')
# clean files freshly downloaded
_clean_test_file(file1_1)
_clean_test_file(file1_5)
assert log_has('Failed to download the pair: "MEME/BTC", Interval: 1m', caplog.record_tuples)


def test_download_backtesting_testdata(ticker_history_list, mocker, default_conf) -> None:
mocker.patch('freqtrade.exchange.Exchange.get_history', return_value=ticker_history_list)
exchange = get_patched_exchange(mocker, default_conf)
# Test that pairs-cached is not touched.
assert not exchange._pairs_last_refresh_time
# Download a 1 min ticker file
file1 = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'XEL_BTC-1m.json')
_backup_file(file1)
download_backtesting_testdata(None, exchange, pair="XEL/BTC", tick_interval='1m')
assert os.path.isfile(file1) is True
_clean_test_file(file1)
assert not exchange._pairs_last_refresh_time

# Download a 5 min ticker file
file2 = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'STORJ_BTC-5m.json')
_backup_file(file2)

download_backtesting_testdata(None, exchange, pair="STORJ/BTC", tick_interval='5m')
assert os.path.isfile(file2) is True
_clean_test_file(file2)
assert not exchange._pairs_last_refresh_time


def test_download_backtesting_testdata2(mocker, default_conf) -> None:
tick = [
[1509836520000, 0.00162008, 0.00162008, 0.00162008, 0.00162008, 108.14853839],
[1509836580000, 0.00161, 0.00161, 0.00161, 0.00161, 82.390199]
]
json_dump_mock = mocker.patch('freqtrade.misc.file_dump_json', return_value=None)
mocker.patch('freqtrade.exchange.Exchange.get_history', return_value=tick)
exchange = get_patched_exchange(mocker, default_conf)
download_backtesting_testdata(None, exchange, pair="UNITTEST/BTC", tick_interval='1m')
download_backtesting_testdata(None, exchange, pair="UNITTEST/BTC", tick_interval='3m')
assert json_dump_mock.call_count == 2


def test_load_tickerdata_file() -> None:
# 7 does not exist in either format.
assert not load_tickerdata_file(None, 'UNITTEST/BTC', '7m')
# 1 exists only as a .json
tickerdata = load_tickerdata_file(None, 'UNITTEST/BTC', '1m')
assert _BTC_UNITTEST_LENGTH == len(tickerdata)
# 8 .json is empty and will fail if it's loaded. .json.gz is a copy of 1.json
tickerdata = load_tickerdata_file(None, 'UNITTEST/BTC', '8m')
assert _BTC_UNITTEST_LENGTH == len(tickerdata)

def test_load_partial_missing(caplog) -> None:
# Make sure we start fresh - test missing data at start
start = arrow.get('2018-01-01T00:00:00')
end = arrow.get('2018-01-11T00:00:00')
tickerdata = optimize.load_data(None, '5m', ['UNITTEST/BTC'],
refresh_pairs=False,
timerange=TimeRange('date', 'date',
start.timestamp, end.timestamp))
# timedifference in 5 minutes
td = ((end - start).total_seconds() // 60 // 5) + 1
assert td != len(tickerdata['UNITTEST/BTC'])
start_real = arrow.get(tickerdata['UNITTEST/BTC'][0][0] / 1000)
assert log_has(f'Missing data at start for pair '
f'UNITTEST/BTC, data starts at {start_real.strftime("%Y-%m-%d %H:%M:%S")}',
caplog.record_tuples)
# Make sure we start fresh - test missing data at end
caplog.clear()
start = arrow.get('2018-01-10T00:00:00')
end = arrow.get('2018-02-20T00:00:00')
tickerdata = optimize.load_data(None, '5m', ['UNITTEST/BTC'],
refresh_pairs=False,
timerange=TimeRange('date', 'date',
start.timestamp, end.timestamp))
# timedifference in 5 minutes
td = ((end - start).total_seconds() // 60 // 5) + 1
assert td != len(tickerdata['UNITTEST/BTC'])
end_real = arrow.get(tickerdata['UNITTEST/BTC'][-1][0] / 1000)
assert log_has(f'Missing data at end for pair '
f'UNITTEST/BTC, data ends at {end_real.strftime("%Y-%m-%d %H:%M:%S")}',
caplog.record_tuples)


def test_init(default_conf, mocker) -> None:
exchange = get_patched_exchange(mocker, default_conf)
assert {} == optimize.load_data(
'',
exchange=exchange,
pairs=[],
refresh_pairs=True,
ticker_interval=default_conf['ticker_interval']
)

def test_trim_tickerlist() -> None:
file = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'UNITTEST_BTC-1m.json')
with open(file) as data_file:
ticker_list = json.load(data_file)
ticker_list_len = len(ticker_list)

# Test the pattern ^(-\d+)$
# This pattern uses the latest N elements
timerange = TimeRange(None, 'line', 0, -5)
ticker = trim_tickerlist(ticker_list, timerange)
ticker_len = len(ticker)

assert ticker_len == 5
assert ticker_list[0] is not ticker[0] # The first element should be different
assert ticker_list[-1] is ticker[-1] # The last element must be the same

# Test the pattern ^(\d+)-$
# This pattern keeps X elements from the end
timerange = TimeRange('line', None, 5, 0)
ticker = trim_tickerlist(ticker_list, timerange)
ticker_len = len(ticker)

assert ticker_len == 5
assert ticker_list[0] is ticker[0] # The first element must be the same
assert ticker_list[-1] is not ticker[-1] # The last element should be different

# Test the pattern ^(\d+)-(\d+)$
# This pattern extracts a window
timerange = TimeRange('index', 'index', 5, 10)
ticker = trim_tickerlist(ticker_list, timerange)
ticker_len = len(ticker)

assert ticker_len == 5
assert ticker_list[0] is not ticker[0] # The first element should be different
assert ticker_list[5] is ticker[0] # The list starts at the index 5
assert ticker_list[9] is ticker[-1] # The list ends at the index 9 (5 elements)

# Test the pattern ^(\d{8})-(\d{8})$
# This pattern extracts a window between the dates
timerange = TimeRange('date', 'date', ticker_list[5][0] / 1000, ticker_list[10][0] / 1000 - 1)
ticker = trim_tickerlist(ticker_list, timerange)
ticker_len = len(ticker)

assert ticker_len == 5
assert ticker_list[0] is not ticker[0] # The first element should be different
assert ticker_list[5] is ticker[0] # The list starts at the index 5
assert ticker_list[9] is ticker[-1] # The list ends at the index 9 (5 elements)

# Test the pattern ^-(\d{8})$
# This pattern extracts elements from the start to the date
timerange = TimeRange(None, 'date', 0, ticker_list[10][0] / 1000 - 1)
ticker = trim_tickerlist(ticker_list, timerange)
ticker_len = len(ticker)

assert ticker_len == 10
assert ticker_list[0] is ticker[0] # The start of the list is included
assert ticker_list[9] is ticker[-1] # The element 10 is not included

# Test the pattern ^(\d{8})-$
# This pattern extracts elements from the date to now
timerange = TimeRange('date', None, ticker_list[10][0] / 1000 - 1, None)
ticker = trim_tickerlist(ticker_list, timerange)
ticker_len = len(ticker)

assert ticker_len == ticker_list_len - 10
assert ticker_list[10] is ticker[0] # The first element is element #10
assert ticker_list[-1] is ticker[-1] # The last element is the same

# Test a wrong pattern
# This pattern must return the list unchanged
timerange = TimeRange(None, None, None, 5)
ticker = trim_tickerlist(ticker_list, timerange)
ticker_len = len(ticker)

assert ticker_list_len == ticker_len

def test_file_dump_json() -> None:
file = os.path.join(os.path.dirname(__file__), '..', 'testdata',
'test_{id}.json'.format(id=str(uuid.uuid4())))
data = {'bar': 'foo'}

# check the file we will create does not exist
assert os.path.isfile(file) is False

# Create the Json file
file_dump_json(file, data)

# Check the file was created
assert os.path.isfile(file) is True

# Open the Json file created and test the data is in it
with open(file) as data_file:
json_from_file = json.load(data_file)

assert 'bar' in json_from_file
assert json_from_file['bar'] == 'foo'

# Remove the file
_clean_test_file(file)
from freqtrade.tests.conftest import log_has, patch_exchange


def test_get_timeframe(default_conf, mocker) -> None:

@ -477,8 +11,8 @@ def test_get_timeframe(default_conf, mocker) -> None:
strategy = DefaultStrategy(default_conf)

data = strategy.tickerdata_to_dataframe(
optimize.load_data(
None,
history.load_data(
datadir=None,
ticker_interval='1m',
pairs=['UNITTEST/BTC']
)

@ -493,8 +27,8 @@ def test_validate_backtest_data_warn(default_conf, mocker, caplog) -> None:
strategy = DefaultStrategy(default_conf)

data = strategy.tickerdata_to_dataframe(
optimize.load_data(
None,
history.load_data(
datadir=None,
ticker_interval='1m',
pairs=['UNITTEST/BTC']
)

@ -515,8 +49,8 @@ def test_validate_backtest_data(default_conf, mocker, caplog) -> None:

timerange = TimeRange('index', 'index', 200, 250)
data = strategy.tickerdata_to_dataframe(
optimize.load_data(
None,
history.load_data(
datadir=None,
ticker_interval='5m',
pairs=['UNITTEST/BTC'],
timerange=timerange
@ -3,7 +3,7 @@ import json
import pytest
from pandas import DataFrame

from freqtrade.exchange.exchange_helpers import parse_ticker_dataframe
from freqtrade.data.converter import parse_ticker_dataframe
from freqtrade.strategy.default_strategy import DefaultStrategy

@ -7,7 +7,8 @@ import arrow
from pandas import DataFrame

from freqtrade.arguments import TimeRange
from freqtrade.optimize.__init__ import load_tickerdata_file
from freqtrade.data.converter import parse_ticker_dataframe
from freqtrade.data.history import load_tickerdata_file
from freqtrade.persistence import Trade
from freqtrade.tests.conftest import get_patched_exchange, log_has
from freqtrade.strategy.default_strategy import DefaultStrategy

@ -110,7 +111,7 @@ def test_tickerdata_to_dataframe(default_conf) -> None:

timerange = TimeRange(None, 'line', 0, -100)
tick = load_tickerdata_file(None, 'UNITTEST/BTC', '1m', timerange=timerange)
tickerlist = {'UNITTEST/BTC': tick}
tickerlist = {'UNITTEST/BTC': parse_ticker_dataframe(tick)}
data = strategy.tickerdata_to_dataframe(tickerlist)
assert len(data['UNITTEST/BTC']) == 99 # partial candle was removed

@ -3,10 +3,10 @@
import datetime
from unittest.mock import MagicMock

from freqtrade.exchange.exchange_helpers import parse_ticker_dataframe
from freqtrade.data.converter import parse_ticker_dataframe
from freqtrade.misc import (common_datearray, datesarray_to_datetimearray,
file_dump_json, format_ms_time, shorten_date)
from freqtrade.optimize.__init__ import load_tickerdata_file
from freqtrade.data.history import load_tickerdata_file
from freqtrade.strategy.default_strategy import DefaultStrategy

@ -34,7 +34,7 @@ def test_datesarray_to_datetimearray(ticker_history_list):
def test_common_datearray(default_conf) -> None:
strategy = DefaultStrategy(default_conf)
tick = load_tickerdata_file(None, 'UNITTEST/BTC', '1m')
tickerlist = {'UNITTEST/BTC': tick}
tickerlist = {'UNITTEST/BTC': parse_ticker_dataframe(tick)}
dataframes = strategy.tickerdata_to_dataframe(tickerlist)

dates = common_datearray(dataframes)
@ -9,7 +9,7 @@ import arrow
from freqtrade import arguments
from freqtrade.arguments import TimeRange
from freqtrade.exchange import Exchange
from freqtrade.optimize import download_backtesting_testdata
from freqtrade.data.history import download_pair_history
from freqtrade.configuration import set_loggers

import logging

@ -82,7 +82,7 @@ for pair in PAIRS:
dl_file.unlink()

print(f'downloading pair {pair}, interval {tick_interval}')
download_backtesting_testdata(str(dl_path), exchange=exchange,
download_pair_history(datadir=dl_path, exchange=exchange,
pair=pair,
tick_interval=tick_interval,
timerange=timerange)
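For the standalone download script, download_backtesting_testdata is renamed to download_pair_history and datadir is now passed as a keyword taking a Path rather than a string, as the hunk above shows. A minimal sketch of the updated call; dl_path, exchange, pair, tick_interval and timerange are the variables the surrounding script already defines:

from freqtrade.data.history import download_pair_history

# datadir is a Path object in the updated script; the remaining keywords are unchanged.
download_pair_history(datadir=dl_path, exchange=exchange,
                      pair=pair,
                      tick_interval=tick_interval,
                      timerange=timerange)
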
@ -38,9 +38,9 @@ import pytz
from plotly import tools
from plotly.offline import plot

import freqtrade.optimize as optimize
from freqtrade import persistence
from freqtrade.arguments import Arguments, TimeRange
from freqtrade.data import history
from freqtrade.exchange import Exchange
from freqtrade.optimize.backtesting import setup_configuration
from freqtrade.persistence import Trade

@ -141,8 +141,8 @@ def plot_analyzed_dataframe(args: Namespace) -> None:
exchange.refresh_tickers([pair], tick_interval)
tickers[pair] = exchange.klines(pair)
else:
tickers = optimize.load_data(
datadir=_CONF.get("datadir"),
tickers = history.load_data(
datadir=Path(_CONF.get("datadir")),
pairs=[pair],
ticker_interval=tick_interval,
refresh_pairs=_CONF.get('refresh_pairs', False),

@ -13,10 +13,10 @@ Optional Cli parameters
--export-filename: Specify where the backtest export is located.
"""
import logging
import os
import sys
import json
from argparse import Namespace
from pathlib import Path
from typing import List, Optional
import numpy as np

@ -27,8 +27,8 @@ import plotly.graph_objs as go
from freqtrade.arguments import Arguments
from freqtrade.configuration import Configuration
from freqtrade import constants
from freqtrade.data import history
from freqtrade.resolvers import StrategyResolver
import freqtrade.optimize as optimize
import freqtrade.misc as misc


@ -120,8 +120,8 @@ def plot_profit(args: Namespace) -> None:
pairs = list(set(pairs) & set(filter_pairs))
logger.info('Filter, keep pairs %s' % pairs)

tickers = optimize.load_data(
datadir=config.get('datadir'),
tickers = history.load_data(
datadir=Path(config.get('datadir')),
pairs=pairs,
ticker_interval=tick_interval,
refresh_pairs=False,

@ -187,7 +187,7 @@ def plot_profit(args: Namespace) -> None:
)
fig.append_trace(pair_profit, 3, 1)

plot(fig, filename=os.path.join('user_data', 'freqtrade-profit-plot.html'))
plot(fig, filename=str(Path('user_data').joinpath('freqtrade-profit-plot.html')))


def define_index(min_date: int, max_date: int, interval: str) -> int: