Replace some occurrences of ticker_interval with timeframe

Matthias 2019-11-02 20:19:13 +01:00
parent 7a2d917c66
commit e4bdb92521
13 changed files with 107 additions and 106 deletions

View File

@@ -10,13 +10,13 @@ from pandas import DataFrame, to_datetime
 logger = logging.getLogger(__name__)
-def parse_ticker_dataframe(ticker: list, ticker_interval: str, pair: str, *,
+def parse_ticker_dataframe(ticker: list, timeframe: str, pair: str, *,
                            fill_missing: bool = True,
                            drop_incomplete: bool = True) -> DataFrame:
     """
     Converts a ticker-list (format ccxt.fetch_ohlcv) to a Dataframe
     :param ticker: ticker list, as returned by exchange.async_get_candle_history
-    :param ticker_interval: ticker_interval (e.g. 5m). Used to fill up eventual missing data
+    :param timeframe: timeframe (e.g. 5m). Used to fill up eventual missing data
     :param pair: Pair this data is for (used to warn if fillup was necessary)
     :param fill_missing: fill up missing candles with 0 candles
                          (see ohlcv_fill_up_missing_data for details)
@@ -52,12 +52,12 @@ def parse_ticker_dataframe(ticker: list, ticker_interval: str, pair: str, *,
         logger.debug('Dropping last candle')
     if fill_missing:
-        return ohlcv_fill_up_missing_data(frame, ticker_interval, pair)
+        return ohlcv_fill_up_missing_data(frame, timeframe, pair)
     else:
         return frame
-def ohlcv_fill_up_missing_data(dataframe: DataFrame, ticker_interval: str, pair: str) -> DataFrame:
+def ohlcv_fill_up_missing_data(dataframe: DataFrame, timeframe: str, pair: str) -> DataFrame:
     """
     Fills up missing data with 0 volume rows,
     using the previous close as price for "open", "high" "low" and "close", volume is set to 0
@@ -72,7 +72,7 @@ def ohlcv_fill_up_missing_data(dataframe: DataFrame, ticker_interval: str, pair:
         'close': 'last',
         'volume': 'sum'
     }
-    ticker_minutes = timeframe_to_minutes(ticker_interval)
+    ticker_minutes = timeframe_to_minutes(timeframe)
     # Resample to create "NAN" values
     df = dataframe.resample(f'{ticker_minutes}min', on='date').agg(ohlc_dict)
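
For reference, a minimal usage sketch of the renamed keyword. The import path and the single candle below are assumptions for illustration only, assuming a freqtrade checkout at this revision:

from freqtrade.data.converter import parse_ticker_dataframe

# One candle in ccxt.fetch_ohlcv format: [timestamp_ms, open, high, low, close, volume]
ticker = [[1572739200000, 0.00935, 0.00940, 0.00930, 0.00938, 12.5]]

# The second argument is now named 'timeframe' instead of 'ticker_interval'
df = parse_ticker_dataframe(ticker, timeframe='5m', pair='ETH/BTC',
                            fill_missing=True, drop_incomplete=False)
print(df)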

View File

@@ -42,29 +42,29 @@ class DataProvider:
         """
         return list(self._exchange._klines.keys())
-    def ohlcv(self, pair: str, ticker_interval: str = None, copy: bool = True) -> DataFrame:
+    def ohlcv(self, pair: str, timeframe: str = None, copy: bool = True) -> DataFrame:
         """
         Get ohlcv data for the given pair as DataFrame
         Please use the `available_pairs` method to verify which pairs are currently cached.
         :param pair: pair to get the data for
-        :param ticker_interval: ticker interval to get data for
+        :param timeframe: Ticker timeframe to get data for
         :param copy: copy dataframe before returning if True.
                      Use False only for read-only operations (where the dataframe is not modified)
         """
         if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE):
-            return self._exchange.klines((pair, ticker_interval or self._config['ticker_interval']),
+            return self._exchange.klines((pair, timeframe or self._config['ticker_interval']),
                                          copy=copy)
         else:
             return DataFrame()
-    def historic_ohlcv(self, pair: str, ticker_interval: str = None) -> DataFrame:
+    def historic_ohlcv(self, pair: str, timeframe: str = None) -> DataFrame:
         """
         Get stored historic ohlcv data
         :param pair: pair to get the data for
-        :param ticker_interval: ticker interval to get data for
+        :param timeframe: ticker interval to get data for
         """
         return load_pair_history(pair=pair,
-                                 ticker_interval=ticker_interval or self._config['ticker_interval'],
+                                 timeframe=timeframe or self._config['ticker_interval'],
                                  datadir=Path(self._config['datadir'])
                                  )
@@ -77,10 +77,10 @@ class DataProvider:
         """
         if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE):
             # Get live ohlcv data.
-            data = self.ohlcv(pair=pair, ticker_interval=ticker_interval)
+            data = self.ohlcv(pair=pair, timeframe=ticker_interval)
         else:
             # Get historic ohlcv data (cached on disk).
-            data = self.historic_ohlcv(pair=pair, ticker_interval=ticker_interval)
+            data = self.historic_ohlcv(pair=pair, timeframe=ticker_interval)
         if len(data) == 0:
             logger.warning(f"No data found for ({pair}, {ticker_interval}).")
         return data
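
A hedged sketch of the renamed DataProvider keywords; the import path and the wrapper function below are illustrative assumptions, not part of this commit:

from freqtrade.data.dataprovider import DataProvider
from pandas import DataFrame

def fetch_candles(dp: DataProvider, pair: str) -> DataFrame:
    # 'timeframe' replaces 'ticker_interval' as the keyword; leaving it unset
    # still falls back to the config key, which keeps its old name for now.
    live_df = dp.ohlcv(pair=pair, timeframe='1h')           # cached live candles (dry-run/live)
    hist_df = dp.historic_ohlcv(pair=pair, timeframe='1h')  # candles stored on disk
    return live_df if len(live_df) else hist_df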

View File

@@ -63,13 +63,13 @@ def trim_dataframe(df: DataFrame, timerange: TimeRange) -> DataFrame:
     return df
-def load_tickerdata_file(datadir: Path, pair: str, ticker_interval: str,
+def load_tickerdata_file(datadir: Path, pair: str, timeframe: str,
                          timerange: Optional[TimeRange] = None) -> Optional[list]:
     """
     Load a pair from file, either .json.gz or .json
     :return: tickerlist or None if unsuccessful
     """
-    filename = pair_data_filename(datadir, pair, ticker_interval)
+    filename = pair_data_filename(datadir, pair, timeframe)
     pairdata = misc.file_load_json(filename)
     if not pairdata:
         return []
@@ -80,11 +80,11 @@ def load_tickerdata_file(datadir: Path, pair: str, ticker_interval: str,
 def store_tickerdata_file(datadir: Path, pair: str,
-                          ticker_interval: str, data: list, is_zip: bool = False):
+                          timeframe: str, data: list, is_zip: bool = False):
     """
     Stores tickerdata to file
     """
-    filename = pair_data_filename(datadir, pair, ticker_interval)
+    filename = pair_data_filename(datadir, pair, timeframe)
     misc.file_dump_json(filename, data, is_zip=is_zip)
@@ -121,7 +121,7 @@ def _validate_pairdata(pair, pairdata, timerange: TimeRange):
 def load_pair_history(pair: str,
-                      ticker_interval: str,
+                      timeframe: str,
                       datadir: Path,
                       timerange: Optional[TimeRange] = None,
                       refresh_pairs: bool = False,
@@ -133,7 +133,7 @@ def load_pair_history(pair: str,
     """
     Loads cached ticker history for the given pair.
     :param pair: Pair to load data for
-    :param ticker_interval: Ticker-interval (e.g. "5m")
+    :param timeframe: Ticker timeframe (e.g. "5m")
     :param datadir: Path to the data storage location.
     :param timerange: Limit data to be loaded to this timerange
     :param refresh_pairs: Refresh pairs from exchange.
@@ -147,34 +147,34 @@ def load_pair_history(pair: str,
     timerange_startup = deepcopy(timerange)
     if startup_candles > 0 and timerange_startup:
-        timerange_startup.subtract_start(timeframe_to_seconds(ticker_interval) * startup_candles)
+        timerange_startup.subtract_start(timeframe_to_seconds(timeframe) * startup_candles)
     # The user forced the refresh of pairs
     if refresh_pairs:
         download_pair_history(datadir=datadir,
                               exchange=exchange,
                               pair=pair,
-                              ticker_interval=ticker_interval,
+                              timeframe=timeframe,
                               timerange=timerange)
-    pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange_startup)
+    pairdata = load_tickerdata_file(datadir, pair, timeframe, timerange=timerange_startup)
     if pairdata:
         if timerange_startup:
             _validate_pairdata(pair, pairdata, timerange_startup)
-        return parse_ticker_dataframe(pairdata, ticker_interval, pair=pair,
+        return parse_ticker_dataframe(pairdata, timeframe, pair=pair,
                                       fill_missing=fill_up_missing,
                                       drop_incomplete=drop_incomplete)
     else:
         logger.warning(
-            f'No history data for pair: "{pair}", interval: {ticker_interval}. '
+            f'No history data for pair: "{pair}", timeframe: {timeframe}. '
             'Use `freqtrade download-data` to download the data'
         )
         return None
 def load_data(datadir: Path,
-              ticker_interval: str,
+              timeframe: str,
               pairs: List[str],
               refresh_pairs: bool = False,
              exchange: Optional[Exchange] = None,
@@ -186,7 +186,7 @@ def load_data(datadir: Path,
     """
     Loads ticker history data for a list of pairs
     :param datadir: Path to the data storage location.
-    :param ticker_interval: Ticker-interval (e.g. "5m")
+    :param timeframe: Ticker Timeframe (e.g. "5m")
     :param pairs: List of pairs to load
     :param refresh_pairs: Refresh pairs from exchange.
                           (Note: Requires exchange to be passed as well.)
@@ -206,7 +206,7 @@ def load_data(datadir: Path,
     logger.info(f'Using indicator startup period: {startup_candles} ...')
     for pair in pairs:
-        hist = load_pair_history(pair=pair, ticker_interval=ticker_interval,
+        hist = load_pair_history(pair=pair, timeframe=timeframe,
                                  datadir=datadir, timerange=timerange,
                                  refresh_pairs=refresh_pairs,
                                  exchange=exchange,
@@ -220,9 +220,9 @@ def load_data(datadir: Path,
     return result
-def pair_data_filename(datadir: Path, pair: str, ticker_interval: str) -> Path:
+def pair_data_filename(datadir: Path, pair: str, timeframe: str) -> Path:
     pair_s = pair.replace("/", "_")
-    filename = datadir.joinpath(f'{pair_s}-{ticker_interval}.json')
+    filename = datadir.joinpath(f'{pair_s}-{timeframe}.json')
     return filename
@@ -232,7 +232,7 @@ def pair_trades_filename(datadir: Path, pair: str) -> Path:
     return filename
-def _load_cached_data_for_updating(datadir: Path, pair: str, ticker_interval: str,
+def _load_cached_data_for_updating(datadir: Path, pair: str, timeframe: str,
                                    timerange: Optional[TimeRange]) -> Tuple[List[Any],
                                                                             Optional[int]]:
     """
@@ -250,12 +250,12 @@ def _load_cached_data_for_updating(datadir: Path, pair: str, ticker_interval: st
         if timerange.starttype == 'date':
             since_ms = timerange.startts * 1000
         elif timerange.stoptype == 'line':
-            num_minutes = timerange.stopts * timeframe_to_minutes(ticker_interval)
+            num_minutes = timerange.stopts * timeframe_to_minutes(timeframe)
             since_ms = arrow.utcnow().shift(minutes=num_minutes).timestamp * 1000
     # read the cached file
     # Intentionally don't pass timerange in - since we need to load the full dataset.
-    data = load_tickerdata_file(datadir, pair, ticker_interval)
+    data = load_tickerdata_file(datadir, pair, timeframe)
     # remove the last item, could be incomplete candle
     if data:
         data.pop()
@@ -276,18 +276,18 @@ def _load_cached_data_for_updating(datadir: Path, pair: str, ticker_interval: st
 def download_pair_history(datadir: Path,
                           exchange: Optional[Exchange],
                           pair: str,
-                          ticker_interval: str = '5m',
+                          timeframe: str = '5m',
                           timerange: Optional[TimeRange] = None) -> bool:
     """
     Download the latest ticker intervals from the exchange for the pair passed in parameters
-    The data is downloaded starting from the last correct ticker interval data that
+    The data is downloaded starting from the last correct data that
     exists in a cache. If timerange starts earlier than the data in the cache,
     the full data will be redownloaded
     Based on @Rybolov work: https://github.com/rybolov/freqtrade-data
     :param pair: pair to download
-    :param ticker_interval: ticker interval
+    :param timeframe: Ticker Timeframe (e.g 5m)
     :param timerange: range of time to download
     :return: bool with success state
     """
@@ -298,17 +298,17 @@ def download_pair_history(datadir: Path,
     try:
         logger.info(
-            f'Download history data for pair: "{pair}", interval: {ticker_interval} '
+            f'Download history data for pair: "{pair}", timeframe: {timeframe} '
             f'and store in {datadir}.'
         )
-        data, since_ms = _load_cached_data_for_updating(datadir, pair, ticker_interval, timerange)
+        data, since_ms = _load_cached_data_for_updating(datadir, pair, timeframe, timerange)
         logger.debug("Current Start: %s", misc.format_ms_time(data[1][0]) if data else 'None')
         logger.debug("Current End: %s", misc.format_ms_time(data[-1][0]) if data else 'None')
         # Default since_ms to 30 days if nothing is given
-        new_data = exchange.get_historic_ohlcv(pair=pair, ticker_interval=ticker_interval,
+        new_data = exchange.get_historic_ohlcv(pair=pair, ticker_interval=timeframe,
                                                since_ms=since_ms if since_ms
                                                else
                                                int(arrow.utcnow().shift(
@@ -318,12 +318,12 @@ def download_pair_history(datadir: Path,
         logger.debug("New Start: %s", misc.format_ms_time(data[0][0]))
         logger.debug("New End: %s", misc.format_ms_time(data[-1][0]))
-        store_tickerdata_file(datadir, pair, ticker_interval, data=data)
+        store_tickerdata_file(datadir, pair, timeframe, data=data)
         return True
     except Exception as e:
         logger.error(
-            f'Failed to download history data for pair: "{pair}", interval: {ticker_interval}. '
+            f'Failed to download history data for pair: "{pair}", timeframe: {timeframe}. '
            f'Error: {e}'
        )
        return False
@@ -343,17 +343,17 @@ def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes
             pairs_not_available.append(pair)
             logger.info(f"Skipping pair {pair}...")
             continue
-        for ticker_interval in timeframes:
+        for timeframe in timeframes:
-            dl_file = pair_data_filename(dl_path, pair, ticker_interval)
+            dl_file = pair_data_filename(dl_path, pair, timeframe)
             if erase and dl_file.exists():
                 logger.info(
-                    f'Deleting existing data for pair {pair}, interval {ticker_interval}.')
+                    f'Deleting existing data for pair {pair}, interval {timeframe}.')
                 dl_file.unlink()
-            logger.info(f'Downloading pair {pair}, interval {ticker_interval}.')
+            logger.info(f'Downloading pair {pair}, interval {timeframe}.')
             download_pair_history(datadir=dl_path, exchange=exchange,
-                                  pair=pair, ticker_interval=str(ticker_interval),
+                                  pair=pair, timeframe=str(timeframe),
                                   timerange=timerange)
     return pairs_not_available
@@ -459,7 +459,7 @@ def get_timeframe(data: Dict[str, DataFrame]) -> Tuple[arrow.Arrow, arrow.Arrow]
 def validate_backtest_data(data: DataFrame, pair: str, min_date: datetime,
-                           max_date: datetime, ticker_interval_mins: int) -> bool:
+                           max_date: datetime, timeframe_mins: int) -> bool:
     """
     Validates preprocessed backtesting data for missing values and shows warnings about it that.
@@ -467,10 +467,10 @@ def validate_backtest_data(data: DataFrame, pair: str, min_date: datetime,
     :param pair: pair used for log output.
     :param min_date: start-date of the data
     :param max_date: end-date of the data
-    :param ticker_interval_mins: ticker interval in minutes
+    :param timeframe_mins: ticker Timeframe in minutes
     """
-    # total difference in minutes / interval-minutes
+    # total difference in minutes / timeframe-minutes
-    expected_frames = int((max_date - min_date).total_seconds() // 60 // ticker_interval_mins)
+    expected_frames = int((max_date - min_date).total_seconds() // 60 // timeframe_mins)
     found_missing = False
     dflen = len(data)
     if dflen < expected_frames:
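
A hedged sketch of the history API after the rename; the datadir value is a hypothetical example and candle data is assumed to be downloaded already:

from pathlib import Path
from freqtrade.data.history import load_data, load_pair_history

datadir = Path('user_data/data/binance')  # hypothetical location of downloaded candles

# Single pair: keyword is now 'timeframe' instead of 'ticker_interval'
eth_df = load_pair_history(pair='ETH/BTC', timeframe='5m', datadir=datadir)

# The bulk loader uses the same renamed keyword
tickers = load_data(datadir=datadir, timeframe='5m', pairs=['ETH/BTC', 'ADA/BTC'])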

View File

@@ -97,7 +97,7 @@ class Edge:
         data = history.load_data(
             datadir=Path(self.config['datadir']),
             pairs=pairs,
-            ticker_interval=self.strategy.ticker_interval,
+            timeframe=self.strategy.ticker_interval,
             refresh_pairs=self._refresh_pairs,
             exchange=self.exchange,
             timerange=self._timerange,

View File

@@ -108,7 +108,7 @@ class Backtesting:
         data = history.load_data(
             datadir=Path(self.config['datadir']),
             pairs=self.config['exchange']['pair_whitelist'],
-            ticker_interval=self.ticker_interval,
+            timeframe=self.ticker_interval,
             timerange=timerange,
             startup_candles=self.required_startup,
             fail_without_data=True,
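
Both Edge and Backtesting now follow the same pattern: the keyword is renamed while the value still comes from attributes and config keys that keep their old names in this commit. A hedged sketch with illustrative stand-in values:

from pathlib import Path
from freqtrade.data import history

# Stand-ins for self.config / self.ticker_interval inside Backtesting (illustrative only)
config = {'datadir': 'user_data/data/binance',
          'exchange': {'pair_whitelist': ['ETH/BTC']}}
ticker_interval = '5m'  # the strategy/config side keeps its old name for now

data = history.load_data(
    datadir=Path(config['datadir']),
    pairs=config['exchange']['pair_whitelist'],
    timeframe=ticker_interval,  # renamed keyword
)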

View File

@@ -39,7 +39,7 @@ def init_plotscript(config):
     tickers = history.load_data(
         datadir=Path(str(config.get("datadir"))),
         pairs=pairs,
-        ticker_interval=config.get('ticker_interval', '5m'),
+        timeframe=config.get('ticker_interval', '5m'),
         timerange=timerange,
     )
@@ -300,12 +300,12 @@ def generate_profit_graph(pairs: str, tickers: Dict[str, pd.DataFrame],
     return fig
-def generate_plot_filename(pair, ticker_interval) -> str:
+def generate_plot_filename(pair, timeframe) -> str:
     """
-    Generate filenames per pair/ticker_interval to be used for storing plots
+    Generate filenames per pair/timeframe to be used for storing plots
     """
     pair_name = pair.replace("/", "_")
-    file_name = 'freqtrade-plot-' + pair_name + '-' + ticker_interval + '.html'
+    file_name = 'freqtrade-plot-' + pair_name + '-' + timeframe + '.html'
     logger.info('Generate plot file for %s', pair)
@@ -316,8 +316,9 @@ def store_plot_file(fig, filename: str, directory: Path, auto_open: bool = False
     """
     Generate a plot html file from pre populated fig plotly object
     :param fig: Plotly Figure to plot
-    :param pair: Pair to plot (used as filename and Plot title)
-    :param ticker_interval: Used as part of the filename
+    :param filename: Name to store the file as
+    :param directory: Directory to store the file in
+    :param auto_open: Automatically open files saved
     :return: None
     """
     directory.mkdir(parents=True, exist_ok=True)
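
A hedged sketch of the renamed plotting helper; the import path is assumed for illustration:

from freqtrade.plot.plotting import generate_plot_filename

# Second argument is now 'timeframe'; the result follows the pattern shown in the hunk above
filename = generate_plot_filename('ETH/BTC', '5m')
print(filename)  # expected: 'freqtrade-plot-ETH_BTC-5m.html'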

View File

@@ -56,7 +56,7 @@ def test_extract_trades_of_period(testdatadir):
     # 2018-11-14 06:07:00
     timerange = TimeRange('date', None, 1510639620, 0)
-    data = load_pair_history(pair=pair, ticker_interval='1m',
+    data = load_pair_history(pair=pair, timeframe='1m',
                              datadir=testdatadir, timerange=timerange)
     trades = DataFrame(
@@ -122,7 +122,7 @@ def test_combine_tickers_with_mean(testdatadir):
     pairs = ["ETH/BTC", "ADA/BTC"]
     tickers = load_data(datadir=testdatadir,
                         pairs=pairs,
-                        ticker_interval='5m'
+                        timeframe='5m'
                         )
     df = combine_tickers_with_mean(tickers)
     assert isinstance(df, DataFrame)
@@ -136,7 +136,7 @@ def test_create_cum_profit(testdatadir):
     bt_data = load_backtest_data(filename)
     timerange = TimeRange.parse_timerange("20180110-20180112")
-    df = load_pair_history(pair="TRX/BTC", ticker_interval='5m',
+    df = load_pair_history(pair="TRX/BTC", timeframe='5m',
                            datadir=testdatadir, timerange=timerange)
     cum_profits = create_cum_profit(df.set_index('date'),
@@ -154,7 +154,7 @@ def test_create_cum_profit1(testdatadir):
     bt_data.loc[:, 'close_time'] = bt_data.loc[:, 'close_time'] + DateOffset(seconds=20)
     timerange = TimeRange.parse_timerange("20180110-20180112")
-    df = load_pair_history(pair="TRX/BTC", ticker_interval='5m',
+    df = load_pair_history(pair="TRX/BTC", timeframe='5m',
                            datadir=testdatadir, timerange=timerange)
     cum_profits = create_cum_profit(df.set_index('date'),

View File

@@ -23,7 +23,7 @@ def test_parse_ticker_dataframe(ticker_history_list, caplog):
 def test_ohlcv_fill_up_missing_data(testdatadir, caplog):
     data = load_pair_history(datadir=testdatadir,
-                             ticker_interval='1m',
+                             timeframe='1m',
                              pair='UNITTEST/BTC',
                              fill_up_missing=False)
     caplog.set_level(logging.DEBUG)

View File

@@ -45,7 +45,7 @@ def test_historic_ohlcv(mocker, default_conf, ticker_history):
     data = dp.historic_ohlcv("UNITTEST/BTC", "5m")
     assert isinstance(data, DataFrame)
     assert historymock.call_count == 1
-    assert historymock.call_args_list[0][1]["ticker_interval"] == "5m"
+    assert historymock.call_args_list[0][1]["timeframe"] == "5m"
 def test_get_pair_dataframe(mocker, default_conf, ticker_history):

View File

@@ -64,20 +64,20 @@ def _clean_test_file(file: Path) -> None:
 def test_load_data_30min_ticker(mocker, caplog, default_conf, testdatadir) -> None:
-    ld = history.load_pair_history(pair='UNITTEST/BTC', ticker_interval='30m', datadir=testdatadir)
+    ld = history.load_pair_history(pair='UNITTEST/BTC', timeframe='30m', datadir=testdatadir)
     assert isinstance(ld, DataFrame)
     assert not log_has(
-        'Download history data for pair: "UNITTEST/BTC", interval: 30m '
+        'Download history data for pair: "UNITTEST/BTC", timeframe: 30m '
         'and store in None.', caplog
     )
 def test_load_data_7min_ticker(mocker, caplog, default_conf, testdatadir) -> None:
-    ld = history.load_pair_history(pair='UNITTEST/BTC', ticker_interval='7m', datadir=testdatadir)
+    ld = history.load_pair_history(pair='UNITTEST/BTC', timeframe='7m', datadir=testdatadir)
     assert not isinstance(ld, DataFrame)
     assert ld is None
     assert log_has(
-        'No history data for pair: "UNITTEST/BTC", interval: 7m. '
+        'No history data for pair: "UNITTEST/BTC", timeframe: 7m. '
         'Use `freqtrade download-data` to download the data', caplog
     )
@@ -86,7 +86,7 @@ def test_load_data_1min_ticker(ticker_history, mocker, caplog, testdatadir) -> N
     mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv', return_value=ticker_history)
     file = testdatadir / 'UNITTEST_BTC-1m.json'
     _backup_file(file, copy_file=True)
-    history.load_data(datadir=testdatadir, ticker_interval='1m', pairs=['UNITTEST/BTC'])
+    history.load_data(datadir=testdatadir, timeframe='1m', pairs=['UNITTEST/BTC'])
     assert file.is_file()
     assert not log_has(
         'Download history data for pair: "UNITTEST/BTC", interval: 1m '
@@ -99,7 +99,7 @@ def test_load_data_startup_candles(mocker, caplog, default_conf, testdatadir) ->
     ltfmock = mocker.patch('freqtrade.data.history.load_tickerdata_file',
                            MagicMock(return_value=None))
     timerange = TimeRange('date', None, 1510639620, 0)
-    history.load_pair_history(pair='UNITTEST/BTC', ticker_interval='1m',
+    history.load_pair_history(pair='UNITTEST/BTC', timeframe='1m',
                               datadir=testdatadir, timerange=timerange,
                               startup_candles=20,
                               )
@@ -122,28 +122,28 @@ def test_load_data_with_new_pair_1min(ticker_history_list, mocker, caplog,
     _backup_file(file)
     # do not download a new pair if refresh_pairs isn't set
     history.load_pair_history(datadir=testdatadir,
-                              ticker_interval='1m',
+                              timeframe='1m',
                               pair='MEME/BTC')
     assert not file.is_file()
     assert log_has(
-        'No history data for pair: "MEME/BTC", interval: 1m. '
+        'No history data for pair: "MEME/BTC", timeframe: 1m. '
         'Use `freqtrade download-data` to download the data', caplog
     )
     # download a new pair if refresh_pairs is set
     history.load_pair_history(datadir=testdatadir,
-                              ticker_interval='1m',
+                              timeframe='1m',
                               refresh_pairs=True,
                               exchange=exchange,
                               pair='MEME/BTC')
     assert file.is_file()
     assert log_has_re(
-        'Download history data for pair: "MEME/BTC", interval: 1m '
+        'Download history data for pair: "MEME/BTC", timeframe: 1m '
         'and store in .*', caplog
     )
     with pytest.raises(OperationalException, match=r'Exchange needs to be initialized when.*'):
         history.load_pair_history(datadir=testdatadir,
-                                  ticker_interval='1m',
+                                  timeframe='1m',
                                   refresh_pairs=True,
                                   exchange=None,
                                   pair='MEME/BTC')
@@ -269,10 +269,10 @@ def test_download_pair_history(ticker_history_list, mocker, default_conf, testda
     assert download_pair_history(datadir=testdatadir, exchange=exchange,
                                  pair='MEME/BTC',
-                                 ticker_interval='1m')
+                                 timeframe='1m')
     assert download_pair_history(datadir=testdatadir, exchange=exchange,
                                  pair='CFI/BTC',
-                                 ticker_interval='1m')
+                                 timeframe='1m')
     assert not exchange._pairs_last_refresh_time
     assert file1_1.is_file()
     assert file2_1.is_file()
@@ -286,10 +286,10 @@ def test_download_pair_history(ticker_history_list, mocker, default_conf, testda
     assert download_pair_history(datadir=testdatadir, exchange=exchange,
                                  pair='MEME/BTC',
-                                 ticker_interval='5m')
+                                 timeframe='5m')
     assert download_pair_history(datadir=testdatadir, exchange=exchange,
                                  pair='CFI/BTC',
-                                 ticker_interval='5m')
+                                 timeframe='5m')
     assert not exchange._pairs_last_refresh_time
     assert file1_5.is_file()
     assert file2_5.is_file()
@@ -307,8 +307,8 @@ def test_download_pair_history2(mocker, default_conf, testdatadir) -> None:
     json_dump_mock = mocker.patch('freqtrade.misc.file_dump_json', return_value=None)
     mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv', return_value=tick)
     exchange = get_patched_exchange(mocker, default_conf)
-    download_pair_history(testdatadir, exchange, pair="UNITTEST/BTC", ticker_interval='1m')
-    download_pair_history(testdatadir, exchange, pair="UNITTEST/BTC", ticker_interval='3m')
+    download_pair_history(testdatadir, exchange, pair="UNITTEST/BTC", timeframe='1m')
+    download_pair_history(testdatadir, exchange, pair="UNITTEST/BTC", timeframe='3m')
     assert json_dump_mock.call_count == 2
@@ -326,12 +326,12 @@ def test_download_backtesting_data_exception(ticker_history, mocker, caplog,
     assert not download_pair_history(datadir=testdatadir, exchange=exchange,
                                      pair='MEME/BTC',
-                                     ticker_interval='1m')
+                                     timeframe='1m')
     # clean files freshly downloaded
     _clean_test_file(file1_1)
     _clean_test_file(file1_5)
     assert log_has(
-        'Failed to download history data for pair: "MEME/BTC", interval: 1m. '
+        'Failed to download history data for pair: "MEME/BTC", timeframe: 1m. '
         'Error: File Error', caplog
     )
@@ -369,7 +369,7 @@ def test_load_partial_missing(testdatadir, caplog) -> None:
     caplog.clear()
     start = arrow.get('2018-01-10T00:00:00')
     end = arrow.get('2018-02-20T00:00:00')
-    tickerdata = history.load_data(datadir=testdatadir, ticker_interval='5m',
+    tickerdata = history.load_data(datadir=testdatadir, timeframe='5m',
                                    pairs=['UNITTEST/BTC'],
                                    timerange=TimeRange('date', 'date',
                                                        start.timestamp, end.timestamp))
@@ -390,7 +390,7 @@ def test_init(default_conf, mocker) -> None:
         exchange=exchange,
         pairs=[],
         refresh_pairs=True,
-        ticker_interval=default_conf['ticker_interval']
+        timeframe=default_conf['ticker_interval']
     )
@@ -449,7 +449,7 @@ def test_trim_tickerlist(testdatadir) -> None:
 def test_trim_dataframe(testdatadir) -> None:
     data = history.load_data(
         datadir=testdatadir,
-        ticker_interval='1m',
+        timeframe='1m',
         pairs=['UNITTEST/BTC']
     )['UNITTEST/BTC']
     min_date = int(data.iloc[0]['date'].timestamp())
@@ -517,7 +517,7 @@ def test_get_timeframe(default_conf, mocker, testdatadir) -> None:
     data = strategy.tickerdata_to_dataframe(
         history.load_data(
             datadir=testdatadir,
-            ticker_interval='1m',
+            timeframe='1m',
             pairs=['UNITTEST/BTC']
         )
     )
@@ -533,7 +533,7 @@ def test_validate_backtest_data_warn(default_conf, mocker, caplog, testdatadir)
     data = strategy.tickerdata_to_dataframe(
         history.load_data(
             datadir=testdatadir,
-            ticker_interval='1m',
+            timeframe='1m',
             pairs=['UNITTEST/BTC'],
             fill_up_missing=False
         )
@@ -556,7 +556,7 @@ def test_validate_backtest_data(default_conf, mocker, caplog, testdatadir) -> No
     data = strategy.tickerdata_to_dataframe(
         history.load_data(
             datadir=testdatadir,
-            ticker_interval='5m',
+            timeframe='5m',
             pairs=['UNITTEST/BTC'],
             timerange=timerange
        )
@@ -669,10 +669,10 @@ def test_convert_trades_to_ohlcv(mocker, default_conf, testdatadir, caplog):
     file5 = testdatadir / 'XRP_ETH-5m.json'
     # Compare downloaded dataset with converted dataset
     dfbak_1m = history.load_pair_history(datadir=testdatadir,
-                                         ticker_interval="1m",
+                                         timeframe="1m",
                                          pair=pair)
     dfbak_5m = history.load_pair_history(datadir=testdatadir,
-                                         ticker_interval="5m",
+                                         timeframe="5m",
                                          pair=pair)
     _backup_file(file1, copy_file=True)
@@ -686,10 +686,10 @@ def test_convert_trades_to_ohlcv(mocker, default_conf, testdatadir, caplog):
     assert log_has("Deleting existing data for pair XRP/ETH, interval 1m.", caplog)
     # Load new data
     df_1m = history.load_pair_history(datadir=testdatadir,
-                                      ticker_interval="1m",
+                                      timeframe="1m",
                                       pair=pair)
     df_5m = history.load_pair_history(datadir=testdatadir,
-                                      ticker_interval="5m",
+                                      timeframe="5m",
                                       pair=pair)
     assert df_1m.equals(dfbak_1m)

View File

@@ -255,7 +255,7 @@ def test_edge_heartbeat_calculate(mocker, edge_conf):
     assert edge.calculate() is False
-def mocked_load_data(datadir, pairs=[], ticker_interval='0m', refresh_pairs=False,
+def mocked_load_data(datadir, pairs=[], timeframe='0m', refresh_pairs=False,
                      timerange=None, exchange=None, *args, **kwargs):
     hz = 0.1
     base = 0.001

View File

@@ -50,7 +50,7 @@ def trim_dictlist(dict_list, num):
 def load_data_test(what, testdatadir):
     timerange = TimeRange.parse_timerange('1510694220-1510700340')
-    pair = history.load_tickerdata_file(testdatadir, ticker_interval='1m',
+    pair = history.load_tickerdata_file(testdatadir, timeframe='1m',
                                         pair='UNITTEST/BTC', timerange=timerange)
     datalen = len(pair)
@@ -116,7 +116,7 @@ def simple_backtest(config, contour, num_results, mocker, testdatadir) -> None:
     assert len(results) == num_results
-def mocked_load_data(datadir, pairs=[], ticker_interval='0m', refresh_pairs=False,
+def mocked_load_data(datadir, pairs=[], timeframe='0m', refresh_pairs=False,
                      timerange=None, exchange=None, live=False, *args, **kwargs):
     tickerdata = history.load_tickerdata_file(datadir, 'UNITTEST/BTC', '1m', timerange=timerange)
     pairdata = {'UNITTEST/BTC': parse_ticker_dataframe(tickerdata, '1m', pair="UNITTEST/BTC",
@@ -126,14 +126,14 @@ def mocked_load_data(datadir, pairs=[], ticker_interval='0m', refresh_pairs=Fals
 # use for mock ccxt.fetch_ohlvc'
 def _load_pair_as_ticks(pair, tickfreq):
-    ticks = history.load_tickerdata_file(None, ticker_interval=tickfreq, pair=pair)
+    ticks = history.load_tickerdata_file(None, timeframe=tickfreq, pair=pair)
     ticks = ticks[-201:]
     return ticks
 # FIX: fixturize this?
 def _make_backtest_conf(mocker, datadir, conf=None, pair='UNITTEST/BTC', record=None):
-    data = history.load_data(datadir=datadir, ticker_interval='1m', pairs=[pair])
+    data = history.load_data(datadir=datadir, timeframe='1m', pairs=[pair])
     data = trim_dictlist(data, -201)
     patch_exchange(mocker)
     backtesting = Backtesting(conf)
@@ -522,7 +522,7 @@ def test_backtest(default_conf, fee, mocker, testdatadir) -> None:
     backtesting = Backtesting(default_conf)
     pair = 'UNITTEST/BTC'
     timerange = TimeRange('date', None, 1517227800, 0)
-    data = history.load_data(datadir=testdatadir, ticker_interval='5m', pairs=['UNITTEST/BTC'],
+    data = history.load_data(datadir=testdatadir, timeframe='5m', pairs=['UNITTEST/BTC'],
                              timerange=timerange)
     data_processed = backtesting.strategy.tickerdata_to_dataframe(data)
     min_date, max_date = get_timeframe(data_processed)
@@ -576,9 +576,9 @@ def test_backtest_1min_ticker_interval(default_conf, fee, mocker, testdatadir) -
     patch_exchange(mocker)
     backtesting = Backtesting(default_conf)
-    # Run a backtesting for an exiting 1min ticker_interval
+    # Run a backtesting for an exiting 1min timeframe
     timerange = TimeRange.parse_timerange('1510688220-1510700340')
-    data = history.load_data(datadir=testdatadir, ticker_interval='1m', pairs=['UNITTEST/BTC'],
+    data = history.load_data(datadir=testdatadir, timeframe='1m', pairs=['UNITTEST/BTC'],
                              timerange=timerange)
     processed = backtesting.strategy.tickerdata_to_dataframe(data)
     min_date, max_date = get_timeframe(processed)
@@ -688,7 +688,7 @@ def test_backtest_multi_pair(default_conf, fee, mocker, tres, pair, testdatadir)
     patch_exchange(mocker)
     pairs = ['ADA/BTC', 'DASH/BTC', 'ETH/BTC', 'LTC/BTC', 'NXT/BTC']
-    data = history.load_data(datadir=testdatadir, ticker_interval='5m', pairs=pairs)
+    data = history.load_data(datadir=testdatadir, timeframe='5m', pairs=pairs)
     # Only use 500 lines to increase performance
     data = trim_dictlist(data, -500)

View File

@@ -64,7 +64,7 @@ def test_add_indicators(default_conf, testdatadir, caplog):
     pair = "UNITTEST/BTC"
     timerange = TimeRange(None, 'line', 0, -1000)
-    data = history.load_pair_history(pair=pair, ticker_interval='1m',
+    data = history.load_pair_history(pair=pair, timeframe='1m',
                                      datadir=testdatadir, timerange=timerange)
     indicators1 = ["ema10"]
     indicators2 = ["macd"]
@@ -129,7 +129,7 @@ def test_generate_candlestick_graph_no_signals_no_trades(default_conf, mocker, t
     pair = "UNITTEST/BTC"
     timerange = TimeRange(None, 'line', 0, -1000)
-    data = history.load_pair_history(pair=pair, ticker_interval='1m',
+    data = history.load_pair_history(pair=pair, timeframe='1m',
                                      datadir=testdatadir, timerange=timerange)
     data['buy'] = 0
     data['sell'] = 0
@@ -164,7 +164,7 @@ def test_generate_candlestick_graph_no_trades(default_conf, mocker, testdatadir)
                  MagicMock(side_effect=fig_generating_mock))
     pair = 'UNITTEST/BTC'
     timerange = TimeRange(None, 'line', 0, -1000)
-    data = history.load_pair_history(pair=pair, ticker_interval='1m',
+    data = history.load_pair_history(pair=pair, timeframe='1m',
                                      datadir=testdatadir, timerange=timerange)
     # Generate buy/sell signals and indicators
@@ -228,7 +228,7 @@ def test_add_profit(testdatadir):
     bt_data = load_backtest_data(filename)
     timerange = TimeRange.parse_timerange("20180110-20180112")
-    df = history.load_pair_history(pair="TRX/BTC", ticker_interval='5m',
+    df = history.load_pair_history(pair="TRX/BTC", timeframe='5m',
                                    datadir=testdatadir, timerange=timerange)
     fig = generate_empty_figure()
@@ -251,7 +251,7 @@ def test_generate_profit_graph(testdatadir):
     tickers = history.load_data(datadir=testdatadir,
                                 pairs=pairs,
-                                ticker_interval='5m',
+                                timeframe='5m',
                                 timerange=timerange
                                 )
     trades = trades[trades['pair'].isin(pairs)]