From e4bdb92521d38c062a0b266345727cfbe893899a Mon Sep 17 00:00:00 2001 From: Matthias Date: Sat, 2 Nov 2019 20:19:13 +0100 Subject: [PATCH 1/7] Replace some occurances of ticker_interval with timeframe --- freqtrade/data/converter.py | 10 ++--- freqtrade/data/dataprovider.py | 16 +++---- freqtrade/data/history.py | 72 +++++++++++++++--------------- freqtrade/edge/__init__.py | 2 +- freqtrade/optimize/backtesting.py | 2 +- freqtrade/plot/plotting.py | 13 +++--- tests/data/test_btanalysis.py | 8 ++-- tests/data/test_converter.py | 2 +- tests/data/test_dataprovider.py | 2 +- tests/data/test_history.py | 58 ++++++++++++------------ tests/edge/test_edge.py | 2 +- tests/optimize/test_backtesting.py | 16 +++---- tests/test_plotting.py | 10 ++--- 13 files changed, 107 insertions(+), 106 deletions(-) diff --git a/freqtrade/data/converter.py b/freqtrade/data/converter.py index 1ef224978..e45dd451e 100644 --- a/freqtrade/data/converter.py +++ b/freqtrade/data/converter.py @@ -10,13 +10,13 @@ from pandas import DataFrame, to_datetime logger = logging.getLogger(__name__) -def parse_ticker_dataframe(ticker: list, ticker_interval: str, pair: str, *, +def parse_ticker_dataframe(ticker: list, timeframe: str, pair: str, *, fill_missing: bool = True, drop_incomplete: bool = True) -> DataFrame: """ Converts a ticker-list (format ccxt.fetch_ohlcv) to a Dataframe :param ticker: ticker list, as returned by exchange.async_get_candle_history - :param ticker_interval: ticker_interval (e.g. 5m). Used to fill up eventual missing data + :param timeframe: timeframe (e.g. 5m). Used to fill up eventual missing data :param pair: Pair this data is for (used to warn if fillup was necessary) :param fill_missing: fill up missing candles with 0 candles (see ohlcv_fill_up_missing_data for details) @@ -52,12 +52,12 @@ def parse_ticker_dataframe(ticker: list, ticker_interval: str, pair: str, *, logger.debug('Dropping last candle') if fill_missing: - return ohlcv_fill_up_missing_data(frame, ticker_interval, pair) + return ohlcv_fill_up_missing_data(frame, timeframe, pair) else: return frame -def ohlcv_fill_up_missing_data(dataframe: DataFrame, ticker_interval: str, pair: str) -> DataFrame: +def ohlcv_fill_up_missing_data(dataframe: DataFrame, timeframe: str, pair: str) -> DataFrame: """ Fills up missing data with 0 volume rows, using the previous close as price for "open", "high" "low" and "close", volume is set to 0 @@ -72,7 +72,7 @@ def ohlcv_fill_up_missing_data(dataframe: DataFrame, ticker_interval: str, pair: 'close': 'last', 'volume': 'sum' } - ticker_minutes = timeframe_to_minutes(ticker_interval) + ticker_minutes = timeframe_to_minutes(timeframe) # Resample to create "NAN" values df = dataframe.resample(f'{ticker_minutes}min', on='date').agg(ohlc_dict) diff --git a/freqtrade/data/dataprovider.py b/freqtrade/data/dataprovider.py index f0787281a..ce4554cbb 100644 --- a/freqtrade/data/dataprovider.py +++ b/freqtrade/data/dataprovider.py @@ -42,29 +42,29 @@ class DataProvider: """ return list(self._exchange._klines.keys()) - def ohlcv(self, pair: str, ticker_interval: str = None, copy: bool = True) -> DataFrame: + def ohlcv(self, pair: str, timeframe: str = None, copy: bool = True) -> DataFrame: """ Get ohlcv data for the given pair as DataFrame Please use the `available_pairs` method to verify which pairs are currently cached. :param pair: pair to get the data for - :param ticker_interval: ticker interval to get data for + :param timeframe: Ticker timeframe to get data for :param copy: copy dataframe before returning if True. 
Use False only for read-only operations (where the dataframe is not modified) """ if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE): - return self._exchange.klines((pair, ticker_interval or self._config['ticker_interval']), + return self._exchange.klines((pair, timeframe or self._config['ticker_interval']), copy=copy) else: return DataFrame() - def historic_ohlcv(self, pair: str, ticker_interval: str = None) -> DataFrame: + def historic_ohlcv(self, pair: str, timeframe: str = None) -> DataFrame: """ Get stored historic ohlcv data :param pair: pair to get the data for - :param ticker_interval: ticker interval to get data for + :param timeframe: ticker interval to get data for """ return load_pair_history(pair=pair, - ticker_interval=ticker_interval or self._config['ticker_interval'], + timeframe=timeframe or self._config['ticker_interval'], datadir=Path(self._config['datadir']) ) @@ -77,10 +77,10 @@ class DataProvider: """ if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE): # Get live ohlcv data. - data = self.ohlcv(pair=pair, ticker_interval=ticker_interval) + data = self.ohlcv(pair=pair, timeframe=ticker_interval) else: # Get historic ohlcv data (cached on disk). - data = self.historic_ohlcv(pair=pair, ticker_interval=ticker_interval) + data = self.historic_ohlcv(pair=pair, timeframe=ticker_interval) if len(data) == 0: logger.warning(f"No data found for ({pair}, {ticker_interval}).") return data diff --git a/freqtrade/data/history.py b/freqtrade/data/history.py index 3dd40d2b4..8e4bc8ced 100644 --- a/freqtrade/data/history.py +++ b/freqtrade/data/history.py @@ -63,13 +63,13 @@ def trim_dataframe(df: DataFrame, timerange: TimeRange) -> DataFrame: return df -def load_tickerdata_file(datadir: Path, pair: str, ticker_interval: str, +def load_tickerdata_file(datadir: Path, pair: str, timeframe: str, timerange: Optional[TimeRange] = None) -> Optional[list]: """ Load a pair from file, either .json.gz or .json :return: tickerlist or None if unsuccessful """ - filename = pair_data_filename(datadir, pair, ticker_interval) + filename = pair_data_filename(datadir, pair, timeframe) pairdata = misc.file_load_json(filename) if not pairdata: return [] @@ -80,11 +80,11 @@ def load_tickerdata_file(datadir: Path, pair: str, ticker_interval: str, def store_tickerdata_file(datadir: Path, pair: str, - ticker_interval: str, data: list, is_zip: bool = False): + timeframe: str, data: list, is_zip: bool = False): """ Stores tickerdata to file """ - filename = pair_data_filename(datadir, pair, ticker_interval) + filename = pair_data_filename(datadir, pair, timeframe) misc.file_dump_json(filename, data, is_zip=is_zip) @@ -121,7 +121,7 @@ def _validate_pairdata(pair, pairdata, timerange: TimeRange): def load_pair_history(pair: str, - ticker_interval: str, + timeframe: str, datadir: Path, timerange: Optional[TimeRange] = None, refresh_pairs: bool = False, @@ -133,7 +133,7 @@ def load_pair_history(pair: str, """ Loads cached ticker history for the given pair. :param pair: Pair to load data for - :param ticker_interval: Ticker-interval (e.g. "5m") + :param timeframe: Ticker timeframe (e.g. "5m") :param datadir: Path to the data storage location. :param timerange: Limit data to be loaded to this timerange :param refresh_pairs: Refresh pairs from exchange. 
@@ -147,34 +147,34 @@ def load_pair_history(pair: str, timerange_startup = deepcopy(timerange) if startup_candles > 0 and timerange_startup: - timerange_startup.subtract_start(timeframe_to_seconds(ticker_interval) * startup_candles) + timerange_startup.subtract_start(timeframe_to_seconds(timeframe) * startup_candles) # The user forced the refresh of pairs if refresh_pairs: download_pair_history(datadir=datadir, exchange=exchange, pair=pair, - ticker_interval=ticker_interval, + timeframe=timeframe, timerange=timerange) - pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange_startup) + pairdata = load_tickerdata_file(datadir, pair, timeframe, timerange=timerange_startup) if pairdata: if timerange_startup: _validate_pairdata(pair, pairdata, timerange_startup) - return parse_ticker_dataframe(pairdata, ticker_interval, pair=pair, + return parse_ticker_dataframe(pairdata, timeframe, pair=pair, fill_missing=fill_up_missing, drop_incomplete=drop_incomplete) else: logger.warning( - f'No history data for pair: "{pair}", interval: {ticker_interval}. ' + f'No history data for pair: "{pair}", timeframe: {timeframe}. ' 'Use `freqtrade download-data` to download the data' ) return None def load_data(datadir: Path, - ticker_interval: str, + timeframe: str, pairs: List[str], refresh_pairs: bool = False, exchange: Optional[Exchange] = None, @@ -186,7 +186,7 @@ def load_data(datadir: Path, """ Loads ticker history data for a list of pairs :param datadir: Path to the data storage location. - :param ticker_interval: Ticker-interval (e.g. "5m") + :param timeframe: Ticker Timeframe (e.g. "5m") :param pairs: List of pairs to load :param refresh_pairs: Refresh pairs from exchange. (Note: Requires exchange to be passed as well.) @@ -206,7 +206,7 @@ def load_data(datadir: Path, logger.info(f'Using indicator startup period: {startup_candles} ...') for pair in pairs: - hist = load_pair_history(pair=pair, ticker_interval=ticker_interval, + hist = load_pair_history(pair=pair, timeframe=timeframe, datadir=datadir, timerange=timerange, refresh_pairs=refresh_pairs, exchange=exchange, @@ -220,9 +220,9 @@ def load_data(datadir: Path, return result -def pair_data_filename(datadir: Path, pair: str, ticker_interval: str) -> Path: +def pair_data_filename(datadir: Path, pair: str, timeframe: str) -> Path: pair_s = pair.replace("/", "_") - filename = datadir.joinpath(f'{pair_s}-{ticker_interval}.json') + filename = datadir.joinpath(f'{pair_s}-{timeframe}.json') return filename @@ -232,7 +232,7 @@ def pair_trades_filename(datadir: Path, pair: str) -> Path: return filename -def _load_cached_data_for_updating(datadir: Path, pair: str, ticker_interval: str, +def _load_cached_data_for_updating(datadir: Path, pair: str, timeframe: str, timerange: Optional[TimeRange]) -> Tuple[List[Any], Optional[int]]: """ @@ -250,12 +250,12 @@ def _load_cached_data_for_updating(datadir: Path, pair: str, ticker_interval: st if timerange.starttype == 'date': since_ms = timerange.startts * 1000 elif timerange.stoptype == 'line': - num_minutes = timerange.stopts * timeframe_to_minutes(ticker_interval) + num_minutes = timerange.stopts * timeframe_to_minutes(timeframe) since_ms = arrow.utcnow().shift(minutes=num_minutes).timestamp * 1000 # read the cached file # Intentionally don't pass timerange in - since we need to load the full dataset. 
- data = load_tickerdata_file(datadir, pair, ticker_interval) + data = load_tickerdata_file(datadir, pair, timeframe) # remove the last item, could be incomplete candle if data: data.pop() @@ -276,18 +276,18 @@ def _load_cached_data_for_updating(datadir: Path, pair: str, ticker_interval: st def download_pair_history(datadir: Path, exchange: Optional[Exchange], pair: str, - ticker_interval: str = '5m', + timeframe: str = '5m', timerange: Optional[TimeRange] = None) -> bool: """ Download the latest ticker intervals from the exchange for the pair passed in parameters - The data is downloaded starting from the last correct ticker interval data that + The data is downloaded starting from the last correct data that exists in a cache. If timerange starts earlier than the data in the cache, the full data will be redownloaded Based on @Rybolov work: https://github.com/rybolov/freqtrade-data :param pair: pair to download - :param ticker_interval: ticker interval + :param timeframe: Ticker Timeframe (e.g 5m) :param timerange: range of time to download :return: bool with success state """ @@ -298,17 +298,17 @@ def download_pair_history(datadir: Path, try: logger.info( - f'Download history data for pair: "{pair}", interval: {ticker_interval} ' + f'Download history data for pair: "{pair}", timeframe: {timeframe} ' f'and store in {datadir}.' ) - data, since_ms = _load_cached_data_for_updating(datadir, pair, ticker_interval, timerange) + data, since_ms = _load_cached_data_for_updating(datadir, pair, timeframe, timerange) logger.debug("Current Start: %s", misc.format_ms_time(data[1][0]) if data else 'None') logger.debug("Current End: %s", misc.format_ms_time(data[-1][0]) if data else 'None') # Default since_ms to 30 days if nothing is given - new_data = exchange.get_historic_ohlcv(pair=pair, ticker_interval=ticker_interval, + new_data = exchange.get_historic_ohlcv(pair=pair, ticker_interval=timeframe, since_ms=since_ms if since_ms else int(arrow.utcnow().shift( @@ -318,12 +318,12 @@ def download_pair_history(datadir: Path, logger.debug("New Start: %s", misc.format_ms_time(data[0][0])) logger.debug("New End: %s", misc.format_ms_time(data[-1][0])) - store_tickerdata_file(datadir, pair, ticker_interval, data=data) + store_tickerdata_file(datadir, pair, timeframe, data=data) return True except Exception as e: logger.error( - f'Failed to download history data for pair: "{pair}", interval: {ticker_interval}. ' + f'Failed to download history data for pair: "{pair}", timeframe: {timeframe}. 
' f'Error: {e}' ) return False @@ -343,17 +343,17 @@ def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes pairs_not_available.append(pair) logger.info(f"Skipping pair {pair}...") continue - for ticker_interval in timeframes: + for timeframe in timeframes: - dl_file = pair_data_filename(dl_path, pair, ticker_interval) + dl_file = pair_data_filename(dl_path, pair, timeframe) if erase and dl_file.exists(): logger.info( - f'Deleting existing data for pair {pair}, interval {ticker_interval}.') + f'Deleting existing data for pair {pair}, interval {timeframe}.') dl_file.unlink() - logger.info(f'Downloading pair {pair}, interval {ticker_interval}.') + logger.info(f'Downloading pair {pair}, interval {timeframe}.') download_pair_history(datadir=dl_path, exchange=exchange, - pair=pair, ticker_interval=str(ticker_interval), + pair=pair, timeframe=str(timeframe), timerange=timerange) return pairs_not_available @@ -459,7 +459,7 @@ def get_timeframe(data: Dict[str, DataFrame]) -> Tuple[arrow.Arrow, arrow.Arrow] def validate_backtest_data(data: DataFrame, pair: str, min_date: datetime, - max_date: datetime, ticker_interval_mins: int) -> bool: + max_date: datetime, timeframe_mins: int) -> bool: """ Validates preprocessed backtesting data for missing values and shows warnings about it that. @@ -467,10 +467,10 @@ def validate_backtest_data(data: DataFrame, pair: str, min_date: datetime, :param pair: pair used for log output. :param min_date: start-date of the data :param max_date: end-date of the data - :param ticker_interval_mins: ticker interval in minutes + :param timeframe_mins: ticker Timeframe in minutes """ - # total difference in minutes / interval-minutes - expected_frames = int((max_date - min_date).total_seconds() // 60 // ticker_interval_mins) + # total difference in minutes / timeframe-minutes + expected_frames = int((max_date - min_date).total_seconds() // 60 // timeframe_mins) found_missing = False dflen = len(data) if dflen < expected_frames: diff --git a/freqtrade/edge/__init__.py b/freqtrade/edge/__init__.py index 883bf4a0f..afd20cf61 100644 --- a/freqtrade/edge/__init__.py +++ b/freqtrade/edge/__init__.py @@ -97,7 +97,7 @@ class Edge: data = history.load_data( datadir=Path(self.config['datadir']), pairs=pairs, - ticker_interval=self.strategy.ticker_interval, + timeframe=self.strategy.ticker_interval, refresh_pairs=self._refresh_pairs, exchange=self.exchange, timerange=self._timerange, diff --git a/freqtrade/optimize/backtesting.py b/freqtrade/optimize/backtesting.py index ee3a135d2..58fd1f772 100644 --- a/freqtrade/optimize/backtesting.py +++ b/freqtrade/optimize/backtesting.py @@ -108,7 +108,7 @@ class Backtesting: data = history.load_data( datadir=Path(self.config['datadir']), pairs=self.config['exchange']['pair_whitelist'], - ticker_interval=self.ticker_interval, + timeframe=self.ticker_interval, timerange=timerange, startup_candles=self.required_startup, fail_without_data=True, diff --git a/freqtrade/plot/plotting.py b/freqtrade/plot/plotting.py index bbdb52ca1..01396aea9 100644 --- a/freqtrade/plot/plotting.py +++ b/freqtrade/plot/plotting.py @@ -39,7 +39,7 @@ def init_plotscript(config): tickers = history.load_data( datadir=Path(str(config.get("datadir"))), pairs=pairs, - ticker_interval=config.get('ticker_interval', '5m'), + timeframe=config.get('ticker_interval', '5m'), timerange=timerange, ) @@ -300,12 +300,12 @@ def generate_profit_graph(pairs: str, tickers: Dict[str, pd.DataFrame], return fig -def generate_plot_filename(pair, ticker_interval) -> str: 
+def generate_plot_filename(pair, timeframe) -> str: """ - Generate filenames per pair/ticker_interval to be used for storing plots + Generate filenames per pair/timeframe to be used for storing plots """ pair_name = pair.replace("/", "_") - file_name = 'freqtrade-plot-' + pair_name + '-' + ticker_interval + '.html' + file_name = 'freqtrade-plot-' + pair_name + '-' + timeframe + '.html' logger.info('Generate plot file for %s', pair) @@ -316,8 +316,9 @@ def store_plot_file(fig, filename: str, directory: Path, auto_open: bool = False """ Generate a plot html file from pre populated fig plotly object :param fig: Plotly Figure to plot - :param pair: Pair to plot (used as filename and Plot title) - :param ticker_interval: Used as part of the filename + :param filename: Name to store the file as + :param directory: Directory to store the file in + :param auto_open: Automatically open files saved :return: None """ directory.mkdir(parents=True, exist_ok=True) diff --git a/tests/data/test_btanalysis.py b/tests/data/test_btanalysis.py index b49344bbd..13711c63e 100644 --- a/tests/data/test_btanalysis.py +++ b/tests/data/test_btanalysis.py @@ -56,7 +56,7 @@ def test_extract_trades_of_period(testdatadir): # 2018-11-14 06:07:00 timerange = TimeRange('date', None, 1510639620, 0) - data = load_pair_history(pair=pair, ticker_interval='1m', + data = load_pair_history(pair=pair, timeframe='1m', datadir=testdatadir, timerange=timerange) trades = DataFrame( @@ -122,7 +122,7 @@ def test_combine_tickers_with_mean(testdatadir): pairs = ["ETH/BTC", "ADA/BTC"] tickers = load_data(datadir=testdatadir, pairs=pairs, - ticker_interval='5m' + timeframe='5m' ) df = combine_tickers_with_mean(tickers) assert isinstance(df, DataFrame) @@ -136,7 +136,7 @@ def test_create_cum_profit(testdatadir): bt_data = load_backtest_data(filename) timerange = TimeRange.parse_timerange("20180110-20180112") - df = load_pair_history(pair="TRX/BTC", ticker_interval='5m', + df = load_pair_history(pair="TRX/BTC", timeframe='5m', datadir=testdatadir, timerange=timerange) cum_profits = create_cum_profit(df.set_index('date'), @@ -154,7 +154,7 @@ def test_create_cum_profit1(testdatadir): bt_data.loc[:, 'close_time'] = bt_data.loc[:, 'close_time'] + DateOffset(seconds=20) timerange = TimeRange.parse_timerange("20180110-20180112") - df = load_pair_history(pair="TRX/BTC", ticker_interval='5m', + df = load_pair_history(pair="TRX/BTC", timeframe='5m', datadir=testdatadir, timerange=timerange) cum_profits = create_cum_profit(df.set_index('date'), diff --git a/tests/data/test_converter.py b/tests/data/test_converter.py index e773a970e..92494ff1e 100644 --- a/tests/data/test_converter.py +++ b/tests/data/test_converter.py @@ -23,7 +23,7 @@ def test_parse_ticker_dataframe(ticker_history_list, caplog): def test_ohlcv_fill_up_missing_data(testdatadir, caplog): data = load_pair_history(datadir=testdatadir, - ticker_interval='1m', + timeframe='1m', pair='UNITTEST/BTC', fill_up_missing=False) caplog.set_level(logging.DEBUG) diff --git a/tests/data/test_dataprovider.py b/tests/data/test_dataprovider.py index 9a857750b..0318e5a82 100644 --- a/tests/data/test_dataprovider.py +++ b/tests/data/test_dataprovider.py @@ -45,7 +45,7 @@ def test_historic_ohlcv(mocker, default_conf, ticker_history): data = dp.historic_ohlcv("UNITTEST/BTC", "5m") assert isinstance(data, DataFrame) assert historymock.call_count == 1 - assert historymock.call_args_list[0][1]["ticker_interval"] == "5m" + assert historymock.call_args_list[0][1]["timeframe"] == "5m" def 
test_get_pair_dataframe(mocker, default_conf, ticker_history): diff --git a/tests/data/test_history.py b/tests/data/test_history.py index 89120b4f5..65feaf03e 100644 --- a/tests/data/test_history.py +++ b/tests/data/test_history.py @@ -64,20 +64,20 @@ def _clean_test_file(file: Path) -> None: def test_load_data_30min_ticker(mocker, caplog, default_conf, testdatadir) -> None: - ld = history.load_pair_history(pair='UNITTEST/BTC', ticker_interval='30m', datadir=testdatadir) + ld = history.load_pair_history(pair='UNITTEST/BTC', timeframe='30m', datadir=testdatadir) assert isinstance(ld, DataFrame) assert not log_has( - 'Download history data for pair: "UNITTEST/BTC", interval: 30m ' + 'Download history data for pair: "UNITTEST/BTC", timeframe: 30m ' 'and store in None.', caplog ) def test_load_data_7min_ticker(mocker, caplog, default_conf, testdatadir) -> None: - ld = history.load_pair_history(pair='UNITTEST/BTC', ticker_interval='7m', datadir=testdatadir) + ld = history.load_pair_history(pair='UNITTEST/BTC', timeframe='7m', datadir=testdatadir) assert not isinstance(ld, DataFrame) assert ld is None assert log_has( - 'No history data for pair: "UNITTEST/BTC", interval: 7m. ' + 'No history data for pair: "UNITTEST/BTC", timeframe: 7m. ' 'Use `freqtrade download-data` to download the data', caplog ) @@ -86,7 +86,7 @@ def test_load_data_1min_ticker(ticker_history, mocker, caplog, testdatadir) -> N mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv', return_value=ticker_history) file = testdatadir / 'UNITTEST_BTC-1m.json' _backup_file(file, copy_file=True) - history.load_data(datadir=testdatadir, ticker_interval='1m', pairs=['UNITTEST/BTC']) + history.load_data(datadir=testdatadir, timeframe='1m', pairs=['UNITTEST/BTC']) assert file.is_file() assert not log_has( 'Download history data for pair: "UNITTEST/BTC", interval: 1m ' @@ -99,7 +99,7 @@ def test_load_data_startup_candles(mocker, caplog, default_conf, testdatadir) -> ltfmock = mocker.patch('freqtrade.data.history.load_tickerdata_file', MagicMock(return_value=None)) timerange = TimeRange('date', None, 1510639620, 0) - history.load_pair_history(pair='UNITTEST/BTC', ticker_interval='1m', + history.load_pair_history(pair='UNITTEST/BTC', timeframe='1m', datadir=testdatadir, timerange=timerange, startup_candles=20, ) @@ -122,28 +122,28 @@ def test_load_data_with_new_pair_1min(ticker_history_list, mocker, caplog, _backup_file(file) # do not download a new pair if refresh_pairs isn't set history.load_pair_history(datadir=testdatadir, - ticker_interval='1m', + timeframe='1m', pair='MEME/BTC') assert not file.is_file() assert log_has( - 'No history data for pair: "MEME/BTC", interval: 1m. ' + 'No history data for pair: "MEME/BTC", timeframe: 1m. 
' 'Use `freqtrade download-data` to download the data', caplog ) # download a new pair if refresh_pairs is set history.load_pair_history(datadir=testdatadir, - ticker_interval='1m', + timeframe='1m', refresh_pairs=True, exchange=exchange, pair='MEME/BTC') assert file.is_file() assert log_has_re( - 'Download history data for pair: "MEME/BTC", interval: 1m ' + 'Download history data for pair: "MEME/BTC", timeframe: 1m ' 'and store in .*', caplog ) with pytest.raises(OperationalException, match=r'Exchange needs to be initialized when.*'): history.load_pair_history(datadir=testdatadir, - ticker_interval='1m', + timeframe='1m', refresh_pairs=True, exchange=None, pair='MEME/BTC') @@ -269,10 +269,10 @@ def test_download_pair_history(ticker_history_list, mocker, default_conf, testda assert download_pair_history(datadir=testdatadir, exchange=exchange, pair='MEME/BTC', - ticker_interval='1m') + timeframe='1m') assert download_pair_history(datadir=testdatadir, exchange=exchange, pair='CFI/BTC', - ticker_interval='1m') + timeframe='1m') assert not exchange._pairs_last_refresh_time assert file1_1.is_file() assert file2_1.is_file() @@ -286,10 +286,10 @@ def test_download_pair_history(ticker_history_list, mocker, default_conf, testda assert download_pair_history(datadir=testdatadir, exchange=exchange, pair='MEME/BTC', - ticker_interval='5m') + timeframe='5m') assert download_pair_history(datadir=testdatadir, exchange=exchange, pair='CFI/BTC', - ticker_interval='5m') + timeframe='5m') assert not exchange._pairs_last_refresh_time assert file1_5.is_file() assert file2_5.is_file() @@ -307,8 +307,8 @@ def test_download_pair_history2(mocker, default_conf, testdatadir) -> None: json_dump_mock = mocker.patch('freqtrade.misc.file_dump_json', return_value=None) mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv', return_value=tick) exchange = get_patched_exchange(mocker, default_conf) - download_pair_history(testdatadir, exchange, pair="UNITTEST/BTC", ticker_interval='1m') - download_pair_history(testdatadir, exchange, pair="UNITTEST/BTC", ticker_interval='3m') + download_pair_history(testdatadir, exchange, pair="UNITTEST/BTC", timeframe='1m') + download_pair_history(testdatadir, exchange, pair="UNITTEST/BTC", timeframe='3m') assert json_dump_mock.call_count == 2 @@ -326,12 +326,12 @@ def test_download_backtesting_data_exception(ticker_history, mocker, caplog, assert not download_pair_history(datadir=testdatadir, exchange=exchange, pair='MEME/BTC', - ticker_interval='1m') + timeframe='1m') # clean files freshly downloaded _clean_test_file(file1_1) _clean_test_file(file1_5) assert log_has( - 'Failed to download history data for pair: "MEME/BTC", interval: 1m. ' + 'Failed to download history data for pair: "MEME/BTC", timeframe: 1m. 
' 'Error: File Error', caplog ) @@ -369,7 +369,7 @@ def test_load_partial_missing(testdatadir, caplog) -> None: caplog.clear() start = arrow.get('2018-01-10T00:00:00') end = arrow.get('2018-02-20T00:00:00') - tickerdata = history.load_data(datadir=testdatadir, ticker_interval='5m', + tickerdata = history.load_data(datadir=testdatadir, timeframe='5m', pairs=['UNITTEST/BTC'], timerange=TimeRange('date', 'date', start.timestamp, end.timestamp)) @@ -390,7 +390,7 @@ def test_init(default_conf, mocker) -> None: exchange=exchange, pairs=[], refresh_pairs=True, - ticker_interval=default_conf['ticker_interval'] + timeframe=default_conf['ticker_interval'] ) @@ -449,7 +449,7 @@ def test_trim_tickerlist(testdatadir) -> None: def test_trim_dataframe(testdatadir) -> None: data = history.load_data( datadir=testdatadir, - ticker_interval='1m', + timeframe='1m', pairs=['UNITTEST/BTC'] )['UNITTEST/BTC'] min_date = int(data.iloc[0]['date'].timestamp()) @@ -517,7 +517,7 @@ def test_get_timeframe(default_conf, mocker, testdatadir) -> None: data = strategy.tickerdata_to_dataframe( history.load_data( datadir=testdatadir, - ticker_interval='1m', + timeframe='1m', pairs=['UNITTEST/BTC'] ) ) @@ -533,7 +533,7 @@ def test_validate_backtest_data_warn(default_conf, mocker, caplog, testdatadir) data = strategy.tickerdata_to_dataframe( history.load_data( datadir=testdatadir, - ticker_interval='1m', + timeframe='1m', pairs=['UNITTEST/BTC'], fill_up_missing=False ) @@ -556,7 +556,7 @@ def test_validate_backtest_data(default_conf, mocker, caplog, testdatadir) -> No data = strategy.tickerdata_to_dataframe( history.load_data( datadir=testdatadir, - ticker_interval='5m', + timeframe='5m', pairs=['UNITTEST/BTC'], timerange=timerange ) @@ -669,10 +669,10 @@ def test_convert_trades_to_ohlcv(mocker, default_conf, testdatadir, caplog): file5 = testdatadir / 'XRP_ETH-5m.json' # Compare downloaded dataset with converted dataset dfbak_1m = history.load_pair_history(datadir=testdatadir, - ticker_interval="1m", + timeframe="1m", pair=pair) dfbak_5m = history.load_pair_history(datadir=testdatadir, - ticker_interval="5m", + timeframe="5m", pair=pair) _backup_file(file1, copy_file=True) @@ -686,10 +686,10 @@ def test_convert_trades_to_ohlcv(mocker, default_conf, testdatadir, caplog): assert log_has("Deleting existing data for pair XRP/ETH, interval 1m.", caplog) # Load new data df_1m = history.load_pair_history(datadir=testdatadir, - ticker_interval="1m", + timeframe="1m", pair=pair) df_5m = history.load_pair_history(datadir=testdatadir, - ticker_interval="5m", + timeframe="5m", pair=pair) assert df_1m.equals(dfbak_1m) diff --git a/tests/edge/test_edge.py b/tests/edge/test_edge.py index e1af50768..001dc9591 100644 --- a/tests/edge/test_edge.py +++ b/tests/edge/test_edge.py @@ -255,7 +255,7 @@ def test_edge_heartbeat_calculate(mocker, edge_conf): assert edge.calculate() is False -def mocked_load_data(datadir, pairs=[], ticker_interval='0m', refresh_pairs=False, +def mocked_load_data(datadir, pairs=[], timeframe='0m', refresh_pairs=False, timerange=None, exchange=None, *args, **kwargs): hz = 0.1 base = 0.001 diff --git a/tests/optimize/test_backtesting.py b/tests/optimize/test_backtesting.py index 5912c5489..a5ab6d84c 100644 --- a/tests/optimize/test_backtesting.py +++ b/tests/optimize/test_backtesting.py @@ -50,7 +50,7 @@ def trim_dictlist(dict_list, num): def load_data_test(what, testdatadir): timerange = TimeRange.parse_timerange('1510694220-1510700340') - pair = history.load_tickerdata_file(testdatadir, ticker_interval='1m', + pair = 
history.load_tickerdata_file(testdatadir, timeframe='1m', pair='UNITTEST/BTC', timerange=timerange) datalen = len(pair) @@ -116,7 +116,7 @@ def simple_backtest(config, contour, num_results, mocker, testdatadir) -> None: assert len(results) == num_results -def mocked_load_data(datadir, pairs=[], ticker_interval='0m', refresh_pairs=False, +def mocked_load_data(datadir, pairs=[], timeframe='0m', refresh_pairs=False, timerange=None, exchange=None, live=False, *args, **kwargs): tickerdata = history.load_tickerdata_file(datadir, 'UNITTEST/BTC', '1m', timerange=timerange) pairdata = {'UNITTEST/BTC': parse_ticker_dataframe(tickerdata, '1m', pair="UNITTEST/BTC", @@ -126,14 +126,14 @@ def mocked_load_data(datadir, pairs=[], ticker_interval='0m', refresh_pairs=Fals # use for mock ccxt.fetch_ohlvc' def _load_pair_as_ticks(pair, tickfreq): - ticks = history.load_tickerdata_file(None, ticker_interval=tickfreq, pair=pair) + ticks = history.load_tickerdata_file(None, timeframe=tickfreq, pair=pair) ticks = ticks[-201:] return ticks # FIX: fixturize this? def _make_backtest_conf(mocker, datadir, conf=None, pair='UNITTEST/BTC', record=None): - data = history.load_data(datadir=datadir, ticker_interval='1m', pairs=[pair]) + data = history.load_data(datadir=datadir, timeframe='1m', pairs=[pair]) data = trim_dictlist(data, -201) patch_exchange(mocker) backtesting = Backtesting(conf) @@ -522,7 +522,7 @@ def test_backtest(default_conf, fee, mocker, testdatadir) -> None: backtesting = Backtesting(default_conf) pair = 'UNITTEST/BTC' timerange = TimeRange('date', None, 1517227800, 0) - data = history.load_data(datadir=testdatadir, ticker_interval='5m', pairs=['UNITTEST/BTC'], + data = history.load_data(datadir=testdatadir, timeframe='5m', pairs=['UNITTEST/BTC'], timerange=timerange) data_processed = backtesting.strategy.tickerdata_to_dataframe(data) min_date, max_date = get_timeframe(data_processed) @@ -576,9 +576,9 @@ def test_backtest_1min_ticker_interval(default_conf, fee, mocker, testdatadir) - patch_exchange(mocker) backtesting = Backtesting(default_conf) - # Run a backtesting for an exiting 1min ticker_interval + # Run a backtesting for an exiting 1min timeframe timerange = TimeRange.parse_timerange('1510688220-1510700340') - data = history.load_data(datadir=testdatadir, ticker_interval='1m', pairs=['UNITTEST/BTC'], + data = history.load_data(datadir=testdatadir, timeframe='1m', pairs=['UNITTEST/BTC'], timerange=timerange) processed = backtesting.strategy.tickerdata_to_dataframe(data) min_date, max_date = get_timeframe(processed) @@ -688,7 +688,7 @@ def test_backtest_multi_pair(default_conf, fee, mocker, tres, pair, testdatadir) patch_exchange(mocker) pairs = ['ADA/BTC', 'DASH/BTC', 'ETH/BTC', 'LTC/BTC', 'NXT/BTC'] - data = history.load_data(datadir=testdatadir, ticker_interval='5m', pairs=pairs) + data = history.load_data(datadir=testdatadir, timeframe='5m', pairs=pairs) # Only use 500 lines to increase performance data = trim_dictlist(data, -500) diff --git a/tests/test_plotting.py b/tests/test_plotting.py index 4a6efcd8e..f0d9578ac 100644 --- a/tests/test_plotting.py +++ b/tests/test_plotting.py @@ -64,7 +64,7 @@ def test_add_indicators(default_conf, testdatadir, caplog): pair = "UNITTEST/BTC" timerange = TimeRange(None, 'line', 0, -1000) - data = history.load_pair_history(pair=pair, ticker_interval='1m', + data = history.load_pair_history(pair=pair, timeframe='1m', datadir=testdatadir, timerange=timerange) indicators1 = ["ema10"] indicators2 = ["macd"] @@ -129,7 +129,7 @@ def 
test_generate_candlestick_graph_no_signals_no_trades(default_conf, mocker, t pair = "UNITTEST/BTC" timerange = TimeRange(None, 'line', 0, -1000) - data = history.load_pair_history(pair=pair, ticker_interval='1m', + data = history.load_pair_history(pair=pair, timeframe='1m', datadir=testdatadir, timerange=timerange) data['buy'] = 0 data['sell'] = 0 @@ -164,7 +164,7 @@ def test_generate_candlestick_graph_no_trades(default_conf, mocker, testdatadir) MagicMock(side_effect=fig_generating_mock)) pair = 'UNITTEST/BTC' timerange = TimeRange(None, 'line', 0, -1000) - data = history.load_pair_history(pair=pair, ticker_interval='1m', + data = history.load_pair_history(pair=pair, timeframe='1m', datadir=testdatadir, timerange=timerange) # Generate buy/sell signals and indicators @@ -228,7 +228,7 @@ def test_add_profit(testdatadir): bt_data = load_backtest_data(filename) timerange = TimeRange.parse_timerange("20180110-20180112") - df = history.load_pair_history(pair="TRX/BTC", ticker_interval='5m', + df = history.load_pair_history(pair="TRX/BTC", timeframe='5m', datadir=testdatadir, timerange=timerange) fig = generate_empty_figure() @@ -251,7 +251,7 @@ def test_generate_profit_graph(testdatadir): tickers = history.load_data(datadir=testdatadir, pairs=pairs, - ticker_interval='5m', + timeframe='5m', timerange=timerange ) trades = trades[trades['pair'].isin(pairs)] From 08aedc18e1eb2bf7ff0ead368ec043c5585c17f2 Mon Sep 17 00:00:00 2001 From: Matthias Date: Sat, 2 Nov 2019 20:25:18 +0100 Subject: [PATCH 2/7] Exchange ticker_interval with timeframe in some more places --- freqtrade/data/dataprovider.py | 13 ++++--- freqtrade/data/history.py | 2 +- freqtrade/exchange/exchange.py | 67 ++++++++++++++++----------------- tests/exchange/test_exchange.py | 4 +- 4 files changed, 43 insertions(+), 43 deletions(-) diff --git a/freqtrade/data/dataprovider.py b/freqtrade/data/dataprovider.py index ce4554cbb..db71ff029 100644 --- a/freqtrade/data/dataprovider.py +++ b/freqtrade/data/dataprovider.py @@ -37,7 +37,7 @@ class DataProvider: @property def available_pairs(self) -> List[Tuple[str, str]]: """ - Return a list of tuples containing pair, ticker_interval for which data is currently cached. + Return a list of tuples containing (pair, timeframe) for which data is currently cached. Should be whitelist + open trades. """ return list(self._exchange._klines.keys()) @@ -68,21 +68,22 @@ class DataProvider: datadir=Path(self._config['datadir']) ) - def get_pair_dataframe(self, pair: str, ticker_interval: str = None) -> DataFrame: + def get_pair_dataframe(self, pair: str, timeframe: str = None) -> DataFrame: """ Return pair ohlcv data, either live or cached historical -- depending on the runmode. :param pair: pair to get the data for - :param ticker_interval: ticker interval to get data for + :param timeframe: ticker interval to get data for + :return: Dataframe for this pair """ if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE): # Get live ohlcv data. - data = self.ohlcv(pair=pair, timeframe=ticker_interval) + data = self.ohlcv(pair=pair, timeframe=timeframe) else: # Get historic ohlcv data (cached on disk). 
- data = self.historic_ohlcv(pair=pair, timeframe=ticker_interval) + data = self.historic_ohlcv(pair=pair, timeframe=timeframe) if len(data) == 0: - logger.warning(f"No data found for ({pair}, {ticker_interval}).") + logger.warning(f"No data found for ({pair}, {timeframe}).") return data def market(self, pair: str) -> Optional[Dict[str, Any]]: diff --git a/freqtrade/data/history.py b/freqtrade/data/history.py index 8e4bc8ced..3dea41c55 100644 --- a/freqtrade/data/history.py +++ b/freqtrade/data/history.py @@ -308,7 +308,7 @@ def download_pair_history(datadir: Path, logger.debug("Current End: %s", misc.format_ms_time(data[-1][0]) if data else 'None') # Default since_ms to 30 days if nothing is given - new_data = exchange.get_historic_ohlcv(pair=pair, ticker_interval=timeframe, + new_data = exchange.get_historic_ohlcv(pair=pair, timeframe=timeframe, since_ms=since_ms if since_ms else int(arrow.utcnow().shift( diff --git a/freqtrade/exchange/exchange.py b/freqtrade/exchange/exchange.py index a198e8cdb..05db45c9b 100644 --- a/freqtrade/exchange/exchange.py +++ b/freqtrade/exchange/exchange.py @@ -536,40 +536,40 @@ class Exchange: logger.info("returning cached ticker-data for %s", pair) return self._cached_ticker[pair] - def get_historic_ohlcv(self, pair: str, ticker_interval: str, + def get_historic_ohlcv(self, pair: str, timeframe: str, since_ms: int) -> List: """ Gets candle history using asyncio and returns the list of candles. Handles all async doing. Async over one pair, assuming we get `_ohlcv_candle_limit` candles per call. :param pair: Pair to download - :param ticker_interval: Interval to get + :param timeframe: Ticker Timeframe to get :param since_ms: Timestamp in milliseconds to get history from :returns List of tickers """ return asyncio.get_event_loop().run_until_complete( - self._async_get_historic_ohlcv(pair=pair, ticker_interval=ticker_interval, + self._async_get_historic_ohlcv(pair=pair, timeframe=timeframe, since_ms=since_ms)) async def _async_get_historic_ohlcv(self, pair: str, - ticker_interval: str, + timeframe: str, since_ms: int) -> List: - one_call = timeframe_to_msecs(ticker_interval) * self._ohlcv_candle_limit + one_call = timeframe_to_msecs(timeframe) * self._ohlcv_candle_limit logger.debug( "one_call: %s msecs (%s)", one_call, arrow.utcnow().shift(seconds=one_call // 1000).humanize(only_distance=True) ) input_coroutines = [self._async_get_candle_history( - pair, ticker_interval, since) for since in + pair, timeframe, since) for since in range(since_ms, arrow.utcnow().timestamp * 1000, one_call)] tickers = await asyncio.gather(*input_coroutines, return_exceptions=True) # Combine tickers data: List = [] - for p, ticker_interval, ticker in tickers: + for p, timeframe, ticker in tickers: if p == pair: data.extend(ticker) # Sort data again after extending the result - above calls return in "async order" @@ -589,14 +589,14 @@ class Exchange: input_coroutines = [] # Gather coroutines to run - for pair, ticker_interval in set(pair_list): - if (not ((pair, ticker_interval) in self._klines) - or self._now_is_time_to_refresh(pair, ticker_interval)): - input_coroutines.append(self._async_get_candle_history(pair, ticker_interval)) + for pair, timeframe in set(pair_list): + if (not ((pair, timeframe) in self._klines) + or self._now_is_time_to_refresh(pair, timeframe)): + input_coroutines.append(self._async_get_candle_history(pair, timeframe)) else: logger.debug( - "Using cached ohlcv data for pair %s, interval %s ...", - pair, ticker_interval + "Using cached ohlcv data for pair 
%s, timeframe %s ...", + pair, timeframe ) tickers = asyncio.get_event_loop().run_until_complete( @@ -608,40 +608,40 @@ class Exchange: logger.warning("Async code raised an exception: %s", res.__class__.__name__) continue pair = res[0] - ticker_interval = res[1] + timeframe = res[1] ticks = res[2] # keeping last candle time as last refreshed time of the pair if ticks: - self._pairs_last_refresh_time[(pair, ticker_interval)] = ticks[-1][0] // 1000 + self._pairs_last_refresh_time[(pair, timeframe)] = ticks[-1][0] // 1000 # keeping parsed dataframe in cache - self._klines[(pair, ticker_interval)] = parse_ticker_dataframe( - ticks, ticker_interval, pair=pair, fill_missing=True, + self._klines[(pair, timeframe)] = parse_ticker_dataframe( + ticks, timeframe, pair=pair, fill_missing=True, drop_incomplete=self._ohlcv_partial_candle) return tickers - def _now_is_time_to_refresh(self, pair: str, ticker_interval: str) -> bool: + def _now_is_time_to_refresh(self, pair: str, timeframe: str) -> bool: # Calculating ticker interval in seconds - interval_in_sec = timeframe_to_seconds(ticker_interval) + interval_in_sec = timeframe_to_seconds(timeframe) - return not ((self._pairs_last_refresh_time.get((pair, ticker_interval), 0) + return not ((self._pairs_last_refresh_time.get((pair, timeframe), 0) + interval_in_sec) >= arrow.utcnow().timestamp) @retrier_async - async def _async_get_candle_history(self, pair: str, ticker_interval: str, + async def _async_get_candle_history(self, pair: str, timeframe: str, since_ms: Optional[int] = None) -> Tuple[str, str, List]: """ Asynchronously gets candle histories using fetch_ohlcv - returns tuple: (pair, ticker_interval, ohlcv_list) + returns tuple: (pair, timeframe, ohlcv_list) """ try: # fetch ohlcv asynchronously s = '(' + arrow.get(since_ms // 1000).isoformat() + ') ' if since_ms is not None else '' logger.debug( "Fetching pair %s, interval %s, since %s %s...", - pair, ticker_interval, since_ms, s + pair, timeframe, since_ms, s ) - data = await self._api_async.fetch_ohlcv(pair, timeframe=ticker_interval, + data = await self._api_async.fetch_ohlcv(pair, timeframe=timeframe, since=since_ms) # Because some exchange sort Tickers ASC and other DESC. @@ -653,9 +653,9 @@ class Exchange: data = sorted(data, key=lambda x: x[0]) except IndexError: logger.exception("Error loading %s. Result was %s.", pair, data) - return pair, ticker_interval, [] - logger.debug("Done fetching pair %s, interval %s ...", pair, ticker_interval) - return pair, ticker_interval, data + return pair, timeframe, [] + logger.debug("Done fetching pair %s, interval %s ...", pair, timeframe) + return pair, timeframe, data except ccxt.NotSupported as e: raise OperationalException( @@ -802,7 +802,6 @@ class Exchange: Handles all async doing. Async over one pair, assuming we get `_ohlcv_candle_limit` candles per call. :param pair: Pair to download - :param ticker_interval: Interval to get :param since: Timestamp in milliseconds to get history from :param until: Timestamp in milliseconds. Defaults to current timestamp if not defined. :param from_id: Download data starting with ID (if id is known) @@ -958,27 +957,27 @@ def available_exchanges(ccxt_module=None) -> List[str]: return [x for x in exchanges if not is_exchange_bad(x)] -def timeframe_to_seconds(ticker_interval: str) -> int: +def timeframe_to_seconds(timeframe: str) -> int: """ Translates the timeframe interval value written in the human readable form ('1m', '5m', '1h', '1d', '1w', etc.) to the number of seconds for one timeframe interval. 
""" - return ccxt.Exchange.parse_timeframe(ticker_interval) + return ccxt.Exchange.parse_timeframe(timeframe) -def timeframe_to_minutes(ticker_interval: str) -> int: +def timeframe_to_minutes(timeframe: str) -> int: """ Same as timeframe_to_seconds, but returns minutes. """ - return ccxt.Exchange.parse_timeframe(ticker_interval) // 60 + return ccxt.Exchange.parse_timeframe(timeframe) // 60 -def timeframe_to_msecs(ticker_interval: str) -> int: +def timeframe_to_msecs(timeframe: str) -> int: """ Same as timeframe_to_seconds, but returns milliseconds. """ - return ccxt.Exchange.parse_timeframe(ticker_interval) * 1000 + return ccxt.Exchange.parse_timeframe(timeframe) * 1000 def timeframe_to_prev_date(timeframe: str, date: datetime = None) -> datetime: diff --git a/tests/exchange/test_exchange.py b/tests/exchange/test_exchange.py index 925a53c95..68fac8632 100644 --- a/tests/exchange/test_exchange.py +++ b/tests/exchange/test_exchange.py @@ -1107,7 +1107,7 @@ def test_refresh_latest_ohlcv(mocker, default_conf, caplog) -> None: exchange.refresh_latest_ohlcv([('IOTA/ETH', '5m'), ('XRP/ETH', '5m')]) assert exchange._api_async.fetch_ohlcv.call_count == 2 - assert log_has(f"Using cached ohlcv data for pair {pairs[0][0]}, interval {pairs[0][1]} ...", + assert log_has(f"Using cached ohlcv data for pair {pairs[0][0]}, timeframe {pairs[0][1]} ...", caplog) @@ -1143,7 +1143,7 @@ async def test__async_get_candle_history(default_conf, mocker, caplog, exchange_ # exchange = Exchange(default_conf) await async_ccxt_exception(mocker, default_conf, MagicMock(), "_async_get_candle_history", "fetch_ohlcv", - pair='ABCD/BTC', ticker_interval=default_conf['ticker_interval']) + pair='ABCD/BTC', timeframe=default_conf['ticker_interval']) api_mock = MagicMock() with pytest.raises(OperationalException, match=r'Could not fetch ticker data*'): From d801dec6aa45fb1eb8271ad8166e9b44a9608e53 Mon Sep 17 00:00:00 2001 From: Matthias Date: Sat, 2 Nov 2019 20:26:26 +0100 Subject: [PATCH 3/7] Some more places with ticker_interval gone --- freqtrade/optimize/backtesting.py | 10 +++++----- tests/data/test_converter.py | 12 ++++++------ tests/data/test_dataprovider.py | 24 ++++++++++++------------ tests/exchange/test_exchange.py | 4 ++-- 4 files changed, 25 insertions(+), 25 deletions(-) diff --git a/freqtrade/optimize/backtesting.py b/freqtrade/optimize/backtesting.py index 58fd1f772..79478076b 100644 --- a/freqtrade/optimize/backtesting.py +++ b/freqtrade/optimize/backtesting.py @@ -83,8 +83,8 @@ class Backtesting: if "ticker_interval" not in self.config: raise OperationalException("Ticker-interval needs to be set in either configuration " "or as cli argument `--ticker-interval 5m`") - self.ticker_interval = str(self.config.get('ticker_interval')) - self.ticker_interval_mins = timeframe_to_minutes(self.ticker_interval) + self.timeframe = str(self.config.get('ticker_interval')) + self.timeframe_mins = timeframe_to_minutes(self.timeframe) # Get maximum required startup period self.required_startup = max([strat.startup_candle_count for strat in self.strategylist]) @@ -108,7 +108,7 @@ class Backtesting: data = history.load_data( datadir=Path(self.config['datadir']), pairs=self.config['exchange']['pair_whitelist'], - timeframe=self.ticker_interval, + timeframe=self.timeframe, timerange=timerange, startup_candles=self.required_startup, fail_without_data=True, @@ -375,7 +375,7 @@ class Backtesting: lock_pair_until: Dict = {} # Indexes per pair, so some pairs are allowed to have a missing start. 
indexes: Dict = {} - tmp = start_date + timedelta(minutes=self.ticker_interval_mins) + tmp = start_date + timedelta(minutes=self.timeframe_mins) # Loop timerange and get candle for each pair at that point in time while tmp < end_date: @@ -427,7 +427,7 @@ class Backtesting: lock_pair_until[pair] = end_date.datetime # Move time one configured time_interval ahead. - tmp += timedelta(minutes=self.ticker_interval_mins) + tmp += timedelta(minutes=self.timeframe_mins) return DataFrame.from_records(trades, columns=BacktestResult._fields) def start(self) -> None: diff --git a/tests/data/test_converter.py b/tests/data/test_converter.py index 92494ff1e..8184167b3 100644 --- a/tests/data/test_converter.py +++ b/tests/data/test_converter.py @@ -42,7 +42,7 @@ def test_ohlcv_fill_up_missing_data(testdatadir, caplog): def test_ohlcv_fill_up_missing_data2(caplog): - ticker_interval = '5m' + timeframe = '5m' ticks = [[ 1511686200000, # 8:50:00 8.794e-05, # open @@ -78,10 +78,10 @@ def test_ohlcv_fill_up_missing_data2(caplog): ] # Generate test-data without filling missing - data = parse_ticker_dataframe(ticks, ticker_interval, pair="UNITTEST/BTC", fill_missing=False) + data = parse_ticker_dataframe(ticks, timeframe, pair="UNITTEST/BTC", fill_missing=False) assert len(data) == 3 caplog.set_level(logging.DEBUG) - data2 = ohlcv_fill_up_missing_data(data, ticker_interval, "UNITTEST/BTC") + data2 = ohlcv_fill_up_missing_data(data, timeframe, "UNITTEST/BTC") assert len(data2) == 4 # 3rd candle has been filled row = data2.loc[2, :] @@ -99,7 +99,7 @@ def test_ohlcv_fill_up_missing_data2(caplog): def test_ohlcv_drop_incomplete(caplog): - ticker_interval = '1d' + timeframe = '1d' ticks = [[ 1559750400000, # 2019-06-04 8.794e-05, # open @@ -134,13 +134,13 @@ def test_ohlcv_drop_incomplete(caplog): ] ] caplog.set_level(logging.DEBUG) - data = parse_ticker_dataframe(ticks, ticker_interval, pair="UNITTEST/BTC", + data = parse_ticker_dataframe(ticks, timeframe, pair="UNITTEST/BTC", fill_missing=False, drop_incomplete=False) assert len(data) == 4 assert not log_has("Dropping last candle", caplog) # Drop last candle - data = parse_ticker_dataframe(ticks, ticker_interval, pair="UNITTEST/BTC", + data = parse_ticker_dataframe(ticks, timeframe, pair="UNITTEST/BTC", fill_missing=False, drop_incomplete=True) assert len(data) == 3 diff --git a/tests/data/test_dataprovider.py b/tests/data/test_dataprovider.py index 0318e5a82..1dbe20936 100644 --- a/tests/data/test_dataprovider.py +++ b/tests/data/test_dataprovider.py @@ -9,32 +9,32 @@ from tests.conftest import get_patched_exchange def test_ohlcv(mocker, default_conf, ticker_history): default_conf["runmode"] = RunMode.DRY_RUN - ticker_interval = default_conf["ticker_interval"] + timeframe = default_conf["ticker_interval"] exchange = get_patched_exchange(mocker, default_conf) - exchange._klines[("XRP/BTC", ticker_interval)] = ticker_history - exchange._klines[("UNITTEST/BTC", ticker_interval)] = ticker_history + exchange._klines[("XRP/BTC", timeframe)] = ticker_history + exchange._klines[("UNITTEST/BTC", timeframe)] = ticker_history dp = DataProvider(default_conf, exchange) assert dp.runmode == RunMode.DRY_RUN - assert ticker_history.equals(dp.ohlcv("UNITTEST/BTC", ticker_interval)) - assert isinstance(dp.ohlcv("UNITTEST/BTC", ticker_interval), DataFrame) - assert dp.ohlcv("UNITTEST/BTC", ticker_interval) is not ticker_history - assert dp.ohlcv("UNITTEST/BTC", ticker_interval, copy=False) is ticker_history - assert not dp.ohlcv("UNITTEST/BTC", ticker_interval).empty - assert 
dp.ohlcv("NONESENSE/AAA", ticker_interval).empty + assert ticker_history.equals(dp.ohlcv("UNITTEST/BTC", timeframe)) + assert isinstance(dp.ohlcv("UNITTEST/BTC", timeframe), DataFrame) + assert dp.ohlcv("UNITTEST/BTC", timeframe) is not ticker_history + assert dp.ohlcv("UNITTEST/BTC", timeframe, copy=False) is ticker_history + assert not dp.ohlcv("UNITTEST/BTC", timeframe).empty + assert dp.ohlcv("NONESENSE/AAA", timeframe).empty # Test with and without parameter - assert dp.ohlcv("UNITTEST/BTC", ticker_interval).equals(dp.ohlcv("UNITTEST/BTC")) + assert dp.ohlcv("UNITTEST/BTC", timeframe).equals(dp.ohlcv("UNITTEST/BTC")) default_conf["runmode"] = RunMode.LIVE dp = DataProvider(default_conf, exchange) assert dp.runmode == RunMode.LIVE - assert isinstance(dp.ohlcv("UNITTEST/BTC", ticker_interval), DataFrame) + assert isinstance(dp.ohlcv("UNITTEST/BTC", timeframe), DataFrame) default_conf["runmode"] = RunMode.BACKTEST dp = DataProvider(default_conf, exchange) assert dp.runmode == RunMode.BACKTEST - assert dp.ohlcv("UNITTEST/BTC", ticker_interval).empty + assert dp.ohlcv("UNITTEST/BTC", timeframe).empty def test_historic_ohlcv(mocker, default_conf, ticker_history): diff --git a/tests/exchange/test_exchange.py b/tests/exchange/test_exchange.py index 68fac8632..a21a5f3ac 100644 --- a/tests/exchange/test_exchange.py +++ b/tests/exchange/test_exchange.py @@ -1047,8 +1047,8 @@ def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name): ] pair = 'ETH/BTC' - async def mock_candle_hist(pair, ticker_interval, since_ms): - return pair, ticker_interval, tick + async def mock_candle_hist(pair, timeframe, since_ms): + return pair, timeframe, tick exchange._async_get_candle_history = Mock(wraps=mock_candle_hist) # one_call calculation * 1.8 should do 2 calls From 334ac8b10ccbedd2910cf054b26c18f30e668d96 Mon Sep 17 00:00:00 2001 From: Matthias Date: Sat, 2 Nov 2019 20:34:06 +0100 Subject: [PATCH 4/7] Adapt documentation for timeframe --- docs/strategy-customization.md | 8 ++++---- docs/strategy_analysis_example.md | 4 ++-- user_data/notebooks/strategy_analysis_example.ipynb | 4 ++-- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/strategy-customization.md b/docs/strategy-customization.md index 72938f9af..34f86f2ce 100644 --- a/docs/strategy-customization.md +++ b/docs/strategy-customization.md @@ -314,9 +314,9 @@ Please always check the mode of operation to select the correct method to get da #### Possible options for DataProvider - `available_pairs` - Property with tuples listing cached pairs with their intervals (pair, interval). -- `ohlcv(pair, ticker_interval)` - Currently cached ticker data for the pair, returns DataFrame or empty DataFrame. -- `historic_ohlcv(pair, ticker_interval)` - Returns historical data stored on disk. -- `get_pair_dataframe(pair, ticker_interval)` - This is a universal method, which returns either historical data (for backtesting) or cached live data (for the Dry-Run and Live-Run modes). +- `ohlcv(pair, timeframe)` - Currently cached ticker data for the pair, returns DataFrame or empty DataFrame. +- `historic_ohlcv(pair, timeframe)` - Returns historical data stored on disk. +- `get_pair_dataframe(pair, timeframe)` - This is a universal method, which returns either historical data (for backtesting) or cached live data (for the Dry-Run and Live-Run modes). - `orderbook(pair, maximum)` - Returns latest orderbook data for the pair, a dict with bids/asks with a total of `maximum` entries. 
- `market(pair)` - Returns market data for the pair: fees, limits, precisions, activity flag, etc. See [ccxt documentation](https://github.com/ccxt/ccxt/wiki/Manual#markets) for more details on Market data structure. - `runmode` - Property containing the current runmode. @@ -327,7 +327,7 @@ Please always check the mode of operation to select the correct method to get da if self.dp: inf_pair, inf_timeframe = self.informative_pairs()[0] informative = self.dp.get_pair_dataframe(pair=inf_pair, - ticker_interval=inf_timeframe) + timeframe=inf_timeframe) ``` !!! Warning "Warning about backtesting" diff --git a/docs/strategy_analysis_example.md b/docs/strategy_analysis_example.md index aa4578ca7..9e61bda65 100644 --- a/docs/strategy_analysis_example.md +++ b/docs/strategy_analysis_example.md @@ -10,7 +10,7 @@ from pathlib import Path # Customize these according to your needs. # Define some constants -ticker_interval = "5m" +timeframe = "5m" # Name of the strategy class strategy_name = 'SampleStrategy' # Path to user data @@ -29,7 +29,7 @@ pair = "BTC_USDT" from freqtrade.data.history import load_pair_history candles = load_pair_history(datadir=data_location, - ticker_interval=ticker_interval, + timeframe=timeframe, pair=pair) # Confirm success diff --git a/user_data/notebooks/strategy_analysis_example.ipynb b/user_data/notebooks/strategy_analysis_example.ipynb index 03dc83b4e..2876ea938 100644 --- a/user_data/notebooks/strategy_analysis_example.ipynb +++ b/user_data/notebooks/strategy_analysis_example.ipynb @@ -26,7 +26,7 @@ "# Customize these according to your needs.\n", "\n", "# Define some constants\n", - "ticker_interval = \"5m\"\n", + "timeframe = \"5m\"\n", "# Name of the strategy class\n", "strategy_name = 'SampleStrategy'\n", "# Path to user data\n", @@ -49,7 +49,7 @@ "from freqtrade.data.history import load_pair_history\n", "\n", "candles = load_pair_history(datadir=data_location,\n", - " ticker_interval=ticker_interval,\n", + " timeframe=timeframe,\n", " pair=pair)\n", "\n", "# Confirm success\n", From 1c57a4ac35435914b1a7330129185ee6dec55be1 Mon Sep 17 00:00:00 2001 From: Matthias Date: Sat, 2 Nov 2019 20:34:39 +0100 Subject: [PATCH 5/7] more replacements of ticker_interval --- freqtrade/constants.py | 4 ++-- freqtrade/data/btanalysis.py | 6 +++--- freqtrade/optimize/backtesting.py | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/freqtrade/constants.py b/freqtrade/constants.py index 5fdd45916..f34232bb1 100644 --- a/freqtrade/constants.py +++ b/freqtrade/constants.py @@ -24,7 +24,7 @@ AVAILABLE_PAIRLISTS = ['StaticPairList', 'VolumePairList'] DRY_RUN_WALLET = 999.9 MATH_CLOSE_PREC = 1e-14 # Precision used for float comparisons -TICKER_INTERVALS = [ +TIMEFRAMES = [ '1m', '3m', '5m', '15m', '30m', '1h', '2h', '4h', '6h', '8h', '12h', '1d', '3d', '1w', @@ -57,7 +57,7 @@ CONF_SCHEMA = { 'type': 'object', 'properties': { 'max_open_trades': {'type': 'integer', 'minimum': -1}, - 'ticker_interval': {'type': 'string', 'enum': TICKER_INTERVALS}, + 'ticker_interval': {'type': 'string', 'enum': TIMEFRAMES}, 'stake_currency': {'type': 'string', 'enum': ['BTC', 'XBT', 'ETH', 'USDT', 'EUR', 'USD']}, 'stake_amount': { "type": ["number", "string"], diff --git a/freqtrade/data/btanalysis.py b/freqtrade/data/btanalysis.py index 2f7a234ce..379c80060 100644 --- a/freqtrade/data/btanalysis.py +++ b/freqtrade/data/btanalysis.py @@ -178,9 +178,9 @@ def create_cum_profit(df: pd.DataFrame, trades: pd.DataFrame, col_name: str, :return: Returns df with one additional column, col_name, 
containing the cumulative profit. """ from freqtrade.exchange import timeframe_to_minutes - ticker_minutes = timeframe_to_minutes(timeframe) - # Resample to ticker_interval to make sure trades match candles - _trades_sum = trades.resample(f'{ticker_minutes}min', on='close_time')[['profitperc']].sum() + timeframe_minutes = timeframe_to_minutes(timeframe) + # Resample to timeframe to make sure trades match candles + _trades_sum = trades.resample(f'{timeframe_minutes}min', on='close_time')[['profitperc']].sum() df.loc[:, col_name] = _trades_sum.cumsum() # Set first value to 0 df.loc[df.iloc[0].name, col_name] = 0 diff --git a/freqtrade/optimize/backtesting.py b/freqtrade/optimize/backtesting.py index 79478076b..2c2d116a4 100644 --- a/freqtrade/optimize/backtesting.py +++ b/freqtrade/optimize/backtesting.py @@ -121,7 +121,7 @@ class Backtesting: min_date.isoformat(), max_date.isoformat(), (max_date - min_date).days ) # Adjust startts forward if not enough data is available - timerange.adjust_start_if_necessary(timeframe_to_seconds(self.ticker_interval), + timerange.adjust_start_if_necessary(timeframe_to_seconds(self.timeframe), self.required_startup, min_date) return data, timerange From c449e3928057213d14d2de2ab6c9f46ac51da5d5 Mon Sep 17 00:00:00 2001 From: Matthias Date: Sun, 3 Nov 2019 10:01:05 +0100 Subject: [PATCH 6/7] Replace more occurances of ticker_interval --- freqtrade/configuration/timerange.py | 6 +++--- freqtrade/optimize/hyperopt_interface.py | 8 ++++---- tests/optimize/__init__.py | 4 ++-- tests/optimize/test_backtest_detail.py | 4 ++-- tests/optimize/test_backtesting.py | 2 +- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/freqtrade/configuration/timerange.py b/freqtrade/configuration/timerange.py index 156f0e1e2..a8be873df 100644 --- a/freqtrade/configuration/timerange.py +++ b/freqtrade/configuration/timerange.py @@ -39,12 +39,12 @@ class TimeRange: if self.startts: self.startts = self.startts - seconds - def adjust_start_if_necessary(self, ticker_interval_secs: int, startup_candles: int, + def adjust_start_if_necessary(self, timeframe_secs: int, startup_candles: int, min_date: arrow.Arrow) -> None: """ Adjust startts by candles. Applies only if no startup-candles have been available. - :param ticker_interval_secs: Ticker interval in seconds e.g. `timeframe_to_seconds('5m')` + :param timeframe_secs: Ticker timeframe in seconds e.g. `timeframe_to_seconds('5m')` :param startup_candles: Number of candles to move start-date forward :param min_date: Minimum data date loaded. 
From c449e3928057213d14d2de2ab6c9f46ac51da5d5 Mon Sep 17 00:00:00 2001
From: Matthias
Date: Sun, 3 Nov 2019 10:01:05 +0100
Subject: [PATCH 6/7] Replace more occurrences of ticker_interval

---
 freqtrade/configuration/timerange.py     | 6 +++---
 freqtrade/optimize/hyperopt_interface.py | 8 ++++----
 tests/optimize/__init__.py               | 4 ++--
 tests/optimize/test_backtest_detail.py   | 4 ++--
 tests/optimize/test_backtesting.py       | 2 +-
 5 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/freqtrade/configuration/timerange.py b/freqtrade/configuration/timerange.py
index 156f0e1e2..a8be873df 100644
--- a/freqtrade/configuration/timerange.py
+++ b/freqtrade/configuration/timerange.py
@@ -39,12 +39,12 @@ class TimeRange:
         if self.startts:
             self.startts = self.startts - seconds
 
-    def adjust_start_if_necessary(self, ticker_interval_secs: int, startup_candles: int,
+    def adjust_start_if_necessary(self, timeframe_secs: int, startup_candles: int,
                                   min_date: arrow.Arrow) -> None:
         """
         Adjust startts by candles.
         Applies only if no startup-candles have been available.
-        :param ticker_interval_secs: Ticker interval in seconds e.g. `timeframe_to_seconds('5m')`
+        :param timeframe_secs: Ticker timeframe in seconds e.g. `timeframe_to_seconds('5m')`
         :param startup_candles: Number of candles to move start-date forward
         :param min_date: Minimum data date loaded. Key criterion to decide if start-time
                          has to be moved
@@ -55,7 +55,7 @@
             # If no startts was defined, or backtest-data starts at the defined backtest-date
             logger.warning("Moving start-date by %s candles to account for startup time.",
                            startup_candles)
-            self.startts = (min_date.timestamp + ticker_interval_secs * startup_candles)
+            self.startts = (min_date.timestamp + timeframe_secs * startup_candles)
             self.starttype = 'date'
 
     @staticmethod
diff --git a/freqtrade/optimize/hyperopt_interface.py b/freqtrade/optimize/hyperopt_interface.py
index 142f305df..ac41ba92f 100644
--- a/freqtrade/optimize/hyperopt_interface.py
+++ b/freqtrade/optimize/hyperopt_interface.py
@@ -106,10 +106,10 @@ class IHyperOpt(ABC):
         roi_t_alpha = 1.0
         roi_p_alpha = 1.0
 
-        ticker_interval_mins = timeframe_to_minutes(IHyperOpt.ticker_interval)
+        timeframe_mins = timeframe_to_minutes(IHyperOpt.ticker_interval)
 
         # We define here limits for the ROI space parameters automagically adapted to the
-        # ticker_interval used by the bot:
+        # timeframe used by the bot:
         #
         # * 'roi_t' (limits for the time intervals in the ROI tables) components
         #   are scaled linearly.
@@ -117,8 +117,8 @@
         #
         # The scaling is designed so that it maps exactly to the legacy Freqtrade roi_space()
         # method for the 5m ticker interval.
-        roi_t_scale = ticker_interval_mins / 5
-        roi_p_scale = math.log1p(ticker_interval_mins) / math.log1p(5)
+        roi_t_scale = timeframe_mins / 5
+        roi_p_scale = math.log1p(timeframe_mins) / math.log1p(5)
         roi_limits = {
             'roi_t1_min': int(10 * roi_t_scale * roi_t_alpha),
             'roi_t1_max': int(120 * roi_t_scale * roi_t_alpha),
diff --git a/tests/optimize/__init__.py b/tests/optimize/__init__.py
index fdbaaa54d..8756143a0 100644
--- a/tests/optimize/__init__.py
+++ b/tests/optimize/__init__.py
@@ -7,7 +7,7 @@ from freqtrade.exchange import timeframe_to_minutes
 from freqtrade.strategy.interface import SellType
 
 ticker_start_time = arrow.get(2018, 10, 3)
-tests_ticker_interval = '1h'
+tests_timeframe = '1h'
 
 
 class BTrade(NamedTuple):
@@ -36,7 +36,7 @@ class BTContainer(NamedTuple):
 
 
 def _get_frame_time_from_offset(offset):
-    return ticker_start_time.shift(minutes=(offset * timeframe_to_minutes(tests_ticker_interval))
+    return ticker_start_time.shift(minutes=(offset * timeframe_to_minutes(tests_timeframe))
                                    ).datetime
diff --git a/tests/optimize/test_backtest_detail.py b/tests/optimize/test_backtest_detail.py
index 54f4c8796..3f6cc8c9a 100644
--- a/tests/optimize/test_backtest_detail.py
+++ b/tests/optimize/test_backtest_detail.py
@@ -9,7 +9,7 @@ from freqtrade.optimize.backtesting import Backtesting
 from freqtrade.strategy.interface import SellType
 from tests.conftest import patch_exchange
 from tests.optimize import (BTContainer, BTrade, _build_backtest_dataframe,
-                            _get_frame_time_from_offset, tests_ticker_interval)
+                            _get_frame_time_from_offset, tests_timeframe)
 
 # Test 0: Sell with signal sell in candle 3
 # Test with Stop-loss at 1%
@@ -293,7 +293,7 @@ def test_backtest_results(default_conf, fee, mocker, caplog, data) -> None:
     """
     default_conf["stoploss"] = data.stop_loss
     default_conf["minimal_roi"] = data.roi
-    default_conf["ticker_interval"] = tests_ticker_interval
+    default_conf["ticker_interval"] = tests_timeframe
     default_conf["trailing_stop"] = data.trailing_stop
     default_conf["trailing_only_offset_is_reached"] = data.trailing_only_offset_is_reached
     # Only add this to configuration If it's necessary
diff --git a/tests/optimize/test_backtesting.py b/tests/optimize/test_backtesting.py
index a5ab6d84c..508c12e89 100644
--- a/tests/optimize/test_backtesting.py
+++ b/tests/optimize/test_backtesting.py
@@ -307,7 +307,7 @@ def test_backtesting_init(mocker, default_conf, order_types) -> None:
     get_fee = mocker.patch('freqtrade.exchange.Exchange.get_fee', MagicMock(return_value=0.5))
     backtesting = Backtesting(default_conf)
     assert backtesting.config == default_conf
-    assert backtesting.ticker_interval == '5m'
+    assert backtesting.timeframe == '5m'
     assert callable(backtesting.strategy.tickerdata_to_dataframe)
     assert callable(backtesting.strategy.advise_buy)
     assert callable(backtesting.strategy.advise_sell)
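As a quick sanity check of the ROI-space scaling renamed in the hyperopt hunk above, the sketch below reproduces the two scale factors for a few timeframes, using the 10/120 base limits and `alpha = 1.0` values shown there; the minute-conversion helper is again a simplified stand-in, not the freqtrade implementation:

```python
import math


def timeframe_to_minutes(timeframe: str) -> int:
    # Simplified stand-in for freqtrade.exchange.timeframe_to_minutes.
    units = {"m": 1, "h": 60, "d": 1440}
    return int(timeframe[:-1]) * units[timeframe[-1]]


for tf in ("5m", "1h", "1d"):
    timeframe_mins = timeframe_to_minutes(tf)
    # Time components of the ROI table scale linearly with the timeframe...
    roi_t_scale = timeframe_mins / 5
    # ...while profit components scale logarithmically, so larger timeframes
    # do not blow up the ROI percentages.
    roi_p_scale = math.log1p(timeframe_mins) / math.log1p(5)
    roi_t1_min = int(10 * roi_t_scale)
    roi_t1_max = int(120 * roi_t_scale)
    print(f"{tf:>3}: roi_t_scale={roi_t_scale:>6.2f} "
          f"roi_p_scale={roi_p_scale:.2f} roi_t1=[{roi_t1_min}, {roi_t1_max}]")
```

For the 5m timeframe both scale factors evaluate to 1.0, which is exactly the "maps to the legacy roi_space()" property the comment in the patch describes.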
From 6ac73f7cde81ac04044a956138a1a627991fd1e2 Mon Sep 17 00:00:00 2001
From: Matthias
Date: Wed, 13 Nov 2019 11:28:26 +0100
Subject: [PATCH 7/7] Update missed strings

---
 freqtrade/data/dataprovider.py | 4 ++--
 freqtrade/data/history.py      | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/freqtrade/data/dataprovider.py b/freqtrade/data/dataprovider.py
index db71ff029..7b7159145 100644
--- a/freqtrade/data/dataprovider.py
+++ b/freqtrade/data/dataprovider.py
@@ -61,7 +61,7 @@
         """
         Get stored historic ohlcv data
         :param pair: pair to get the data for
-        :param timeframe: ticker interval to get data for
+        :param timeframe: timeframe to get data for
         """
         return load_pair_history(pair=pair,
                                  timeframe=timeframe or self._config['ticker_interval'],
@@ -73,7 +73,7 @@
         Return pair ohlcv data, either live or cached historical -- depending
         on the runmode.
         :param pair: pair to get the data for
-        :param timeframe: ticker interval to get data for
+        :param timeframe: timeframe to get data for
         :return: Dataframe for this pair
         """
         if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE):
diff --git a/freqtrade/data/history.py b/freqtrade/data/history.py
index 3dea41c55..d45b1c890 100644
--- a/freqtrade/data/history.py
+++ b/freqtrade/data/history.py
@@ -279,7 +279,7 @@ def download_pair_history(datadir: Path,
                           timeframe: str = '5m',
                           timerange: Optional[TimeRange] = None) -> bool:
     """
-    Download the latest ticker intervals from the exchange for the pair passed in parameters
+    Download latest candles from the exchange for the pair and timeframe passed in parameters
     The data is downloaded starting from the last correct data that exists in a cache.
     If timerange starts earlier than the data in the cache, the full data will be redownloaded