Added candle_type to a lot of methods, wrote some tests
parent e2f98a8dab
commit 920151934a
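In short: OHLCV caches and lookups are now keyed by a (pair, timeframe, candle_type) 3-tuple instead of (pair, timeframe), and the data helpers gain an optional candle_type argument ('' for regular spot candles, 'mark' for mark-price candles). A minimal usage sketch of the new calling convention (the pair and timeframe values below are illustrative only, not part of the commit):

    # Hypothetical example based on the signatures introduced in this commit.
    from freqtrade.data.dataprovider import DataProvider

    def example(dp: DataProvider) -> None:
        # '' selects regular spot candles; 'mark' selects mark-price candles.
        spot_df = dp.ohlcv("XRP/USDT", "5m", candle_type='')
        mark_df = dp.get_pair_dataframe("XRP/USDT", "1h", candle_type='mark')
        # Internally both end up cached under three-element keys such as
        # ("XRP/USDT", "5m", '') and ("XRP/USDT", "1h", 'mark').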
@@ -5,7 +5,7 @@ import itertools
 import logging
 from datetime import datetime, timezone
 from operator import itemgetter
-from typing import Any, Dict, List, Optional
+from typing import Any, Dict, List
 
 import pandas as pd
 from pandas import DataFrame, to_datetime
@@ -41,7 +41,13 @@ class DataProvider:
         """
         self.__slice_index = limit_index
 
-    def _set_cached_df(self, pair: str, timeframe: str, dataframe: DataFrame) -> None:
+    def _set_cached_df(
+        self,
+        pair: str,
+        timeframe: str,
+        dataframe: DataFrame,
+        candle_type: str = ''
+    ) -> None:
         """
         Store cached Dataframe.
         Using private method as this should never be used by a user
@@ -50,7 +56,8 @@ class DataProvider:
         :param timeframe: Timeframe to get data for
         :param dataframe: analyzed dataframe
         """
-        self.__cached_pairs[(pair, timeframe)] = (dataframe, datetime.now(timezone.utc))
+        self.__cached_pairs[(pair, timeframe, candle_type)] = (
+            dataframe, datetime.now(timezone.utc))
 
     def add_pairlisthandler(self, pairlists) -> None:
         """
@@ -58,13 +65,18 @@ class DataProvider:
         """
         self._pairlists = pairlists
 
-    def historic_ohlcv(self, pair: str, timeframe: str = None) -> DataFrame:
+    def historic_ohlcv(
+        self,
+        pair: str,
+        timeframe: str = None,
+        candle_type: str = ''
+    ) -> DataFrame:
         """
         Get stored historical candle (OHLCV) data
         :param pair: pair to get the data for
         :param timeframe: timeframe to get data for
         """
-        saved_pair = (pair, str(timeframe))
+        saved_pair = (pair, str(timeframe), candle_type)
         if saved_pair not in self.__cached_pairs_backtesting:
             timerange = TimeRange.parse_timerange(None if self._config.get(
                 'timerange') is None else str(self._config.get('timerange')))
@@ -77,11 +89,17 @@ class DataProvider:
                 timeframe=timeframe or self._config['timeframe'],
                 datadir=self._config['datadir'],
                 timerange=timerange,
-                data_format=self._config.get('dataformat_ohlcv', 'json')
+                data_format=self._config.get('dataformat_ohlcv', 'json'),
+                candle_type=candle_type
             )
         return self.__cached_pairs_backtesting[saved_pair].copy()
 
-    def get_pair_dataframe(self, pair: str, timeframe: str = None) -> DataFrame:
+    def get_pair_dataframe(
+        self,
+        pair: str,
+        timeframe: str = None,
+        candle_type: str = ''
+    ) -> DataFrame:
         """
         Return pair candle (OHLCV) data, either live or cached historical -- depending
         on the runmode.
@@ -91,12 +109,12 @@ class DataProvider:
         """
         if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE):
             # Get live OHLCV data.
-            data = self.ohlcv(pair=pair, timeframe=timeframe)
+            data = self.ohlcv(pair=pair, timeframe=timeframe, candle_type=candle_type)
         else:
             # Get historical OHLCV data (cached on disk).
-            data = self.historic_ohlcv(pair=pair, timeframe=timeframe)
+            data = self.historic_ohlcv(pair=pair, timeframe=timeframe, candle_type=candle_type)
         if len(data) == 0:
-            logger.warning(f"No data found for ({pair}, {timeframe}).")
+            logger.warning(f"No data found for ({pair}, {timeframe}, {candle_type}).")
         return data
 
     def get_analyzed_dataframe(
@@ -114,7 +132,7 @@ class DataProvider:
         combination.
         Returns empty dataframe and Epoch 0 (1970-01-01) if no dataframe was cached.
         """
-        pair_key = (pair, timeframe)
+        pair_key = (pair, timeframe, candle_type)
         if pair_key in self.__cached_pairs:
             if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE):
                 df, date = self.__cached_pairs[pair_key]
@@ -200,8 +218,10 @@ class DataProvider:
         if self._exchange is None:
             raise OperationalException(NO_EXCHANGE_EXCEPTION)
         if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE):
-            return self._exchange.klines((pair, timeframe or self._config['timeframe']),
-                                         copy=copy)
+            return self._exchange.klines(
+                (pair, timeframe or self._config['timeframe'], candle_type),
+                copy=copy
+            )
         else:
             return DataFrame()
@@ -54,6 +54,7 @@ def load_pair_history(pair: str,
                                    fill_missing=fill_up_missing,
                                    drop_incomplete=drop_incomplete,
                                    startup_candles=startup_candles,
+                                   candle_type=candle_type
                                    )
 
 
@@ -91,7 +92,8 @@ def load_data(datadir: Path,
                                  datadir=datadir, timerange=timerange,
                                  fill_up_missing=fill_up_missing,
                                  startup_candles=startup_candles,
-                                 data_handler=data_handler
+                                 data_handler=data_handler,
+                                 candle_type=candle_type
                                  )
         if not hist.empty:
             result[pair] = hist
@@ -124,7 +126,8 @@ def refresh_data(datadir: Path,
         process = f'{idx}/{len(pairs)}'
         _download_pair_history(pair=pair, process=process,
                                timeframe=timeframe, datadir=datadir,
-                               timerange=timerange, exchange=exchange, data_handler=data_handler)
+                               timerange=timerange, exchange=exchange, data_handler=data_handler,
+                               candle_type=candle_type)
 
 
 def _load_cached_data_for_updating(
@@ -150,7 +153,8 @@ def _load_cached_data_for_updating(
     # Intentionally don't pass timerange in - since we need to load the full dataset.
     data = data_handler.ohlcv_load(pair, timeframe=timeframe,
                                    timerange=None, fill_missing=False,
-                                   drop_incomplete=True, warn_no_data=False)
+                                   drop_incomplete=True, warn_no_data=False,
+                                   candle_type=candle_type)
     if not data.empty:
         if start and start < data.iloc[0]['date']:
            # Earlier data than existing data requested, redownload all
@@ -194,7 +198,8 @@ def _download_pair_history(pair: str, *,
 
        # data, since_ms = _load_cached_data_for_updating_old(datadir, pair, timeframe, timerange)
        data, since_ms = _load_cached_data_for_updating(pair, timeframe, timerange,
-                                                        data_handler=data_handler)
+                                                        data_handler=data_handler,
+                                                        candle_type=candle_type)
 
        logger.debug("Current Start: %s",
                     f"{data.iloc[0]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')
@@ -119,8 +119,8 @@ class Edge:
         )
         # Download informative pairs too
         res = defaultdict(list)
-        for p, t in self.strategy.gather_informative_pairs():
-            res[t].append(p)
+        for pair, timeframe, _ in self.strategy.gather_informative_pairs():
+            res[timeframe].append(pair)
         for timeframe, inf_pairs in res.items():
             timerange_startup = deepcopy(self._timerange)
             timerange_startup.subtract_start(timeframe_to_seconds(
@@ -204,7 +204,7 @@ class Binance(Exchange):
                                         since_ms: int, is_new_pair: bool = False,
                                         raise_: bool = False,
                                         candle_type: str = ''
-                                        ) -> Tuple[str, str, List]:
+                                        ) -> Tuple[str, str, str, List]:
         """
         Overwrite to introduce "fast new pair" functionality by detecting the pair's listing date
         Does not work for other exchanges, which don't return the earliest data when called with "0"
@@ -92,7 +92,7 @@ class Exchange:
         self._config.update(config)
 
         # Holds last candle refreshed time of each pair
-        self._pairs_last_refresh_time: Dict[Tuple[str, str], int] = {}
+        self._pairs_last_refresh_time: Dict[Tuple[str, str, str], int] = {}
         # Timestamp of last markets refresh
         self._last_markets_refresh: int = 0
 
@@ -105,7 +105,7 @@ class Exchange:
         self._buy_rate_cache: TTLCache = TTLCache(maxsize=100, ttl=1800)
 
         # Holds candles
-        self._klines: Dict[Tuple[str, str], DataFrame] = {}
+        self._klines: Dict[Tuple[str, str, str], DataFrame] = {}
 
         # Holds all open sell orders for dry_run
         self._dry_run_open_orders: Dict[str, Any] = {}
@@ -359,7 +359,7 @@ class Exchange:
             or (self.trading_mode == TradingMode.FUTURES and self.market_is_future(market))
         )
 
-    def klines(self, pair_interval: Tuple[str, str], copy: bool = True) -> DataFrame:
+    def klines(self, pair_interval: Tuple[str, str, str], copy: bool = True) -> DataFrame:
         if pair_interval in self._klines:
             return self._klines[pair_interval].copy() if copy else self._klines[pair_interval]
         else:
@@ -1321,7 +1321,8 @@ class Exchange:
         :param since_ms: Timestamp in milliseconds to get history from
         :return: List with candle (OHLCV) data
         """
-        pair, timeframe, data = asyncio.get_event_loop().run_until_complete(
+        data: List
+        pair, timeframe, candle_type, data = asyncio.get_event_loop().run_until_complete(
             self._async_get_historic_ohlcv(pair=pair, timeframe=timeframe,
                                            since_ms=since_ms, is_new_pair=is_new_pair,
                                            candle_type=candle_type))
@@ -1337,15 +1338,15 @@ class Exchange:
         :param since_ms: Timestamp in milliseconds to get history from
         :return: OHLCV DataFrame
         """
-        ticks = self.get_historic_ohlcv(pair, timeframe, since_ms=since_ms)
+        ticks = self.get_historic_ohlcv(pair, timeframe, since_ms=since_ms, candle_type=candle_type)
         return ohlcv_to_dataframe(ticks, timeframe, pair=pair, fill_missing=True,
                                   drop_incomplete=self._ohlcv_partial_candle)
 
     async def _async_get_historic_ohlcv(self, pair: str, timeframe: str,
-                                        since_ms: int, is_new_pair: bool,
+                                        since_ms: int, is_new_pair: bool = False,
                                         raise_: bool = False,
                                         candle_type: str = ''
-                                        ) -> Tuple[str, str, List]:
+                                        ) -> Tuple[str, str, str, List]:
         """
         Download historic ohlcv
         :param is_new_pair: used by binance subclass to allow "fast" new pair downloading
@@ -1374,12 +1375,12 @@ class Exchange:
                 continue
             else:
                 # Deconstruct tuple if it's not an exception
-                p, _, new_data = res
-                if p == pair:
+                p, _, c, new_data = res
+                if p == pair and c == candle_type:
                     data.extend(new_data)
         # Sort data again after extending the result - above calls return in "async order"
         data = sorted(data, key=lambda x: x[0])
-        return pair, timeframe, data
+        return pair, timeframe, candle_type, data
 
     def refresh_latest_ohlcv(self, pair_list: ListPairsWithTimeframes, *,
                              since_ms: Optional[int] = None, cache: bool = True,
@@ -1399,8 +1400,8 @@ class Exchange:
         input_coroutines = []
         cached_pairs = []
         # Gather coroutines to run
-        for pair, timeframe in set(pair_list):
-            if ((pair, timeframe) not in self._klines
+        for pair, timeframe, candle_type in set(pair_list):
+            if ((pair, timeframe, candle_type) not in self._klines
                     or self._now_is_time_to_refresh(pair, timeframe)):
                 if not since_ms and self.required_candle_call_count > 1:
                     # Multiple calls for one pair - to get more history
@@ -1411,17 +1412,17 @@ class Exchange:
 
                 if since_ms:
                     input_coroutines.append(self._async_get_historic_ohlcv(
-                        pair, timeframe, since_ms=since_ms, raise_=True))
+                        pair, timeframe, since_ms=since_ms, raise_=True, candle_type=candle_type))
                 else:
                     # One call ... "regular" refresh
                     input_coroutines.append(self._async_get_candle_history(
-                        pair, timeframe, since_ms=since_ms, candle_type=candle_type,))
+                        pair, timeframe, since_ms=since_ms, candle_type=candle_type))
             else:
                 logger.debug(
                     "Using cached candle (OHLCV) data for pair %s, timeframe %s ...",
-                    pair, timeframe
+                    pair, timeframe, candle_type
                 )
-                cached_pairs.append((pair, timeframe))
+                cached_pairs.append((pair, timeframe, candle_type))
 
         results = asyncio.get_event_loop().run_until_complete(
             asyncio.gather(*input_coroutines, return_exceptions=True))
@@ -1433,20 +1434,23 @@ class Exchange:
                 logger.warning("Async code raised an exception: %s", res.__class__.__name__)
                 continue
             # Deconstruct tuple (has 3 elements)
-            pair, timeframe, ticks = res
+            pair, timeframe, c_type, ticks = res
             # keeping last candle time as last refreshed time of the pair
             if ticks:
-                self._pairs_last_refresh_time[(pair, timeframe)] = ticks[-1][0] // 1000
+                self._pairs_last_refresh_time[(pair, timeframe, c_type)] = ticks[-1][0] // 1000
             # keeping parsed dataframe in cache
             ohlcv_df = ohlcv_to_dataframe(
                 ticks, timeframe, pair=pair, fill_missing=True,
                 drop_incomplete=self._ohlcv_partial_candle)
-            results_df[(pair, timeframe)] = ohlcv_df
+            results_df[(pair, timeframe, c_type)] = ohlcv_df
             if cache:
-                self._klines[(pair, timeframe)] = ohlcv_df
+                self._klines[(pair, timeframe, c_type)] = ohlcv_df
         # Return cached klines
-        for pair, timeframe in cached_pairs:
-            results_df[(pair, timeframe)] = self.klines((pair, timeframe), copy=False)
+        for pair, timeframe, c_type in cached_pairs:
+            results_df[(pair, timeframe, c_type)] = self.klines(
+                (pair, timeframe, c_type),
+                copy=False
+            )
 
         return results_df
 
@@ -1459,8 +1463,12 @@ class Exchange:
         # Timeframe in seconds
         interval_in_sec = timeframe_to_seconds(timeframe)
 
-        return not ((self._pairs_last_refresh_time.get((pair, timeframe), 0)
-                     + interval_in_sec) >= arrow.utcnow().int_timestamp)
+        return not (
+            (self._pairs_last_refresh_time.get(
+                (pair, timeframe, candle_type),
+                0
+            ) + interval_in_sec) >= arrow.utcnow().int_timestamp
+        )
 
     @retrier_async
     async def _async_get_candle_history(
@@ -1501,9 +1509,9 @@ class Exchange:
                 data = sorted(data, key=lambda x: x[0])
             except IndexError:
                 logger.exception("Error loading %s. Result was %s.", pair, data)
-                return pair, timeframe, []
+                return pair, timeframe, candle_type, []
             logger.debug("Done fetching pair %s, interval %s ...", pair, timeframe)
-            return pair, timeframe, data
+            return pair, timeframe, candle_type, data
 
         except ccxt.NotSupported as e:
             raise OperationalException(
@@ -72,7 +72,7 @@ class AgeFilter(IPairList):
         :return: new allowlist
         """
         needed_pairs = [
-            (p, '1d') for p in pairlist
+            (p, '1d', '') for p in pairlist
             if p not in self._symbolsChecked and p not in self._symbolsCheckFailed]
         if not needed_pairs:
             # Remove pairs that have been removed before
@@ -88,7 +88,7 @@ class AgeFilter(IPairList):
         candles = self._exchange.refresh_latest_ohlcv(needed_pairs, since_ms=since_ms, cache=False)
         if self._enabled:
             for p in deepcopy(pairlist):
-                daily_candles = candles[(p, '1d')] if (p, '1d') in candles else None
+                daily_candles = candles[(p, '1d', '')] if (p, '1d', '') in candles else None
                 if not self._validate_pair_loc(p, daily_candles):
                     pairlist.remove(p)
         self.log_once(f"Validated {len(pairlist)} pairs.", logger.info)
@@ -67,7 +67,7 @@ class VolatilityFilter(IPairList):
         :param tickers: Tickers (from exchange.get_tickers()). May be cached.
         :return: new allowlist
         """
-        needed_pairs = [(p, '1d') for p in pairlist if p not in self._pair_cache]
+        needed_pairs = [(p, '1d', '') for p in pairlist if p not in self._pair_cache]
 
         since_ms = (arrow.utcnow()
                     .floor('day')
@@ -81,7 +81,7 @@ class VolatilityFilter(IPairList):
 
         if self._enabled:
             for p in deepcopy(pairlist):
-                daily_candles = candles[(p, '1d')] if (p, '1d') in candles else None
+                daily_candles = candles[(p, '1d', '')] if (p, '1d', '') in candles else None
                 if not self._validate_pair_loc(p, daily_candles):
                     pairlist.remove(p)
         return pairlist
@@ -160,10 +160,9 @@ class VolumePairList(IPairList):
                       f"{self._lookback_timeframe}, starting from {format_ms_time(since_ms)} "
                       f"till {format_ms_time(to_ms)}", logger.info)
         needed_pairs = [
-            (p, self._lookback_timeframe) for p in
-            [
-                s['symbol'] for s in filtered_tickers
-            ] if p not in self._pair_cache
+            (p, self._lookback_timeframe, '') for p in
+            [s['symbol'] for s in filtered_tickers]
+            if p not in self._pair_cache
         ]
 
         # Get all candles
@@ -174,8 +173,8 @@ class VolumePairList(IPairList):
         )
         for i, p in enumerate(filtered_tickers):
             pair_candles = candles[
-                (p['symbol'], self._lookback_timeframe)
-            ] if (p['symbol'], self._lookback_timeframe) in candles else None
+                (p['symbol'], self._lookback_timeframe, '')
+            ] if (p['symbol'], self._lookback_timeframe, '') in candles else None
             # in case of candle data calculate typical price and quoteVolume for candle
             if pair_candles is not None and not pair_candles.empty:
                 pair_candles['typical_price'] = (pair_candles['high'] + pair_candles['low']
@@ -65,7 +65,7 @@ class RangeStabilityFilter(IPairList):
         :param tickers: Tickers (from exchange.get_tickers()). May be cached.
         :return: new allowlist
         """
-        needed_pairs = [(p, '1d') for p in pairlist if p not in self._pair_cache]
+        needed_pairs = [(p, '1d', '') for p in pairlist if p not in self._pair_cache]
 
         since_ms = (arrow.utcnow()
                     .floor('day')
@@ -79,7 +79,7 @@ class RangeStabilityFilter(IPairList):
 
         if self._enabled:
             for p in deepcopy(pairlist):
-                daily_candles = candles[(p, '1d')] if (p, '1d') in candles else None
+                daily_candles = candles[(p, '1d', '')] if (p, '1d', '') in candles else None
                 if not self._validate_pair_loc(p, daily_candles):
                     pairlist.remove(p)
         return pairlist
@@ -138,4 +138,4 @@ class PairListManager():
         """
         Create list of pair tuples with (pair, timeframe)
         """
-        return [(pair, timeframe or self._config['timeframe']) for pair in pairs]
+        return [(pair, timeframe or self._config['timeframe'], '') for pair in pairs]
@@ -14,6 +14,7 @@ class InformativeData(NamedTuple):
     timeframe: str
     fmt: Union[str, Callable[[Any], str], None]
     ffill: bool
+    candle_type: str = ''
 
 
 def informative(timeframe: str, asset: str = '',
@@ -424,14 +424,18 @@ class IStrategy(ABC, HyperStrategyMixin):
         informative_pairs = self.informative_pairs()
         for inf_data, _ in self._ft_informative:
             if inf_data.asset:
-                pair_tf = (_format_pair_name(self.config, inf_data.asset), inf_data.timeframe)
+                pair_tf = (
+                    _format_pair_name(self.config, inf_data.asset),
+                    inf_data.timeframe,
+                    inf_data.candle_type
+                )
                 informative_pairs.append(pair_tf)
             else:
                 if not self.dp:
                     raise OperationalException('@informative decorator with unspecified asset '
                                                'requires DataProvider instance.')
                 for pair in self.dp.current_whitelist():
-                    informative_pairs.append((pair, inf_data.timeframe))
+                    informative_pairs.append((pair, inf_data.timeframe, inf_data.candle_type))
         return list(set(informative_pairs))
 
     def get_strategy_name(self) -> str:
@@ -1326,7 +1326,7 @@ def test_start_list_data(testdatadir, capsys):
     pargs['config'] = None
     start_list_data(pargs)
     captured = capsys.readouterr()
-    assert "Found 19 pair / timeframe combinations." in captured.out
+    assert "Found 20 pair / timeframe combinations." in captured.out
     assert "\n| Pair | Timeframe | Type |\n" in captured.out
     assert "\n| UNITTEST/BTC | 1m, 5m, 8m, 30m | |\n" in captured.out
     assert "\n| UNITTEST/USDT | 1h | mark |\n" in captured.out
@@ -75,7 +75,8 @@ def test_ohlcv_fill_up_missing_data(testdatadir, caplog):
 
 def test_ohlcv_fill_up_missing_data2(caplog):
     timeframe = '5m'
-    ticks = [[
+    ticks = [
+        [
         1511686200000,  # 8:50:00
         8.794e-05,  # open
         8.948e-05,  # high
@@ -287,42 +288,45 @@ def test_convert_trades_format(default_conf, testdatadir, tmpdir):
         file['new'].unlink()
 
 
-def test_convert_ohlcv_format(default_conf, testdatadir, tmpdir):
+@pytest.mark.parametrize('file_base', [
+    ('XRP_ETH-5m'),
+    ('XRP_ETH-1m'),
+    # ('XRP_USDT-1h-mark'), #TODO-lev: Create .gz file
+])
+def test_convert_ohlcv_format(default_conf, testdatadir, tmpdir, file_base):
     tmpdir1 = Path(tmpdir)
 
-    file1_orig = testdatadir / "XRP_ETH-5m.json"
-    file1 = tmpdir1 / "XRP_ETH-5m.json"
-    file1_new = tmpdir1 / "XRP_ETH-5m.json.gz"
-    file2_orig = testdatadir / "XRP_ETH-1m.json"
-    file2 = tmpdir1 / "XRP_ETH-1m.json"
-    file2_new = tmpdir1 / "XRP_ETH-1m.json.gz"
+    file_orig = testdatadir / f"{file_base}.json"
+    file_temp = tmpdir1 / f"{file_base}.json"
+    file_new = tmpdir1 / f"{file_base}.json.gz"
 
-    copyfile(file1_orig, file1)
-    copyfile(file2_orig, file2)
+    copyfile(file_orig, file_temp)
 
     default_conf['datadir'] = tmpdir1
-    default_conf['pairs'] = ['XRP_ETH']
-    default_conf['timeframes'] = ['1m', '5m']
+    default_conf['pairs'] = ['XRP_ETH', 'XRP_USDT']
+    default_conf['timeframes'] = ['1m', '5m', '1h']
 
-    assert not file1_new.exists()
-    assert not file2_new.exists()
+    assert not file_new.exists()
 
-    convert_ohlcv_format(default_conf, convert_from='json',
-                         convert_to='jsongz', erase=False)
+    convert_ohlcv_format(
+        default_conf,
+        convert_from='json',
+        convert_to='jsongz',
+        erase=False
+    )
 
-    assert file1_new.exists()
-    assert file2_new.exists()
-    assert file1.exists()
-    assert file2.exists()
+    assert file_new.exists()
+    assert file_temp.exists()
 
     # Remove original files
-    file1.unlink()
-    file2.unlink()
+    file_temp.unlink()
     # Convert back
-    convert_ohlcv_format(default_conf, convert_from='jsongz',
-                         convert_to='json', erase=True)
+    convert_ohlcv_format(
+        default_conf,
+        convert_from='jsongz',
+        convert_to='json',
+        erase=True
+    )
 
-    assert file1.exists()
-    assert file2.exists()
-    assert not file1_new.exists()
-    assert not file2_new.exists()
+    assert file_temp.exists()
+    assert not file_new.exists()
@@ -11,34 +11,42 @@ from freqtrade.plugins.pairlistmanager import PairListManager
 from tests.conftest import get_patched_exchange
 
 
-def test_ohlcv(mocker, default_conf, ohlcv_history):
+@pytest.mark.parametrize('candle_type', [
+    'mark',
+    '',
+])
+def test_ohlcv(mocker, default_conf, ohlcv_history, candle_type):
     default_conf["runmode"] = RunMode.DRY_RUN
     timeframe = default_conf["timeframe"]
     exchange = get_patched_exchange(mocker, default_conf)
-    exchange._klines[("XRP/BTC", timeframe)] = ohlcv_history
-    exchange._klines[("UNITTEST/BTC", timeframe)] = ohlcv_history
+    exchange._klines[("XRP/BTC", timeframe, candle_type)] = ohlcv_history
+    exchange._klines[("UNITTEST/BTC", timeframe, candle_type)] = ohlcv_history
 
     dp = DataProvider(default_conf, exchange)
     assert dp.runmode == RunMode.DRY_RUN
-    assert ohlcv_history.equals(dp.ohlcv("UNITTEST/BTC", timeframe))
-    assert isinstance(dp.ohlcv("UNITTEST/BTC", timeframe), DataFrame)
-    assert dp.ohlcv("UNITTEST/BTC", timeframe) is not ohlcv_history
-    assert dp.ohlcv("UNITTEST/BTC", timeframe, copy=False) is ohlcv_history
-    assert not dp.ohlcv("UNITTEST/BTC", timeframe).empty
-    assert dp.ohlcv("NONESENSE/AAA", timeframe).empty
+    assert ohlcv_history.equals(dp.ohlcv("UNITTEST/BTC", timeframe, candle_type=candle_type))
+    assert isinstance(dp.ohlcv("UNITTEST/BTC", timeframe, candle_type=candle_type), DataFrame)
+    assert dp.ohlcv("UNITTEST/BTC", timeframe, candle_type=candle_type) is not ohlcv_history
+    assert dp.ohlcv("UNITTEST/BTC", timeframe, copy=False, candle_type=candle_type) is ohlcv_history
+    assert not dp.ohlcv("UNITTEST/BTC", timeframe, candle_type=candle_type).empty
+    assert dp.ohlcv("NONESENSE/AAA", timeframe, candle_type=candle_type).empty
 
     # Test with and without parameter
-    assert dp.ohlcv("UNITTEST/BTC", timeframe).equals(dp.ohlcv("UNITTEST/BTC"))
+    assert dp.ohlcv(
+        "UNITTEST/BTC",
+        timeframe,
+        candle_type=candle_type
+    ).equals(dp.ohlcv("UNITTEST/BTC", candle_type=candle_type))
 
     default_conf["runmode"] = RunMode.LIVE
     dp = DataProvider(default_conf, exchange)
     assert dp.runmode == RunMode.LIVE
-    assert isinstance(dp.ohlcv("UNITTEST/BTC", timeframe), DataFrame)
+    assert isinstance(dp.ohlcv("UNITTEST/BTC", timeframe, candle_type=candle_type), DataFrame)
 
     default_conf["runmode"] = RunMode.BACKTEST
     dp = DataProvider(default_conf, exchange)
     assert dp.runmode == RunMode.BACKTEST
-    assert dp.ohlcv("UNITTEST/BTC", timeframe).empty
+    assert dp.ohlcv("UNITTEST/BTC", timeframe, candle_type=candle_type).empty
 
 
 def test_historic_ohlcv(mocker, default_conf, ohlcv_history):
@@ -77,37 +85,46 @@ def test_historic_ohlcv_dataformat(mocker, default_conf, ohlcv_history):
     jsonloadmock.assert_not_called()
 
 
-def test_get_pair_dataframe(mocker, default_conf, ohlcv_history):
+@pytest.mark.parametrize('candle_type', [
+    'mark',
+    '',
+])
+def test_get_pair_dataframe(mocker, default_conf, ohlcv_history, candle_type):
     default_conf["runmode"] = RunMode.DRY_RUN
     timeframe = default_conf["timeframe"]
     exchange = get_patched_exchange(mocker, default_conf)
-    exchange._klines[("XRP/BTC", timeframe)] = ohlcv_history
-    exchange._klines[("UNITTEST/BTC", timeframe)] = ohlcv_history
+    exchange._klines[("XRP/BTC", timeframe, candle_type)] = ohlcv_history
+    exchange._klines[("UNITTEST/BTC", timeframe, candle_type)] = ohlcv_history
 
     dp = DataProvider(default_conf, exchange)
     assert dp.runmode == RunMode.DRY_RUN
-    assert ohlcv_history.equals(dp.get_pair_dataframe("UNITTEST/BTC", timeframe))
-    assert isinstance(dp.get_pair_dataframe("UNITTEST/BTC", timeframe), DataFrame)
-    assert dp.get_pair_dataframe("UNITTEST/BTC", timeframe) is not ohlcv_history
-    assert not dp.get_pair_dataframe("UNITTEST/BTC", timeframe).empty
-    assert dp.get_pair_dataframe("NONESENSE/AAA", timeframe).empty
+    assert ohlcv_history.equals(dp.get_pair_dataframe(
+        "UNITTEST/BTC", timeframe, candle_type=candle_type))
+    assert isinstance(dp.get_pair_dataframe(
+        "UNITTEST/BTC", timeframe, candle_type=candle_type), DataFrame)
+    assert dp.get_pair_dataframe("UNITTEST/BTC", timeframe,
+                                 candle_type=candle_type) is not ohlcv_history
+    assert not dp.get_pair_dataframe("UNITTEST/BTC", timeframe, candle_type=candle_type).empty
+    assert dp.get_pair_dataframe("NONESENSE/AAA", timeframe, candle_type=candle_type).empty
 
     # Test with and without parameter
-    assert dp.get_pair_dataframe("UNITTEST/BTC", timeframe)\
-        .equals(dp.get_pair_dataframe("UNITTEST/BTC"))
+    assert dp.get_pair_dataframe("UNITTEST/BTC", timeframe, candle_type=candle_type)\
+        .equals(dp.get_pair_dataframe("UNITTEST/BTC", candle_type=candle_type))
 
     default_conf["runmode"] = RunMode.LIVE
     dp = DataProvider(default_conf, exchange)
     assert dp.runmode == RunMode.LIVE
-    assert isinstance(dp.get_pair_dataframe("UNITTEST/BTC", timeframe), DataFrame)
-    assert dp.get_pair_dataframe("NONESENSE/AAA", timeframe).empty
+    assert isinstance(dp.get_pair_dataframe(
+        "UNITTEST/BTC", timeframe, candle_type=candle_type), DataFrame)
+    assert dp.get_pair_dataframe("NONESENSE/AAA", timeframe, candle_type=candle_type).empty
 
     historymock = MagicMock(return_value=ohlcv_history)
     mocker.patch("freqtrade.data.dataprovider.load_pair_history", historymock)
     default_conf["runmode"] = RunMode.BACKTEST
     dp = DataProvider(default_conf, exchange)
     assert dp.runmode == RunMode.BACKTEST
-    assert isinstance(dp.get_pair_dataframe("UNITTEST/BTC", timeframe), DataFrame)
+    assert isinstance(dp.get_pair_dataframe(
+        "UNITTEST/BTC", timeframe, candle_type=candle_type), DataFrame)
     # assert dp.get_pair_dataframe("NONESENSE/AAA", timeframe).empty
 
 
@@ -276,7 +293,7 @@ def test_no_exchange_mode(default_conf):
     dp.refresh([()])
 
     with pytest.raises(OperationalException, match=message):
-        dp.ohlcv('XRP/USDT', '5m')
+        dp.ohlcv('XRP/USDT', '5m', '')
 
     with pytest.raises(OperationalException, match=message):
         dp.market('XRP/USDT')
@@ -95,6 +95,17 @@ def test_load_data_1min_timeframe(ohlcv_history, mocker, caplog, testdatadir) ->
     )
 
 
+def test_load_data_mark(ohlcv_history, mocker, caplog, testdatadir) -> None:
+    mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv', return_value=ohlcv_history)
+    file = testdatadir / 'UNITTEST_USDT-1h-mark.json'
+    load_data(datadir=testdatadir, timeframe='1h', pairs=['UNITTEST/BTC'], candle_type='mark')
+    assert file.is_file()
+    assert not log_has(
+        'Download history data for pair: "UNITTEST/USDT", interval: 1m '
+        'and store in None.', caplog
+    )
+
+
 def test_load_data_startup_candles(mocker, caplog, default_conf, testdatadir) -> None:
     ltfmock = mocker.patch(
         'freqtrade.data.history.jsondatahandler.JsonDataHandler._ohlcv_load',
@@ -110,8 +121,9 @@ def test_load_data_startup_candles(mocker, caplog, default_conf, testdatadir) ->
     assert ltfmock.call_args_list[0][1]['timerange'].startts == timerange.startts - 20 * 60
 
 
+@pytest.mark.parametrize('candle_type', ['mark', ''])
 def test_load_data_with_new_pair_1min(ohlcv_history_list, mocker, caplog,
-                                      default_conf, tmpdir) -> None:
+                                      default_conf, tmpdir, candle_type) -> None:
     """
     Test load_pair_history() with 1 min timeframe
     """
@@ -121,7 +133,7 @@ def test_load_data_with_new_pair_1min(ohlcv_history_list, mocker, caplog,
     file = tmpdir1 / 'MEME_BTC-1m.json'
 
     # do not download a new pair if refresh_pairs isn't set
-    load_pair_history(datadir=tmpdir1, timeframe='1m', pair='MEME/BTC')
+    load_pair_history(datadir=tmpdir1, timeframe='1m', pair='MEME/BTC', candle_type=candle_type)
     assert not file.is_file()
     assert log_has(
         'No history data for pair: "MEME/BTC", timeframe: 1m. '
@@ -131,7 +143,7 @@ def test_load_data_with_new_pair_1min(ohlcv_history_list, mocker, caplog,
     # download a new pair if refresh_pairs is set
     refresh_data(datadir=tmpdir1, timeframe='1m', pairs=['MEME/BTC'],
                  exchange=exchange)
-    load_pair_history(datadir=tmpdir1, timeframe='1m', pair='MEME/BTC')
+    load_pair_history(datadir=tmpdir1, timeframe='1m', pair='MEME/BTC', candle_type=candle_type)
     assert file.is_file()
     assert log_has_re(
         r'Download history data for pair: "MEME/BTC" \(0/1\), timeframe: 1m '
@@ -166,7 +178,7 @@ def test_json_pair_data_filename(pair, expected_result, candle_type):
         Path('freqtrade/hello/world'),
         pair,
         '5m',
-        candle_type
+        candle_type=candle_type
     )
     assert isinstance(fn, Path)
     assert fn == Path(expected_result + '.gz')
@@ -241,24 +253,37 @@ def test_load_cached_data_for_updating(mocker, testdatadir) -> None:
     assert start_ts is None
 
 
-def test_download_pair_history(ohlcv_history_list, mocker, default_conf, tmpdir) -> None:
+@pytest.mark.parametrize('candle_type, file_tail', [
+    ('mark', '-mark'),
+    ('', ''),
+])
+def test_download_pair_history(
+    ohlcv_history_list,
+    mocker,
+    default_conf,
+    tmpdir,
+    candle_type,
+    file_tail
+) -> None:
     mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv', return_value=ohlcv_history_list)
     exchange = get_patched_exchange(mocker, default_conf)
     tmpdir1 = Path(tmpdir)
-    file1_1 = tmpdir1 / 'MEME_BTC-1m.json'
-    file1_5 = tmpdir1 / 'MEME_BTC-5m.json'
-    file2_1 = tmpdir1 / 'CFI_BTC-1m.json'
-    file2_5 = tmpdir1 / 'CFI_BTC-5m.json'
+    file1_1 = tmpdir1 / f'MEME_BTC-1m{file_tail}.json'
+    file1_5 = tmpdir1 / f'MEME_BTC-5m{file_tail}.json'
+    file2_1 = tmpdir1 / f'CFI_BTC-1m{file_tail}.json'
+    file2_5 = tmpdir1 / f'CFI_BTC-5m{file_tail}.json'
 
     assert not file1_1.is_file()
     assert not file2_1.is_file()
 
     assert _download_pair_history(datadir=tmpdir1, exchange=exchange,
                                   pair='MEME/BTC',
-                                  timeframe='1m')
+                                  timeframe='1m',
+                                  candle_type=candle_type)
     assert _download_pair_history(datadir=tmpdir1, exchange=exchange,
                                   pair='CFI/BTC',
-                                  timeframe='1m')
+                                  timeframe='1m',
+                                  candle_type=candle_type)
     assert not exchange._pairs_last_refresh_time
     assert file1_1.is_file()
     assert file2_1.is_file()
@@ -272,10 +297,12 @@ def test_download_pair_history(ohlcv_history_list, mocker, default_conf, tmpdir)
 
     assert _download_pair_history(datadir=tmpdir1, exchange=exchange,
                                   pair='MEME/BTC',
-                                  timeframe='5m')
+                                  timeframe='5m',
+                                  candle_type=candle_type)
    assert _download_pair_history(datadir=tmpdir1, exchange=exchange,
                                  pair='CFI/BTC',
-                                  timeframe='5m')
+                                  timeframe='5m',
+                                  candle_type=candle_type)
     assert not exchange._pairs_last_refresh_time
     assert file1_5.is_file()
     assert file2_5.is_file()
@@ -295,7 +322,9 @@ def test_download_pair_history2(mocker, default_conf, testdatadir) -> None:
                            timeframe='1m')
     _download_pair_history(datadir=testdatadir, exchange=exchange, pair="UNITTEST/BTC",
                            timeframe='3m')
-    assert json_dump_mock.call_count == 2
+    _download_pair_history(datadir=testdatadir, exchange=exchange, pair="UNITTEST/USDT",
+                           timeframe='1h', candle_type='mark')
+    assert json_dump_mock.call_count == 3
 
 
 def test_download_backtesting_data_exception(mocker, caplog, default_conf, tmpdir) -> None:
@@ -629,7 +658,7 @@ def test_datahandler_ohlcv_get_pairs(testdatadir):
     pairs = HDF5DataHandler.ohlcv_get_pairs(testdatadir, '5m')
     assert set(pairs) == {'UNITTEST/BTC'}
 
-    pairs = JsonDataHandler.ohlcv_get_pairs(testdatadir, '1h', 'mark')
+    pairs = JsonDataHandler.ohlcv_get_pairs(testdatadir, '1h', candle_type='mark')
     assert set(pairs) == {'UNITTEST/USDT', 'XRP/USDT'}
 
     # TODO-lev: The tests below
@@ -643,17 +672,33 @@ def test_datahandler_ohlcv_get_pairs(testdatadir):
 def test_datahandler_ohlcv_get_available_data(testdatadir):
     paircombs = JsonDataHandler.ohlcv_get_available_data(testdatadir)
     # Convert to set to avoid failures due to sorting
-    assert set(paircombs) == {('UNITTEST/BTC', '5m'), ('ETH/BTC', '5m'), ('XLM/BTC', '5m'),
-                              ('TRX/BTC', '5m'), ('LTC/BTC', '5m'), ('XMR/BTC', '5m'),
-                              ('ZEC/BTC', '5m'), ('UNITTEST/BTC', '1m'), ('ADA/BTC', '5m'),
-                              ('ETC/BTC', '5m'), ('NXT/BTC', '5m'), ('DASH/BTC', '5m'),
-                              ('XRP/ETH', '1m'), ('XRP/ETH', '5m'), ('UNITTEST/BTC', '30m'),
-                              ('UNITTEST/BTC', '8m'), ('NOPAIR/XXX', '4m')}
+    assert set(paircombs) == {
+        ('UNITTEST/BTC', '5m', ''),
+        ('ETH/BTC', '5m', ''),
+        ('XLM/BTC', '5m', ''),
+        ('TRX/BTC', '5m', ''),
+        ('LTC/BTC', '5m', ''),
+        ('XMR/BTC', '5m', ''),
+        ('ZEC/BTC', '5m', ''),
+        ('UNITTEST/BTC', '1m', ''),
+        ('ADA/BTC', '5m', ''),
+        ('ETC/BTC', '5m', ''),
+        ('NXT/BTC', '5m', ''),
+        ('DASH/BTC', '5m', ''),
+        ('XRP/ETH', '1m', ''),
+        ('XRP/ETH', '5m', ''),
+        ('UNITTEST/BTC', '30m', ''),
+        ('UNITTEST/BTC', '8m', ''),
+        ('NOPAIR/XXX', '4m', ''),
+        ('UNITTEST/USDT', '1h', 'mark'),
+        ('XRP/USDT', '1h', ''),
+        ('XRP/USDT', '1h', 'mark'),
+    }
 
     paircombs = JsonGzDataHandler.ohlcv_get_available_data(testdatadir)
-    assert set(paircombs) == {('UNITTEST/BTC', '8m')}
+    assert set(paircombs) == {('UNITTEST/BTC', '8m', '')}
     paircombs = HDF5DataHandler.ohlcv_get_available_data(testdatadir)
-    assert set(paircombs) == {('UNITTEST/BTC', '5m')}
+    assert set(paircombs) == {('UNITTEST/BTC', '5m', '')}
 
 
 def test_jsondatahandler_trades_get_pairs(testdatadir):
@@ -666,27 +711,29 @@ def test_jsondatahandler_ohlcv_purge(mocker, testdatadir):
     mocker.patch.object(Path, "exists", MagicMock(return_value=False))
     unlinkmock = mocker.patch.object(Path, "unlink", MagicMock())
     dh = JsonGzDataHandler(testdatadir)
-    assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m')
+    assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', '')
+    assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', candle_type='mark')
    assert unlinkmock.call_count == 0
 
     mocker.patch.object(Path, "exists", MagicMock(return_value=True))
-    assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m')
-    assert unlinkmock.call_count == 1
+    assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', '')
+    assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', candle_type='mark')
+    assert unlinkmock.call_count == 2
 
 
 def test_jsondatahandler_ohlcv_load(testdatadir, caplog):
     dh = JsonDataHandler(testdatadir)
-    df = dh.ohlcv_load('XRP/ETH', '5m')
+    df = dh.ohlcv_load('XRP/ETH', '5m', '')
     assert len(df) == 711
 
-    df_mark = dh.ohlcv_load('XRP/USDT', '1h', candle_type="mark")
+    df_mark = dh.ohlcv_load('UNITTEST/USDT', '1h', candle_type="mark")
     assert len(df_mark) == 99
 
-    df_no_mark = dh.ohlcv_load('XRP/USDT', '1h')
+    df_no_mark = dh.ohlcv_load('UNITTEST/USDT', '1h', '')
     assert len(df_no_mark) == 0
 
     # Failure case (empty array)
-    df1 = dh.ohlcv_load('NOPAIR/XXX', '4m')
+    df1 = dh.ohlcv_load('NOPAIR/XXX', '4m', '')
     assert len(df1) == 0
     assert log_has("Could not load data for NOPAIR/XXX.", caplog)
     assert df.columns.equals(df1.columns)
@@ -720,6 +767,8 @@ def test_datahandler_ohlcv_append(datahandler, testdatadir, ):
     dh = get_datahandler(testdatadir, datahandler)
     with pytest.raises(NotImplementedError):
         dh.ohlcv_append('UNITTEST/ETH', '5m', DataFrame())
+    with pytest.raises(NotImplementedError):
+        dh.ohlcv_append('UNITTEST/ETH', '5m', DataFrame(), candle_type='mark')
 
 
 @pytest.mark.parametrize('datahandler', AVAILABLE_DATAHANDLERS)
@@ -849,12 +898,14 @@ def test_hdf5datahandler_ohlcv_purge(mocker, testdatadir):
     mocker.patch.object(Path, "exists", MagicMock(return_value=False))
     unlinkmock = mocker.patch.object(Path, "unlink", MagicMock())
     dh = HDF5DataHandler(testdatadir)
-    assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m')
+    assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', '')
+    assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', candle_type='mark')
     assert unlinkmock.call_count == 0
 
     mocker.patch.object(Path, "exists", MagicMock(return_value=True))
-    assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m')
-    assert unlinkmock.call_count == 1
+    assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', '')
+    assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', candle_type='mark')
+    assert unlinkmock.call_count == 2
 
 
 def test_gethandlerclass():
@@ -343,7 +343,8 @@ def test__set_leverage_binance(mocker, default_conf):


 @pytest.mark.asyncio
-async def test__async_get_historic_ohlcv_binance(default_conf, mocker, caplog):
+@pytest.mark.parametrize('candle_type', ['mark', ''])
+async def test__async_get_historic_ohlcv_binance(default_conf, mocker, caplog, candle_type):
     ohlcv = [
         [
             int((datetime.now(timezone.utc).timestamp() - 1000) * 1000),
@@ -360,16 +361,17 @@ async def test__async_get_historic_ohlcv_binance(default_conf, mocker, caplog):
     exchange._api_async.fetch_ohlcv = get_mock_coro(ohlcv)

     pair = 'ETH/BTC'
-    respair, restf, res = await exchange._async_get_historic_ohlcv(
-        pair, "5m", 1500000000000, is_new_pair=False)
+    respair, restf, restype, res = await exchange._async_get_historic_ohlcv(
+        pair, "5m", 1500000000000, is_new_pair=False, candle_type=candle_type)
     assert respair == pair
     assert restf == '5m'
+    assert restype == candle_type
     # Call with very old timestamp - causes tons of requests
     assert exchange._api_async.fetch_ohlcv.call_count > 400
     # assert res == ohlcv
     exchange._api_async.fetch_ohlcv.reset_mock()
-    _, _, res = await exchange._async_get_historic_ohlcv(
-        pair, "5m", 1500000000000, is_new_pair=True)
+    _, _, _, res = await exchange._async_get_historic_ohlcv(
+        pair, "5m", 1500000000000, is_new_pair=True, candle_type=candle_type)

     # Called twice - one "init" call - and one to get the actual data.
     assert exchange._api_async.fetch_ohlcv.call_count == 2
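The parametrization added here is the pattern used through the rest of the exchange tests below: instead of duplicating a test body per candle type, the existing test runs once with candle_type='' (spot) and once with candle_type='mark'. A stripped-down, hedged skeleton of that pattern (the test name and body are illustrative only, not part of the diff):

import pytest


@pytest.mark.parametrize('candle_type', ['mark', ''])
def test_runs_once_per_candle_type(candle_type):
    # pytest injects each value in turn; the empty string stands for regular
    # spot candles, 'mark' for mark-price candles.
    assert candle_type in ('mark', '')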
@@ -1554,7 +1554,8 @@ def test_fetch_ticker(default_conf, mocker, exchange_name):


 @pytest.mark.parametrize("exchange_name", EXCHANGES)
-def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name):
+@pytest.mark.parametrize('candle_type', ['mark', ''])
+def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name, candle_type):
     exchange = get_patched_exchange(mocker, default_conf, id=exchange_name)
     ohlcv = [
         [
@@ -1569,14 +1570,18 @@ def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name):
     pair = 'ETH/BTC'

     async def mock_candle_hist(pair, timeframe, since_ms, candle_type=None):
-        return pair, timeframe, ohlcv
+        return pair, timeframe, candle_type, ohlcv

     exchange._async_get_candle_history = Mock(wraps=mock_candle_hist)
     # one_call calculation * 1.8 should do 2 calls

     since = 5 * 60 * exchange.ohlcv_candle_limit('5m') * 1.8
-    ret = exchange.get_historic_ohlcv(pair, "5m", int((
-        arrow.utcnow().int_timestamp - since) * 1000))
+    ret = exchange.get_historic_ohlcv(
+        pair,
+        "5m",
+        int((arrow.utcnow().int_timestamp - since) * 1000),
+        candle_type=candle_type
+    )

     assert exchange._async_get_candle_history.call_count == 2
     # Returns twice the above OHLCV data
@@ -1589,13 +1594,18 @@ def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name):
         raise TimeoutError()

     exchange._async_get_candle_history = MagicMock(side_effect=mock_get_candle_hist_error)
-    ret = exchange.get_historic_ohlcv(pair, "5m", int(
-        (arrow.utcnow().int_timestamp - since) * 1000))
+    ret = exchange.get_historic_ohlcv(
+        pair,
+        "5m",
+        int((arrow.utcnow().int_timestamp - since) * 1000),
+        candle_type=candle_type
+    )
     assert log_has_re(r"Async code raised an exception: .*", caplog)


 @pytest.mark.parametrize("exchange_name", EXCHANGES)
-def test_get_historic_ohlcv_as_df(default_conf, mocker, exchange_name):
+@pytest.mark.parametrize('candle_type', ['mark', ''])
+def test_get_historic_ohlcv_as_df(default_conf, mocker, exchange_name, candle_type):
     exchange = get_patched_exchange(mocker, default_conf, id=exchange_name)
     ohlcv = [
         [
@@ -1625,15 +1635,19 @@ def test_get_historic_ohlcv_as_df(default_conf, mocker, exchange_name):
     ]
     pair = 'ETH/BTC'

-    async def mock_candle_hist(pair, timeframe, since_ms, candle_type=None):
-        return pair, timeframe, ohlcv
+    async def mock_candle_hist(pair, timeframe, since_ms, candle_type):
+        return pair, timeframe, candle_type, ohlcv

     exchange._async_get_candle_history = Mock(wraps=mock_candle_hist)
     # one_call calculation * 1.8 should do 2 calls

     since = 5 * 60 * exchange.ohlcv_candle_limit('5m') * 1.8
-    ret = exchange.get_historic_ohlcv_as_df(pair, "5m", int((
-        arrow.utcnow().int_timestamp - since) * 1000))
+    ret = exchange.get_historic_ohlcv_as_df(
+        pair,
+        "5m",
+        int((arrow.utcnow().int_timestamp - since) * 1000),
+        candle_type=candle_type
+    )

     assert exchange._async_get_candle_history.call_count == 2
     # Returns twice the above OHLCV data
@@ -1647,6 +1661,7 @@ def test_get_historic_ohlcv_as_df(default_conf, mocker, exchange_name):

 @pytest.mark.asyncio
 @pytest.mark.parametrize("exchange_name", EXCHANGES)
+# TODO-lev @pytest.mark.parametrize('candle_type', ['mark', ''])
 async def test__async_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name):
     ohlcv = [
         [
@@ -1663,7 +1678,7 @@ async def test__async_get_historic_ohlcv(default_conf, mocker, caplog, exchange_
     exchange._api_async.fetch_ohlcv = get_mock_coro(ohlcv)

     pair = 'ETH/USDT'
-    respair, restf, res = await exchange._async_get_historic_ohlcv(
+    respair, restf, _, res = await exchange._async_get_historic_ohlcv(
         pair, "5m", 1500000000000, is_new_pair=False)
     assert respair == pair
     assert restf == '5m'
@@ -1672,6 +1687,7 @@ async def test__async_get_historic_ohlcv(default_conf, mocker, caplog, exchange_
     assert res[0] == ohlcv[0]


+# TODO-lev: @pytest.mark.parametrize('candle_type', ['mark', ''])
 def test_refresh_latest_ohlcv(mocker, default_conf, caplog) -> None:
     ohlcv = [
         [
@@ -1696,7 +1712,7 @@ def test_refresh_latest_ohlcv(mocker, default_conf, caplog) -> None:
     exchange = get_patched_exchange(mocker, default_conf)
     exchange._api_async.fetch_ohlcv = get_mock_coro(ohlcv)

-    pairs = [('IOTA/ETH', '5m'), ('XRP/ETH', '5m')]
+    pairs = [('IOTA/ETH', '5m', ''), ('XRP/ETH', '5m', '')]
     # empty dicts
     assert not exchange._klines
     res = exchange.refresh_latest_ohlcv(pairs, cache=False)
@@ -1727,16 +1743,18 @@ def test_refresh_latest_ohlcv(mocker, default_conf, caplog) -> None:
     assert exchange.klines(pair, copy=False) is exchange.klines(pair, copy=False)

     # test caching
-    res = exchange.refresh_latest_ohlcv([('IOTA/ETH', '5m'), ('XRP/ETH', '5m')])
+    res = exchange.refresh_latest_ohlcv([('IOTA/ETH', '5m', ''), ('XRP/ETH', '5m', '')])
     assert len(res) == len(pairs)

     assert exchange._api_async.fetch_ohlcv.call_count == 0
     assert log_has(f"Using cached candle (OHLCV) data for pair {pairs[0][0]}, "
                    f"timeframe {pairs[0][1]} ...",
                    caplog)
-    res = exchange.refresh_latest_ohlcv([('IOTA/ETH', '5m'), ('XRP/ETH', '5m'), ('XRP/ETH', '1d')],
-                                        cache=False)
-    assert len(res) == 3
+    res = exchange.refresh_latest_ohlcv(
+        [('IOTA/ETH', '5m', ''), ('XRP/ETH', '5m', ''), ('XRP/ETH', '1d', '')],
+        cache=False
+    )
+    assert len(res) == 4


 @pytest.mark.asyncio
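From this hunk on, every pair list handed to refresh_latest_ohlcv, and every key in the resulting kline cache, is a three-element tuple rather than a two-element one. A small runnable sketch of the new shape (the plain dict below only stands in for the exchange's internal cache; '' and 'mark' are the only candle types that appear anywhere in this diff):

# (pair, timeframe, candle_type) is the shape used throughout the tests above.
pairs = [('IOTA/ETH', '5m', ''), ('XRP/ETH', '5m', '')]

klines = {}   # stand-in for the exchange-side cache keyed the same way
for pair, timeframe, candle_type in pairs:
    klines[(pair, timeframe, candle_type)] = []   # would hold the fetched OHLCV rows

assert ('IOTA/ETH', '5m', '') in klines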
@@ -1761,10 +1779,11 @@ async def test__async_get_candle_history(default_conf, mocker, caplog, exchange_
     pair = 'ETH/BTC'
     res = await exchange._async_get_candle_history(pair, "5m")
     assert type(res) is tuple
-    assert len(res) == 3
+    assert len(res) == 4
     assert res[0] == pair
     assert res[1] == "5m"
-    assert res[2] == ohlcv
+    assert res[2] == ''
+    assert res[3] == ohlcv
     assert exchange._api_async.fetch_ohlcv.call_count == 1
     assert not log_has(f"Using cached candle (OHLCV) data for {pair} ...", caplog)

@@ -1803,10 +1822,11 @@ async def test__async_get_candle_history_empty(default_conf, mocker, caplog):
     pair = 'ETH/BTC'
     res = await exchange._async_get_candle_history(pair, "5m")
     assert type(res) is tuple
-    assert len(res) == 3
+    assert len(res) == 4
     assert res[0] == pair
     assert res[1] == "5m"
-    assert res[2] == ohlcv
+    assert res[2] == ''
+    assert res[3] == ohlcv
     assert exchange._api_async.fetch_ohlcv.call_count == 1

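_async_get_candle_history (and, as shown earlier, _async_get_historic_ohlcv) now resolves to a four-element tuple, with the candle type slotted in ahead of the data. A tiny sketch of unpacking it, using a hand-written tuple in place of the real coroutine result (the sample row is made up; the layout matches the assertions above):

# Layout asserted above: (pair, timeframe, candle_type, ohlcv)
res = ('ETH/BTC', '5m', '', [[1527830400000, 0.07, 0.08, 0.07, 0.075, 1.0]])

pair, timeframe, candle_type, ohlcv = res
assert len(res) == 4
assert candle_type == ''          # '' -> spot candles, 'mark' -> mark-price candles
assert ohlcv[0][0] == 1527830400000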
@@ -1823,7 +1843,7 @@ def test_refresh_latest_ohlcv_inv_result(default_conf, mocker, caplog):
     # Monkey-patch async function with empty result
     exchange._api_async.fetch_ohlcv = MagicMock(side_effect=mock_get_candle_hist)

-    pairs = [("ETH/BTC", "5m"), ("XRP/BTC", "5m")]
+    pairs = [("ETH/BTC", "5m", ''), ("XRP/BTC", "5m", '')]
     res = exchange.refresh_latest_ohlcv(pairs)
     assert exchange._klines
     assert exchange._api_async.fetch_ohlcv.call_count == 2
@@ -2107,7 +2127,7 @@ async def test___async_get_candle_history_sort(default_conf, mocker, exchange_na
     # Test the OHLCV data sort
     res = await exchange._async_get_candle_history('ETH/BTC', default_conf['timeframe'])
     assert res[0] == 'ETH/BTC'
-    res_ohlcv = res[2]
+    res_ohlcv = res[3]

     assert sort_mock.call_count == 1
     assert res_ohlcv[0][0] == 1527830400000
@@ -2145,7 +2165,7 @@ async def test___async_get_candle_history_sort(default_conf, mocker, exchange_na
     res = await exchange._async_get_candle_history('ETH/BTC', default_conf['timeframe'])
     assert res[0] == 'ETH/BTC'
     assert res[1] == default_conf['timeframe']
-    res_ohlcv = res[2]
+    res_ohlcv = res[3]
     # Sorted not called again - data is already in order
     assert sort_mock.call_count == 0
     assert res_ohlcv[0][0] == 1527827700000
@@ -2999,7 +3019,7 @@ def test_timeframe_to_next_date():
 def test_market_is_tradable(
         mocker, default_conf, market_symbol, base,
         quote, spot, margin, futures, trademode, add_dict, exchange, expected_result
 ) -> None:
     default_conf['trading_mode'] = trademode
     mocker.patch('freqtrade.exchange.exchange.Exchange.validate_trading_mode_and_collateral')
     ex = get_patched_exchange(mocker, default_conf, id=exchange)
@@ -855,7 +855,7 @@ def test_backtest_alternate_buy_sell(default_conf, fee, mocker, testdatadir):
     results = result['results']
     assert len(results) == 100
     # Cached data should be 200
-    analyzed_df = backtesting.dataprovider.get_analyzed_dataframe('UNITTEST/BTC', '1m')[0]
+    analyzed_df = backtesting.dataprovider.get_analyzed_dataframe('UNITTEST/BTC', '1m', '')[0]
     assert len(analyzed_df) == 200
     # Expect last candle to be 1 below end date (as the last candle is assumed as "incomplete"
     # during backtesting)
@@ -924,8 +924,9 @@ def test_backtest_multi_pair(default_conf, fee, mocker, tres, pair, testdatadir)
     offset = 1 if tres == 0 else 0
     removed_candles = len(data[pair]) - offset - backtesting.strategy.startup_candle_count
     assert len(backtesting.dataprovider.get_analyzed_dataframe(pair, '5m')[0]) == removed_candles
-    assert len(backtesting.dataprovider.get_analyzed_dataframe(
-        'NXT/BTC', '5m')[0]) == len(data['NXT/BTC']) - 1 - backtesting.strategy.startup_candle_count
+    assert len(
+        backtesting.dataprovider.get_analyzed_dataframe('NXT/BTC', '5m', '')[0]
+    ) == len(data['NXT/BTC']) - 1 - backtesting.strategy.startup_candle_count

     backtest_conf = {
         'processed': processed,
@@ -460,11 +460,11 @@ def test_VolumePairList_whitelist_gen(mocker, whitelist_conf, shitcoinmarkets, t
     ohlcv_history_high_vola.loc[ohlcv_history_high_vola.index == 1, 'close'] = 0.00090

     ohlcv_data = {
-        ('ETH/BTC', '1d'): ohlcv_history,
-        ('TKN/BTC', '1d'): ohlcv_history,
-        ('LTC/BTC', '1d'): ohlcv_history.append(ohlcv_history),
-        ('XRP/BTC', '1d'): ohlcv_history,
-        ('HOT/BTC', '1d'): ohlcv_history_high_vola,
+        ('ETH/BTC', '1d', ''): ohlcv_history,
+        ('TKN/BTC', '1d', ''): ohlcv_history,
+        ('LTC/BTC', '1d', ''): ohlcv_history.append(ohlcv_history),
+        ('XRP/BTC', '1d', ''): ohlcv_history,
+        ('HOT/BTC', '1d', ''): ohlcv_history_high_vola,
     }

     mocker.patch('freqtrade.exchange.Exchange.exchange_has', MagicMock(return_value=True))
@@ -578,11 +578,11 @@ def test_VolumePairList_range(mocker, whitelist_conf, shitcoinmarkets, tickers,
     ohlcv_history_high_volume.loc[:, 'volume'] = 10

     ohlcv_data = {
-        ('ETH/BTC', '1d'): ohlcv_history,
-        ('TKN/BTC', '1d'): ohlcv_history,
-        ('LTC/BTC', '1d'): ohlcv_history_medium_volume,
-        ('XRP/BTC', '1d'): ohlcv_history_high_vola,
-        ('HOT/BTC', '1d'): ohlcv_history_high_volume,
+        ('ETH/BTC', '1d', ''): ohlcv_history,
+        ('TKN/BTC', '1d', ''): ohlcv_history,
+        ('LTC/BTC', '1d', ''): ohlcv_history_medium_volume,
+        ('XRP/BTC', '1d', ''): ohlcv_history_high_vola,
+        ('HOT/BTC', '1d', ''): ohlcv_history_high_volume,
     }

     mocker.patch('freqtrade.exchange.Exchange.exchange_has', MagicMock(return_value=True))
@@ -838,9 +838,9 @@ def test_agefilter_min_days_listed_too_large(mocker, default_conf, markets, tick
 def test_agefilter_caching(mocker, markets, whitelist_conf_agefilter, tickers, ohlcv_history):
     with time_machine.travel("2021-09-01 05:00:00 +00:00") as t:
         ohlcv_data = {
-            ('ETH/BTC', '1d'): ohlcv_history,
-            ('TKN/BTC', '1d'): ohlcv_history,
-            ('LTC/BTC', '1d'): ohlcv_history,
+            ('ETH/BTC', '1d', ''): ohlcv_history,
+            ('TKN/BTC', '1d', ''): ohlcv_history,
+            ('LTC/BTC', '1d', ''): ohlcv_history,
         }
         mocker.patch.multiple(
             'freqtrade.exchange.Exchange',
@@ -862,10 +862,10 @@ def test_agefilter_caching(mocker, markets, whitelist_conf_agefilter, tickers, o
         assert freqtrade.exchange.refresh_latest_ohlcv.call_count == 2

         ohlcv_data = {
-            ('ETH/BTC', '1d'): ohlcv_history,
-            ('TKN/BTC', '1d'): ohlcv_history,
-            ('LTC/BTC', '1d'): ohlcv_history,
-            ('XRP/BTC', '1d'): ohlcv_history.iloc[[0]],
+            ('ETH/BTC', '1d', ''): ohlcv_history,
+            ('TKN/BTC', '1d', ''): ohlcv_history,
+            ('LTC/BTC', '1d', ''): ohlcv_history,
+            ('XRP/BTC', '1d', ''): ohlcv_history.iloc[[0]],
         }
         mocker.patch('freqtrade.exchange.Exchange.refresh_latest_ohlcv', return_value=ohlcv_data)
         freqtrade.pairlists.refresh_pairlist()
@@ -883,10 +883,10 @@ def test_agefilter_caching(mocker, markets, whitelist_conf_agefilter, tickers, o
         t.move_to("2021-09-03 01:00:00 +00:00")
         # Called once for XRP/BTC
         ohlcv_data = {
-            ('ETH/BTC', '1d'): ohlcv_history,
-            ('TKN/BTC', '1d'): ohlcv_history,
-            ('LTC/BTC', '1d'): ohlcv_history,
-            ('XRP/BTC', '1d'): ohlcv_history,
+            ('ETH/BTC', '1d', ''): ohlcv_history,
+            ('TKN/BTC', '1d', ''): ohlcv_history,
+            ('LTC/BTC', '1d', ''): ohlcv_history,
+            ('XRP/BTC', '1d', ''): ohlcv_history,
         }
         mocker.patch('freqtrade.exchange.Exchange.refresh_latest_ohlcv', return_value=ohlcv_data)
         freqtrade.pairlists.refresh_pairlist()
@@ -947,12 +947,12 @@ def test_rangestabilityfilter_caching(mocker, markets, default_conf, tickers, oh
         get_tickers=tickers
     )
     ohlcv_data = {
-        ('ETH/BTC', '1d'): ohlcv_history,
-        ('TKN/BTC', '1d'): ohlcv_history,
-        ('LTC/BTC', '1d'): ohlcv_history,
-        ('XRP/BTC', '1d'): ohlcv_history,
-        ('HOT/BTC', '1d'): ohlcv_history,
-        ('BLK/BTC', '1d'): ohlcv_history,
+        ('ETH/BTC', '1d', ''): ohlcv_history,
+        ('TKN/BTC', '1d', ''): ohlcv_history,
+        ('LTC/BTC', '1d', ''): ohlcv_history,
+        ('XRP/BTC', '1d', ''): ohlcv_history,
+        ('HOT/BTC', '1d', ''): ohlcv_history,
+        ('BLK/BTC', '1d', ''): ohlcv_history,
     }
     mocker.patch.multiple(
         'freqtrade.exchange.Exchange',
@@ -1331,7 +1331,7 @@ def test_list_available_pairs(botclient):
     rc = client_get(client, f"{BASE_URI}/available_pairs")

     assert_response(rc)
-    assert rc.json()['length'] == 13
+    assert rc.json()['length'] == 15
     assert isinstance(rc.json()['pairs'], list)

     rc = client_get(client, f"{BASE_URI}/available_pairs?timeframe=5m")
@@ -1350,6 +1350,13 @@ def test_list_available_pairs(botclient):
     assert rc.json()['pairs'] == ['XRP/ETH']
     assert len(rc.json()['pair_interval']) == 1

+    rc = client_get(
+        client, f"{BASE_URI}/available_pairs?stake_currency=USDT&timeframe=1h&type=mark")
+    assert_response(rc)
+    assert rc.json()['length'] == 2
+    assert rc.json()['pairs'] == ['UNITTEST/USDT', 'XRP/USDT']
+    assert len(rc.json()['pair_interval']) == 3  # TODO-lev: What is pair_interval? Should it be 3?
+

 def test_sysinfo(botclient):
     ftbot, client = botclient
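The new block above exercises an extra query string on the available_pairs endpoint. A hedged example of hitting the same endpoint outside the test client; the full URL, port, and credentials are assumptions, only the endpoint name and the stake_currency/timeframe/type parameters come from the test:

import requests

resp = requests.get(
    'http://127.0.0.1:8080/api/v1/available_pairs',   # base URL assumed
    params={'stake_currency': 'USDT', 'timeframe': '1h', 'type': 'mark'},
    auth=('freqtrader', 'password'),                  # placeholder credentials
)
data = resp.json()
print(data['length'], data['pairs'])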
@@ -19,7 +19,7 @@ class InformativeDecoratorTest(IStrategy):
     startup_candle_count: int = 20

     def informative_pairs(self):
-        return [('BTC/USDT', '5m')]
+        return [('BTC/USDT', '5m', '')]

     def populate_buy_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
         dataframe['buy'] = 0
@@ -67,7 +67,7 @@ class InformativeDecoratorTest(IStrategy):
         dataframe['rsi_less'] = dataframe['rsi'] < dataframe['rsi_1h']

         # Mixing manual informative pairs with decorators.
-        informative = self.dp.get_pair_dataframe('BTC/USDT', '5m')
+        informative = self.dp.get_pair_dataframe('BTC/USDT', '5m', '')
         informative['rsi'] = 14
         dataframe = merge_informative_pair(dataframe, informative, self.timeframe, '5m', ffill=True)

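On the strategy side, the class above shows the same widened tuple for manually declared informative pairs and for direct DataProvider lookups. A short sketch of the two touch points, written as stand-alone functions that would normally live on an IStrategy subclass (self.dp is the usual DataProvider; only the '' spot type appears in this diff):

# Fragment: these would be methods on an IStrategy subclass, as in the class above.
# from freqtrade.strategy import merge_informative_pair   # import path assumed

def informative_pairs(self):
    # (pair, timeframe, candle_type); '' selects regular spot candles.
    return [('BTC/USDT', '5m', '')]


def populate_indicators(self, dataframe, metadata):
    informative = self.dp.get_pair_dataframe('BTC/USDT', '5m', '')
    informative['rsi'] = 14   # placeholder indicator, as in the test strategy
    return merge_informative_pair(dataframe, informative, self.timeframe, '5m', ffill=True)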
@@ -144,23 +144,24 @@ def test_stoploss_from_absolute():
     assert stoploss_from_absolute(0, 100) == 1


+# TODO-lev: @pytest.mark.parametrize('candle_type', ['mark', ''])
 def test_informative_decorator(mocker, default_conf):
     test_data_5m = generate_test_data('5m', 40)
     test_data_30m = generate_test_data('30m', 40)
     test_data_1h = generate_test_data('1h', 40)
     data = {
-        ('XRP/USDT', '5m'): test_data_5m,
-        ('XRP/USDT', '30m'): test_data_30m,
-        ('XRP/USDT', '1h'): test_data_1h,
-        ('LTC/USDT', '5m'): test_data_5m,
-        ('LTC/USDT', '30m'): test_data_30m,
-        ('LTC/USDT', '1h'): test_data_1h,
-        ('BTC/USDT', '30m'): test_data_30m,
-        ('BTC/USDT', '5m'): test_data_5m,
-        ('BTC/USDT', '1h'): test_data_1h,
-        ('ETH/USDT', '1h'): test_data_1h,
-        ('ETH/USDT', '30m'): test_data_30m,
-        ('ETH/BTC', '1h'): test_data_1h,
+        ('XRP/USDT', '5m', ''): test_data_5m,
+        ('XRP/USDT', '30m', ''): test_data_30m,
+        ('XRP/USDT', '1h', ''): test_data_1h,
+        ('LTC/USDT', '5m', ''): test_data_5m,
+        ('LTC/USDT', '30m', ''): test_data_30m,
+        ('LTC/USDT', '1h', ''): test_data_1h,
+        ('BTC/USDT', '30m', ''): test_data_30m,
+        ('BTC/USDT', '5m', ''): test_data_5m,
+        ('BTC/USDT', '1h', ''): test_data_1h,
+        ('ETH/USDT', '1h', ''): test_data_1h,
+        ('ETH/USDT', '30m', ''): test_data_30m,
+        ('ETH/BTC', '1h', ''): test_data_1h,
     }
     from .strats.informative_decorator_strategy import InformativeDecoratorTest
     default_conf['stake_currency'] = 'USDT'
@@ -171,19 +172,19 @@ def test_informative_decorator(mocker, default_conf):
     ])

     assert len(strategy._ft_informative) == 6  # Equal to number of decorators used
-    informative_pairs = [('XRP/USDT', '1h'), ('LTC/USDT', '1h'), ('XRP/USDT', '30m'),
-                         ('LTC/USDT', '30m'), ('BTC/USDT', '1h'), ('BTC/USDT', '30m'),
-                         ('BTC/USDT', '5m'), ('ETH/BTC', '1h'), ('ETH/USDT', '30m')]
+    informative_pairs = [('XRP/USDT', '1h', ''), ('LTC/USDT', '1h', ''), ('XRP/USDT', '30m', ''),
+                         ('LTC/USDT', '30m', ''), ('BTC/USDT', '1h', ''), ('BTC/USDT', '30m', ''),
+                         ('BTC/USDT', '5m', ''), ('ETH/BTC', '1h', ''), ('ETH/USDT', '30m', '')]
     for inf_pair in informative_pairs:
         assert inf_pair in strategy.gather_informative_pairs()

-    def test_historic_ohlcv(pair, timeframe):
-        return data[(pair, timeframe or strategy.timeframe)].copy()
+    def test_historic_ohlcv(pair, timeframe, candle_type):
+        return data[(pair, timeframe or strategy.timeframe, candle_type)].copy()
     mocker.patch('freqtrade.data.dataprovider.DataProvider.historic_ohlcv',
                  side_effect=test_historic_ohlcv)

     analyzed = strategy.advise_all_indicators(
-        {p: data[(p, strategy.timeframe)] for p in ('XRP/USDT', 'LTC/USDT')})
+        {p: data[(p, strategy.timeframe, '')] for p in ('XRP/USDT', 'LTC/USDT')})
     expected_columns = [
         'rsi_1h', 'rsi_30m',  # Stacked informative decorators
         'btc_usdt_rsi_1h',  # BTC 1h informative
@@ -681,7 +681,7 @@ def test_process_informative_pairs_added(default_conf_usdt, ticker_usdt, mocker)
         create_order=MagicMock(side_effect=TemporaryError),
         refresh_latest_ohlcv=refresh_mock,
     )
-    inf_pairs = MagicMock(return_value=[("BTC/ETH", '1m'), ("ETH/USDT", "1h")])
+    inf_pairs = MagicMock(return_value=[("BTC/ETH", '1m', ''), ("ETH/USDT", "1h", '')])
     mocker.patch.multiple(
         'freqtrade.strategy.interface.IStrategy',
         get_exit_signal=MagicMock(return_value=(False, False)),
@@ -696,9 +696,9 @@ def test_process_informative_pairs_added(default_conf_usdt, ticker_usdt, mocker)
     freqtrade.process()
     assert inf_pairs.call_count == 1
     assert refresh_mock.call_count == 1
-    assert ("BTC/ETH", "1m") in refresh_mock.call_args[0][0]
-    assert ("ETH/USDT", "1h") in refresh_mock.call_args[0][0]
-    assert ("ETH/USDT", default_conf_usdt["timeframe"]) in refresh_mock.call_args[0][0]
+    assert ("BTC/ETH", "1m", '') in refresh_mock.call_args[0][0]
+    assert ("ETH/USDT", "1h", '') in refresh_mock.call_args[0][0]
+    assert ("ETH/USDT", default_conf_usdt["timeframe"], '') in refresh_mock.call_args[0][0]


 @pytest.mark.parametrize("trading_mode", [
102 tests/testdata/XRP_USDT-1h.json vendored Normal file
@@ -0,0 +1,102 @@
+[
+[ 1637110800000, 1.0801, 1.09758, 1.07654, 1.07925, 3153694.607359 ],
+[ 1637114400000, 1.07896, 1.0875, 1.07351, 1.07616, 2697616.070908 ],
+[ 1637118000000, 1.07607, 1.08521, 1.05896, 1.06804, 4014666.826073 ],
+[ 1637121600000, 1.06848, 1.07846, 1.06067, 1.07629, 3764015.567745 ],
+[ 1637125200000, 1.07647, 1.08791, 1.07309, 1.0839, 1669038.113726 ],
+[ 1637128800000, 1.08414, 1.0856, 1.07431, 1.0794, 1921068.874499 ],
+[ 1637132400000, 1.0798, 1.09499, 1.07363, 1.08721, 2491096.863582 ],
+[ 1637136000000, 1.08688, 1.09133, 1.08004, 1.08011, 1983486.794272 ],
+[ 1637139600000, 1.08017, 1.08027, 1.06667, 1.07039, 3429247.985309 ],
+[ 1637143200000, 1.07054, 1.10699, 1.07038, 1.10284, 4554151.954177 ],
+[ 1637146800000, 1.10315, 1.10989, 1.09781, 1.1071, 2012983.10465 ],
+[ 1637150400000, 1.10627, 1.10849, 1.10155, 1.10539, 1117804.08918 ],
+[ 1637154000000, 1.10545, 1.11299, 1.09574, 1.09604, 2252781.33926 ],
+[ 1637157600000, 1.09583, 1.10037, 1.08402, 1.08404, 1882359.279342 ],
+[ 1637161200000, 1.08433, 1.08924, 1.07583, 1.08543, 1826745.82579 ],
+[ 1637164800000, 1.08571, 1.09622, 1.07946, 1.09496, 1651730.678891 ],
+[ 1637168400000, 1.09509, 1.0979, 1.0878, 1.0945, 1081210.614598 ],
+[ 1637172000000, 1.09483, 1.10223, 1.09362, 1.09922, 1065998.492028 ],
+[ 1637175600000, 1.09916, 1.10201, 1.09226, 1.09459, 924935.492048 ],
+[ 1637179200000, 1.09458, 1.10196, 1.09051, 1.09916, 1253539.625345 ],
+[ 1637182800000, 1.09939, 1.09948, 1.08751, 1.09485, 1066269.190094 ],
+[ 1637186400000, 1.0949, 1.095, 1.08537, 1.09229, 924726.680514 ],
+[ 1637190000000, 1.0923, 1.09877, 1.08753, 1.09522, 1150213.905599 ],
+[ 1637193600000, 1.09538, 1.10675, 1.09058, 1.10453, 1489867.578178 ],
+[ 1637197200000, 1.10446, 1.16313, 1.0978, 1.12907, 10016166.026355 ],
+[ 1637200800000, 1.1287, 1.15367, 1.12403, 1.1381, 7167920.053752 ],
+[ 1637204400000, 1.13818, 1.14242, 1.12358, 1.1244, 2665326.190545 ],
+[ 1637208000000, 1.12432, 1.14864, 1.11061, 1.11447, 9340547.947608 ],
+[ 1637211600000, 1.114, 1.12618, 1.10911, 1.11412, 11759138.472952 ],
+[ 1637215200000, 1.11381, 1.11701, 1.10507, 1.1136, 3104670.727264 ],
+[ 1637218800000, 1.11433, 1.1145, 1.09682, 1.10715, 2522287.830673 ],
+[ 1637222400000, 1.1073, 1.11, 1.10224, 1.10697, 2021691.204473 ],
+[ 1637226000000, 1.10622, 1.10707, 1.07727, 1.08674, 3679010.223352 ],
+[ 1637229600000, 1.08651, 1.09861, 1.08065, 1.09771, 2041421.476307 ],
+[ 1637233200000, 1.09784, 1.102, 1.08339, 1.08399, 1920597.122813 ],
+[ 1637236800000, 1.08458, 1.09523, 1.07961, 1.08263, 2403158.337373 ],
+[ 1637240400000, 1.08309, 1.08959, 1.06094, 1.07703, 4425686.808376 ],
+[ 1637244000000, 1.07702, 1.08064, 1.063, 1.07049, 3361334.048801 ],
+[ 1637247600000, 1.07126, 1.07851, 1.04538, 1.0562, 5865602.611111 ],
+[ 1637251200000, 1.05616, 1.06326, 1.0395, 1.04074, 4206860.947352 ],
+[ 1637254800000, 1.04023, 1.0533, 1.01478, 1.0417, 5641193.647291 ],
+[ 1637258400000, 1.04177, 1.05444, 1.04132, 1.05204, 1819341.083656 ],
+[ 1637262000000, 1.05201, 1.05962, 1.04964, 1.05518, 1567923.362515 ],
+[ 1637265600000, 1.05579, 1.05924, 1.04772, 1.04773, 1794108.065606 ],
+[ 1637269200000, 1.0484, 1.05622, 1.04183, 1.04544, 1936537.403899 ],
+[ 1637272800000, 1.04543, 1.05331, 1.03396, 1.03892, 2839486.418143 ],
+[ 1637276400000, 1.03969, 1.04592, 1.02886, 1.04086, 3116275.899177 ],
+[ 1637280000000, 1.0409, 1.05681, 1.02922, 1.05481, 4671209.916896 ],
+[ 1637283600000, 1.05489, 1.05538, 1.03539, 1.03599, 2566357.247547 ],
+[ 1637287200000, 1.03596, 1.04606, 1.02038, 1.02428, 3441834.238546 ],
+[ 1637290800000, 1.02483, 1.0291, 1.01785, 1.0285, 2678602.729339 ],
+[ 1637294400000, 1.0287, 1.0446, 1.0259, 1.04264, 2303621.340808 ],
+[ 1637298000000, 1.04313, 1.04676, 1.03662, 1.04499, 2426475.439485 ],
+[ 1637301600000, 1.0451, 1.04971, 1.041, 1.04448, 2088365.810515 ],
+[ 1637305200000, 1.04473, 1.04845, 1.03801, 1.04227, 2222396.213472 ],
+[ 1637308800000, 1.04211, 1.06965, 1.04168, 1.05711, 3267643.936025 ],
+[ 1637312400000, 1.0569, 1.06578, 1.05626, 1.05844, 1512848.016057 ],
+[ 1637316000000, 1.05814, 1.05916, 1.04923, 1.05464, 1710694.805693 ],
+[ 1637319600000, 1.05484, 1.05731, 1.0458, 1.05359, 1587100.45253 ],
+[ 1637323200000, 1.05382, 1.06063, 1.05156, 1.05227, 1409095.236152 ],
+[ 1637326800000, 1.05256, 1.06489, 1.04996, 1.06471, 1879315.174541 ],
+[ 1637330400000, 1.06491, 1.1036, 1.06489, 1.09439, 6212842.71216 ],
+[ 1637334000000, 1.09441, 1.10252, 1.082, 1.08879, 4833417.181969 ],
+[ 1637337600000, 1.08866, 1.09485, 1.07538, 1.09045, 2554438.746366 ],
+[ 1637341200000, 1.09058, 1.09906, 1.08881, 1.09039, 1961024.28963 ],
+[ 1637344800000, 1.09063, 1.09447, 1.08555, 1.09041, 1427538.639232 ],
+[ 1637348400000, 1.09066, 1.09521, 1.088, 1.09332, 847724.821691 ],
+[ 1637352000000, 1.09335, 1.09489, 1.08402, 1.08501, 1035043.133874 ],
+[ 1637355600000, 1.08474, 1.08694, 1.08, 1.08606, 969952.892274 ],
+[ 1637359200000, 1.08601, 1.09, 1.08201, 1.08476, 1105782.581808 ],
+[ 1637362800000, 1.08463, 1.09245, 1.08201, 1.08971, 1334467.438673 ],
+[ 1637366400000, 1.0897, 1.09925, 1.08634, 1.09049, 2460070.020396 ],
+[ 1637370000000, 1.0908, 1.10002, 1.09002, 1.09845, 1210028.489394 ],
+[ 1637373600000, 1.09785, 1.09791, 1.08944, 1.08962, 1261987.295847 ],
+[ 1637377200000, 1.08951, 1.0919, 1.08429, 1.08548, 1124938.783404 ],
+[ 1637380800000, 1.08536, 1.09, 1.08424, 1.08783, 1330935.680168 ],
+[ 1637384400000, 1.0877, 1.08969, 1.08266, 1.08617, 874900.746037 ],
+[ 1637388000000, 1.08622, 1.09224, 1.0843, 1.0889, 1240184.759178 ],
+[ 1637391600000, 1.08917, 1.0909, 1.08408, 1.08535, 706148.380072 ],
+[ 1637395200000, 1.08521, 1.08857, 1.07829, 1.08349, 1713832.050838 ],
+[ 1637398800000, 1.08343, 1.08841, 1.08272, 1.0855, 696597.06327 ],
+[ 1637402400000, 1.08553, 1.0898, 1.08353, 1.08695, 1104159.802108 ],
+[ 1637406000000, 1.08703, 1.09838, 1.08635, 1.09695, 1404001.384389 ],
+[ 1637409600000, 1.09695, 1.10175, 1.09024, 1.09278, 1219090.620484 ],
+[ 1637413200000, 1.093, 1.09577, 1.08615, 1.08792, 994797.546591 ],
+[ 1637416800000, 1.08793, 1.09239, 1.08572, 1.08725, 1251685.429497 ],
+[ 1637420400000, 1.08721, 1.08767, 1.06029, 1.06556, 3955719.53631 ],
+[ 1637424000000, 1.06553, 1.07385, 1.06169, 1.07257, 1868359.179534 ],
+[ 1637427600000, 1.07266, 1.0745, 1.06759, 1.07261, 1015134.469304 ],
+[ 1637431200000, 1.07255, 1.0974, 1.06819, 1.09369, 4377675.964829 ],
+[ 1637434800000, 1.09368, 1.09562, 1.08899, 1.09036, 914791.699929 ],
+[ 1637438400000, 1.09085, 1.09262, 1.08855, 1.09214, 661436.936672 ],
+[ 1637442000000, 1.0924, 1.09475, 1.08874, 1.09282, 593143.283519 ],
+[ 1637445600000, 1.09301, 1.09638, 1.09154, 1.09611, 603952.916221 ],
+[ 1637449200000, 1.09569, 1.09828, 1.09301, 1.09747, 676053.591571 ],
+[ 1637452800000, 1.09742, 1.09822, 1.09011, 1.0902, 1375704.506469 ],
+[ 1637456400000, 1.0901, 1.09311, 1.08619, 1.08856, 928706.03929 ],
+[ 1637460000000, 1.08855, 1.08941, 1.07401, 1.08035, 2669150.388642 ],
+[ 1637463600000, 1.08016, 1.08341, 1.07448, 1.07672, 1604049.131307 ],
+[ 1637467200000, 1.07685, 1.08229, 1.07552, 1.0765, 1153357.274076 ]
+]