Merge branch 'develop' of https://github.com/freqtrade/freqtrade into max-open-trades
Commit: 8c3ac56bc5
@@ -52,7 +52,7 @@ def _process_candles_and_indicators(pairlist, strategy_name, trades, signal_candles
     return analysed_trades_dict


-def _analyze_candles_and_indicators(pair, trades, signal_candles):
+def _analyze_candles_and_indicators(pair, trades: pd.DataFrame, signal_candles: pd.DataFrame):
     buyf = signal_candles

     if len(buyf) > 0:
@@ -120,7 +120,7 @@ def _do_group_table_output(bigdf, glist):

         else:
             agg_mask = {'profit_abs': ['count', 'sum', 'median', 'mean'],
-                        'profit_ratio': ['sum', 'median', 'mean']}
+                        'profit_ratio': ['median', 'mean', 'sum']}
             agg_cols = ['num_buys', 'profit_abs_sum', 'profit_abs_median',
                         'profit_abs_mean', 'median_profit_pct', 'mean_profit_pct',
                         'total_profit_pct']
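The reordering of the 'profit_ratio' aggregations appears to line the flattened aggregate columns up with the positional names in agg_cols ('median_profit_pct', 'mean_profit_pct', 'total_profit_pct'). A minimal sketch with a hypothetical trades frame (not the actual _do_group_table_output code) showing why the listed order matters when the columns are renamed positionally:

```python
import pandas as pd

# Hypothetical trades frame; column names mirror those used in the hunk above.
df = pd.DataFrame({
    "pair": ["ETH/BTC", "ETH/BTC", "XRP/BTC"],
    "profit_abs": [1.0, -0.5, 2.0],
    "profit_ratio": [0.01, -0.005, 0.02],
})

agg_mask = {'profit_abs': ['count', 'sum', 'median', 'mean'],
            'profit_ratio': ['median', 'mean', 'sum']}
agg_cols = ['num_buys', 'profit_abs_sum', 'profit_abs_median',
            'profit_abs_mean', 'median_profit_pct', 'mean_profit_pct',
            'total_profit_pct']

grouped = df.groupby("pair").agg(agg_mask)
# .agg() emits the columns in the order listed in agg_mask, so the positional
# rename below only lines up if 'median', 'mean', 'sum' matches the order of
# the *_profit_pct names in agg_cols.
grouped.columns = agg_cols
print(grouped)
```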
@@ -197,7 +197,7 @@ def calculate_cagr(days_passed: int, starting_balance: float, final_balance: flo
 def calculate_expectancy(trades: pd.DataFrame) -> float:
     """
     Calculate expectancy
-    :param trades: DataFrame containing trades (requires columns close_date and profit_ratio)
+    :param trades: DataFrame containing trades (requires columns close_date and profit_abs)
     :return: expectancy
     """
     if len(trades) == 0:
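The docstring correction reflects that the calculation is based on absolute profits rather than profit ratios. A minimal sketch of a textbook expectancy over a 'profit_abs' column (an illustration, not necessarily freqtrade's exact implementation):

```python
import pandas as pd


def expectancy_sketch(trades: pd.DataFrame) -> float:
    # Expectancy = win_rate * average_win - loss_rate * average_loss,
    # computed from absolute profits (column 'profit_abs').
    if len(trades) == 0:
        return 0.0
    wins = trades.loc[trades['profit_abs'] > 0, 'profit_abs']
    losses = trades.loc[trades['profit_abs'] < 0, 'profit_abs']
    win_rate = len(wins) / len(trades)
    loss_rate = len(losses) / len(trades)
    avg_win = wins.mean() if len(wins) else 0.0
    avg_loss = abs(losses.mean()) if len(losses) else 0.0
    return win_rate * avg_win - loss_rate * avg_loss


print(expectancy_sketch(pd.DataFrame({'profit_abs': [10.0, -5.0, 10.0, -5.0]})))  # 2.5
```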
@@ -239,7 +239,7 @@ def calculate_sortino(trades: pd.DataFrame, min_date: datetime, max_date: dateti

     down_stdev = np.std(trades.loc[trades['profit_abs'] < 0, 'profit_abs'] / starting_balance)

-    if down_stdev != 0:
+    if down_stdev != 0 and not np.isnan(down_stdev):
         sortino_ratio = expected_returns_mean / down_stdev * np.sqrt(365)
     else:
         # Define high (negative) sortino ratio to be clear that this is NOT optimal.
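The extra np.isnan() check matters because a backtest with no losing trades leaves the downside selection empty, and the standard deviation of an empty selection is NaN rather than 0, so the old `!= 0` guard let NaN propagate into the ratio. A small reproduction of that edge case:

```python
import numpy as np
import pandas as pd

trades = pd.DataFrame({'profit_abs': [0.5, 1.2, 0.3]})  # no losing trades
starting_balance = 1000.0

down_stdev = np.std(trades.loc[trades['profit_abs'] < 0, 'profit_abs'] / starting_balance)
print(down_stdev)            # nan - std of an empty selection
print(down_stdev != 0)       # True - the old guard would not catch this
print(np.isnan(down_stdev))  # True - the added check routes this to the else branch
```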
@@ -11,7 +11,7 @@ from freqtrade.enums import CandleType, MarginMode, TradingMode
 from freqtrade.exceptions import DDosProtection, OperationalException, TemporaryError
 from freqtrade.exchange import Exchange
 from freqtrade.exchange.common import retrier
-from freqtrade.exchange.types import Tickers
+from freqtrade.exchange.types import OHLCVResponse, Tickers
 from freqtrade.misc import deep_merge_dicts, json_load


@@ -112,7 +112,7 @@ class Binance(Exchange):
                                         since_ms: int, candle_type: CandleType,
                                         is_new_pair: bool = False, raise_: bool = False,
                                         until_ms: Optional[int] = None
-                                        ) -> Tuple[str, str, str, List]:
+                                        ) -> OHLCVResponse:
         """
         Overwrite to introduce "fast new pair" functionality by detecting the pair's listing date
         Does not work for other exchanges, which don't return the earliest data when called with "0"
@@ -36,7 +36,7 @@ from freqtrade.exchange.exchange_utils import (CcxtModuleType, amount_to_contrac
                                                price_to_precision, timeframe_to_minutes,
                                                timeframe_to_msecs, timeframe_to_next_date,
                                                timeframe_to_prev_date, timeframe_to_seconds)
-from freqtrade.exchange.types import Ticker, Tickers
+from freqtrade.exchange.types import OHLCVResponse, Ticker, Tickers
 from freqtrade.misc import (chunks, deep_merge_dicts, file_dump_json, file_load_json,
                             safe_value_fallback2)
 from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
@@ -1813,32 +1813,18 @@ class Exchange:
         :param candle_type: '', mark, index, premiumIndex, or funding_rate
         :return: List with candle (OHLCV) data
         """
-        pair, _, _, data = self.loop.run_until_complete(
+        pair, _, _, data, _ = self.loop.run_until_complete(
             self._async_get_historic_ohlcv(pair=pair, timeframe=timeframe,
                                            since_ms=since_ms, until_ms=until_ms,
                                            is_new_pair=is_new_pair, candle_type=candle_type))
         logger.info(f"Downloaded data for {pair} with length {len(data)}.")
         return data

-    def get_historic_ohlcv_as_df(self, pair: str, timeframe: str,
-                                 since_ms: int, candle_type: CandleType) -> DataFrame:
-        """
-        Minimal wrapper around get_historic_ohlcv - converting the result into a dataframe
-        :param pair: Pair to download
-        :param timeframe: Timeframe to get data for
-        :param since_ms: Timestamp in milliseconds to get history from
-        :param candle_type: Any of the enum CandleType (must match trading mode!)
-        :return: OHLCV DataFrame
-        """
-        ticks = self.get_historic_ohlcv(pair, timeframe, since_ms=since_ms, candle_type=candle_type)
-        return ohlcv_to_dataframe(ticks, timeframe, pair=pair, fill_missing=True,
-                                  drop_incomplete=self._ohlcv_partial_candle)
-
     async def _async_get_historic_ohlcv(self, pair: str, timeframe: str,
                                         since_ms: int, candle_type: CandleType,
                                         is_new_pair: bool = False, raise_: bool = False,
                                         until_ms: Optional[int] = None
-                                        ) -> Tuple[str, str, str, List]:
+                                        ) -> OHLCVResponse:
         """
         Download historic ohlcv
         :param is_new_pair: used by binance subclass to allow "fast" new pair downloading
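With get_historic_ohlcv_as_df removed (its only caller appears to have been the test deleted further down), a call site that still wants a DataFrame can reproduce the two-line wrapper itself. A minimal sketch, assuming ohlcv_to_dataframe remains available from freqtrade.data.converter and that reading the private _ohlcv_partial_candle attribute is acceptable:

```python
from pandas import DataFrame

from freqtrade.data.converter import ohlcv_to_dataframe
from freqtrade.enums import CandleType
from freqtrade.exchange import Exchange


def historic_ohlcv_as_df(exchange: Exchange, pair: str, timeframe: str,
                         since_ms: int, candle_type: CandleType) -> DataFrame:
    # Same two steps the deleted wrapper performed: download the raw candles,
    # then convert them using the exchange's partial-candle setting.
    ticks = exchange.get_historic_ohlcv(pair, timeframe, since_ms=since_ms,
                                        candle_type=candle_type)
    return ohlcv_to_dataframe(ticks, timeframe, pair=pair, fill_missing=True,
                              drop_incomplete=exchange._ohlcv_partial_candle)
```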
@@ -1869,15 +1855,16 @@ class Exchange:
                     continue
                 else:
                     # Deconstruct tuple if it's not an exception
-                    p, _, c, new_data = res
+                    p, _, c, new_data, _ = res
                     if p == pair and c == candle_type:
                         data.extend(new_data)
         # Sort data again after extending the result - above calls return in "async order"
         data = sorted(data, key=lambda x: x[0])
-        return pair, timeframe, candle_type, data
+        return pair, timeframe, candle_type, data, self._ohlcv_partial_candle

-    def _build_coroutine(self, pair: str, timeframe: str, candle_type: CandleType,
-                         since_ms: Optional[int], cache: bool) -> Coroutine:
+    def _build_coroutine(
+            self, pair: str, timeframe: str, candle_type: CandleType,
+            since_ms: Optional[int], cache: bool) -> Coroutine[Any, Any, OHLCVResponse]:
         not_all_data = cache and self.required_candle_call_count > 1
         if cache and (pair, timeframe, candle_type) in self._klines:
             candle_limit = self.ohlcv_candle_limit(timeframe, candle_type)
@@ -1914,7 +1901,7 @@ class Exchange:
         """
         Build Coroutines to execute as part of refresh_latest_ohlcv
         """
-        input_coroutines = []
+        input_coroutines: List[Coroutine[Any, Any, OHLCVResponse]] = []
         cached_pairs = []
         for pair, timeframe, candle_type in set(pair_list):
             if (timeframe not in self.timeframes
@@ -1978,7 +1965,6 @@ class Exchange:
         :return: Dict of [{(pair, timeframe): Dataframe}]
         """
         logger.debug("Refreshing candle (OHLCV) data for %d pairs", len(pair_list))
-        drop_incomplete = self._ohlcv_partial_candle if drop_incomplete is None else drop_incomplete

         # Gather coroutines to run
         input_coroutines, cached_pairs = self._build_ohlcv_dl_jobs(pair_list, since_ms, cache)
@@ -1996,8 +1982,9 @@ class Exchange:
                 if isinstance(res, Exception):
                     logger.warning(f"Async code raised an exception: {repr(res)}")
                     continue
-                # Deconstruct tuple (has 4 elements)
-                pair, timeframe, c_type, ticks = res
+                # Deconstruct tuple (has 5 elements)
+                pair, timeframe, c_type, ticks, drop_hint = res
+                drop_incomplete = drop_hint if drop_incomplete is None else drop_incomplete
                 ohlcv_df = self._process_ohlcv_df(
                     pair, timeframe, c_type, ticks, cache, drop_incomplete)

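The new fifth tuple element lets refresh_latest_ohlcv fall back to the per-response hint (the exchange's _ohlcv_partial_candle) only when the caller did not pass drop_incomplete explicitly, replacing the up-front default removed in the @@ -1978 hunk above. A tiny, hypothetical helper spelling out that precedence:

```python
from typing import Optional


def resolve_drop_incomplete(explicit: Optional[bool], drop_hint: bool) -> bool:
    # Mirrors the inserted line: an explicit drop_incomplete argument wins,
    # otherwise the hint carried in the OHLCVResponse tuple is used.
    return drop_hint if explicit is None else explicit


assert resolve_drop_incomplete(None, True) is True     # no override -> use the hint
assert resolve_drop_incomplete(False, True) is False   # caller override wins
```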
@@ -2025,7 +2012,7 @@ class Exchange:
         timeframe: str,
         candle_type: CandleType,
         since_ms: Optional[int] = None,
-    ) -> Tuple[str, str, str, List]:
+    ) -> OHLCVResponse:
         """
         Asynchronously get candle history data using fetch_ohlcv
         :param candle_type: '', mark, index, premiumIndex, or funding_rate
@@ -2065,9 +2052,9 @@ class Exchange:
                 data = sorted(data, key=lambda x: x[0])
             except IndexError:
                 logger.exception("Error loading %s. Result was %s.", pair, data)
-                return pair, timeframe, candle_type, []
+                return pair, timeframe, candle_type, [], self._ohlcv_partial_candle
             logger.debug("Done fetching pair %s, interval %s ...", pair, timeframe)
-            return pair, timeframe, candle_type, data
+            return pair, timeframe, candle_type, data, self._ohlcv_partial_candle

         except ccxt.NotSupported as e:
             raise OperationalException(
@@ -1,4 +1,6 @@
-from typing import Dict, Optional, TypedDict
+from typing import Dict, List, Optional, Tuple, TypedDict

+from freqtrade.enums import CandleType
+

 class Ticker(TypedDict):
@@ -14,3 +16,6 @@ class Ticker(TypedDict):


 Tickers = Dict[str, Ticker]
+
+# pair, timeframe, candleType, OHLCV, drop last?,
+OHLCVResponse = Tuple[str, str, CandleType, List, bool]
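OHLCVResponse is the common return shape for the candle-history coroutines changed above: every producer now appends the drop-last-candle hint, and every consumer unpacks five values. An illustrative unpacking (the literal values are made up):

```python
from freqtrade.enums import CandleType
from freqtrade.exchange.types import OHLCVResponse

response: OHLCVResponse = ("ETH/BTC", "5m", CandleType.SPOT, [], True)
pair, timeframe, candle_type, data, drop_hint = response
```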
@@ -1178,6 +1178,7 @@ class Backtesting:
                     open_trade_count_start = self.backtest_loop(
                         row, pair, current_time, end_date, max_open_trades,
                         open_trade_count_start)
+                    continue
                 detail_data.loc[:, 'enter_long'] = row[LONG_IDX]
                 detail_data.loc[:, 'exit_long'] = row[ELONG_IDX]
                 detail_data.loc[:, 'enter_short'] = row[SHORT_IDX]
@@ -28,7 +28,7 @@ class FreqaiExampleStrategy(IStrategy):
     plot_config = {
         "main_plot": {},
         "subplots": {
-            "prediction": {"prediction": {"color": "blue"}},
+            "&-s_close": {"prediction": {"color": "blue"}},
             "do_predict": {
                 "do_predict": {"color": "brown"},
             },
@@ -140,7 +140,8 @@ class FreqaiExampleStrategy(IStrategy):
         # If user wishes to use multiple targets, they can add more by
         # appending more columns with '&'. User should keep in mind that multi targets
         # requires a multioutput prediction model such as
-        # templates/CatboostPredictionMultiModel.py,
+        # freqai/prediction_models/CatboostRegressorMultiTarget.py,
+        # freqtrade trade --freqaimodel CatboostRegressorMultiTarget

         # df["&-s_range"] = (
         #     df["close"]
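The updated comment points at CatboostRegressorMultiTarget for multi-target setups. A hedged sketch of what a second '&'-prefixed target could look like, written as a stand-alone function rather than the strategy hook, with the range target mirroring the commented-out "&-s_range" block (label_period_candles is a stand-in for the strategy's FreqAI label window):

```python
import pandas as pd


def set_example_targets(df: pd.DataFrame, label_period_candles: int = 24) -> pd.DataFrame:
    # Primary target, matching the "&-s_close" column referenced by plot_config above:
    # mean close over the next `label_period_candles` candles, relative to the current close.
    df["&-s_close"] = (
        df["close"].shift(-label_period_candles).rolling(label_period_candles).mean()
        / df["close"] - 1
    )
    # Hypothetical second target: forward-looking close range over the same window.
    shifted = df["close"].shift(-label_period_candles)
    df["&-s_range"] = (shifted.rolling(label_period_candles).max()
                       - shifted.rolling(label_period_candles).min())
    return df
```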
@@ -746,9 +746,7 @@ def test_download_data_no_exchange(mocker, caplog):
     start_download_data(pargs)


-def test_download_data_no_pairs(mocker, caplog):
+def test_download_data_no_pairs(mocker):

-    mocker.patch.object(Path, "exists", MagicMock(return_value=False))
-
     mocker.patch('freqtrade.commands.data_commands.refresh_backtest_ohlcv_data',
                  MagicMock(return_value=["ETH/BTC", "XRP/BTC"]))
@@ -770,8 +768,6 @@ def test_download_data_no_pairs(mocker, caplog):

 def test_download_data_all_pairs(mocker, markets):

-    mocker.patch.object(Path, "exists", MagicMock(return_value=False))
-
     dl_mock = mocker.patch('freqtrade.commands.data_commands.refresh_backtest_ohlcv_data',
                            MagicMock(return_value=["ETH/BTC", "XRP/BTC"]))
     patch_exchange(mocker)
@@ -557,7 +557,7 @@ async def test__async_get_historic_ohlcv_binance(default_conf, mocker, caplog, c
     exchange._api_async.fetch_ohlcv = get_mock_coro(ohlcv)

     pair = 'ETH/BTC'
-    respair, restf, restype, res = await exchange._async_get_historic_ohlcv(
+    respair, restf, restype, res, _ = await exchange._async_get_historic_ohlcv(
         pair, "5m", 1500000000000, is_new_pair=False, candle_type=candle_type)
     assert respair == pair
     assert restf == '5m'
@@ -566,7 +566,7 @@ async def test__async_get_historic_ohlcv_binance(default_conf, mocker, caplog, c
     assert exchange._api_async.fetch_ohlcv.call_count > 400
     # assert res == ohlcv
     exchange._api_async.fetch_ohlcv.reset_mock()
-    _, _, _, res = await exchange._async_get_historic_ohlcv(
+    _, _, _, res, _ = await exchange._async_get_historic_ohlcv(
         pair, "5m", 1500000000000, is_new_pair=True, candle_type=candle_type)

     # Called twice - one "init" call - and one to get the actual data.
@@ -1955,7 +1955,7 @@ def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name, candle_
     pair = 'ETH/BTC'

     async def mock_candle_hist(pair, timeframe, candle_type, since_ms):
-        return pair, timeframe, candle_type, ohlcv
+        return pair, timeframe, candle_type, ohlcv, True

     exchange._async_get_candle_history = Mock(wraps=mock_candle_hist)
     # one_call calculation * 1.8 should do 2 calls
@@ -1988,62 +1988,6 @@ def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name, candle_
     assert log_has_re(r"Async code raised an exception: .*", caplog)


-@pytest.mark.parametrize("exchange_name", EXCHANGES)
-@pytest.mark.parametrize('candle_type', ['mark', ''])
-def test_get_historic_ohlcv_as_df(default_conf, mocker, exchange_name, candle_type):
-    exchange = get_patched_exchange(mocker, default_conf, id=exchange_name)
-    ohlcv = [
-        [
-            arrow.utcnow().int_timestamp * 1000,  # unix timestamp ms
-            1,  # open
-            2,  # high
-            3,  # low
-            4,  # close
-            5,  # volume (in quote currency)
-        ],
-        [
-            arrow.utcnow().shift(minutes=5).int_timestamp * 1000,  # unix timestamp ms
-            1,  # open
-            2,  # high
-            3,  # low
-            4,  # close
-            5,  # volume (in quote currency)
-        ],
-        [
-            arrow.utcnow().shift(minutes=10).int_timestamp * 1000,  # unix timestamp ms
-            1,  # open
-            2,  # high
-            3,  # low
-            4,  # close
-            5,  # volume (in quote currency)
-        ]
-    ]
-    pair = 'ETH/BTC'
-
-    async def mock_candle_hist(pair, timeframe, candle_type, since_ms):
-        return pair, timeframe, candle_type, ohlcv
-
-    exchange._async_get_candle_history = Mock(wraps=mock_candle_hist)
-    # one_call calculation * 1.8 should do 2 calls
-
-    since = 5 * 60 * exchange.ohlcv_candle_limit('5m', CandleType.SPOT) * 1.8
-    ret = exchange.get_historic_ohlcv_as_df(
-        pair,
-        "5m",
-        int((arrow.utcnow().int_timestamp - since) * 1000),
-        candle_type=candle_type
-    )
-
-    assert exchange._async_get_candle_history.call_count == 2
-    # Returns twice the above OHLCV data
-    assert len(ret) == 2
-    assert isinstance(ret, DataFrame)
-    assert 'date' in ret.columns
-    assert 'open' in ret.columns
-    assert 'close' in ret.columns
-    assert 'high' in ret.columns
-
-
 @pytest.mark.asyncio
 @pytest.mark.parametrize("exchange_name", EXCHANGES)
 @pytest.mark.parametrize('candle_type', [CandleType.MARK, CandleType.SPOT])
@@ -2063,7 +2007,7 @@ async def test__async_get_historic_ohlcv(default_conf, mocker, caplog, exchange_
     exchange._api_async.fetch_ohlcv = get_mock_coro(ohlcv)

     pair = 'ETH/USDT'
-    respair, restf, _, res = await exchange._async_get_historic_ohlcv(
+    respair, restf, _, res, _ = await exchange._async_get_historic_ohlcv(
         pair, "5m", 1500000000000, candle_type=candle_type, is_new_pair=False)
     assert respair == pair
     assert restf == '5m'
@@ -2074,7 +2018,7 @@ async def test__async_get_historic_ohlcv(default_conf, mocker, caplog, exchange_
     exchange._api_async.fetch_ohlcv.reset_mock()
     end_ts = 1_500_500_000_000
     start_ts = 1_500_000_000_000
-    respair, restf, _, res = await exchange._async_get_historic_ohlcv(
+    respair, restf, _, res, _ = await exchange._async_get_historic_ohlcv(
         pair, "5m", since_ms=start_ts, candle_type=candle_type, is_new_pair=False,
         until_ms=end_ts
     )
@@ -2306,7 +2250,7 @@ async def test__async_get_candle_history(default_conf, mocker, caplog, exchange_
     pair = 'ETH/BTC'
     res = await exchange._async_get_candle_history(pair, "5m", CandleType.SPOT)
     assert type(res) is tuple
-    assert len(res) == 4
+    assert len(res) == 5
     assert res[0] == pair
     assert res[1] == "5m"
     assert res[2] == CandleType.SPOT
@@ -2393,7 +2337,7 @@ async def test__async_get_candle_history_empty(default_conf, mocker, caplog):
     pair = 'ETH/BTC'
     res = await exchange._async_get_candle_history(pair, "5m", CandleType.SPOT)
     assert type(res) is tuple
-    assert len(res) == 4
+    assert len(res) == 5
     assert res[0] == pair
     assert res[1] == "5m"
     assert res[2] == CandleType.SPOT