Merge branch 'develop' into freqai_feature_engineering_functions

commit d82264ced9
Author: Wagner Costa
Date:   2023-01-05 17:55:35 -03:00
8 changed files with 20 additions and 87 deletions

View File

@@ -52,7 +52,7 @@ def _process_candles_and_indicators(pairlist, strategy_name, trades, signal_cand
     return analysed_trades_dict
-def _analyze_candles_and_indicators(pair, trades, signal_candles):
+def _analyze_candles_and_indicators(pair, trades: pd.DataFrame, signal_candles: pd.DataFrame):
     buyf = signal_candles
     if len(buyf) > 0:
@@ -120,7 +120,7 @@ def _do_group_table_output(bigdf, glist):
         else:
             agg_mask = {'profit_abs': ['count', 'sum', 'median', 'mean'],
-                        'profit_ratio': ['sum', 'median', 'mean']}
+                        'profit_ratio': ['median', 'mean', 'sum']}
             agg_cols = ['num_buys', 'profit_abs_sum', 'profit_abs_median',
                         'profit_abs_mean', 'median_profit_pct', 'mean_profit_pct',
                         'total_profit_pct']
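The reordering of the 'profit_ratio' aggregations lines the flattened column names up with agg_cols (median, mean, then sum). A minimal standalone sketch of that pandas behaviour, using made-up data rather than the freqtrade trade tables:

import pandas as pd

# pandas emits aggregated columns in the order given in the aggregation dict,
# so the flattened names must match the order of the column list applied afterwards.
df = pd.DataFrame({
    'enter_reason': ['tag_a', 'tag_a', 'tag_b'],
    'profit_abs': [0.01, -0.02, 0.03],
    'profit_ratio': [0.10, -0.20, 0.30],
})
agg_mask = {'profit_abs': ['count', 'sum', 'median', 'mean'],
            'profit_ratio': ['median', 'mean', 'sum']}
grouped = df.groupby('enter_reason').agg(agg_mask)
grouped.columns = ['_'.join(col) for col in grouped.columns]
print(grouped.columns.tolist())
# ['profit_abs_count', 'profit_abs_sum', 'profit_abs_median', 'profit_abs_mean',
#  'profit_ratio_median', 'profit_ratio_mean', 'profit_ratio_sum']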

View File

@@ -239,7 +239,7 @@ def calculate_sortino(trades: pd.DataFrame, min_date: datetime, max_date: dateti
     down_stdev = np.std(trades.loc[trades['profit_abs'] < 0, 'profit_abs'] / starting_balance)
-    if down_stdev != 0:
+    if down_stdev != 0 and not np.isnan(down_stdev):
         sortino_ratio = expected_returns_mean / down_stdev * np.sqrt(365)
     else:
         # Define high (negative) sortino ratio to be clear that this is NOT optimal.
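The extra isnan() guard covers backtests with no losing trades: the selection of negative profits is then empty, np.std() of it is nan, and nan still passes a plain "!= 0" check. A small standalone illustration with made-up trades, not the freqtrade metrics code itself:

import numpy as np
import pandas as pd

# With only winning trades the mask selects nothing; np.std() of the empty
# selection is nan, and nan != 0 evaluates to True, so the old check alone
# would still run the division and produce a nan sortino ratio.
trades = pd.DataFrame({'profit_abs': [0.5, 1.2, 0.3]})   # only winners
starting_balance = 100
down_stdev = np.std(trades.loc[trades['profit_abs'] < 0, 'profit_abs'] / starting_balance)
print(down_stdev)                  # nan
print(down_stdev != 0)             # True  -> old guard passes
print(not np.isnan(down_stdev))    # False -> new guard rejects it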

View File

@@ -11,7 +11,7 @@ from freqtrade.enums import CandleType, MarginMode, TradingMode
 from freqtrade.exceptions import DDosProtection, OperationalException, TemporaryError
 from freqtrade.exchange import Exchange
 from freqtrade.exchange.common import retrier
-from freqtrade.exchange.types import Tickers
+from freqtrade.exchange.types import OHLCVResponse, Tickers
 from freqtrade.misc import deep_merge_dicts, json_load
@@ -112,7 +112,7 @@ class Binance(Exchange):
                                         since_ms: int, candle_type: CandleType,
                                         is_new_pair: bool = False, raise_: bool = False,
                                         until_ms: Optional[int] = None
-                                        ) -> Tuple[str, str, str, List]:
+                                        ) -> OHLCVResponse:
         """
         Overwrite to introduce "fast new pair" functionality by detecting the pair's listing date
         Does not work for other exchanges, which don't return the earliest data when called with "0"
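The docstring describes the "fast new pair" trick: Binance returns the earliest available candles when asked for data since 0, so the listing date can be probed with one call and used as the real download start. A rough sketch of that idea only, with hypothetical names and a raw ccxt-style call, not the actual Binance subclass code:

# Sketch of the idea only: probe_listing_date and the api argument are
# illustrative, not the real freqtrade implementation.
async def probe_listing_date(api, pair: str, timeframe: str, since_ms: int) -> int:
    # Asking Binance for candles "since 0" returns the earliest data available,
    # i.e. the first candle after the pair was listed.
    first_candles = await api.fetch_ohlcv(pair, timeframe, since=0, limit=1)
    if first_candles:
        listing_ms = first_candles[0][0]      # open time of the very first candle
        return max(since_ms, listing_ms)      # never download from before the listing
    return since_ms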

View File

@@ -36,7 +36,7 @@ from freqtrade.exchange.exchange_utils import (CcxtModuleType, amount_to_contrac
                                                price_to_precision, timeframe_to_minutes,
                                                timeframe_to_msecs, timeframe_to_next_date,
                                                timeframe_to_prev_date, timeframe_to_seconds)
-from freqtrade.exchange.types import Ticker, Tickers
+from freqtrade.exchange.types import OHLCVResponse, Ticker, Tickers
 from freqtrade.misc import (chunks, deep_merge_dicts, file_dump_json, file_load_json,
                             safe_value_fallback2)
 from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
@@ -1820,25 +1820,11 @@ class Exchange:
         logger.info(f"Downloaded data for {pair} with length {len(data)}.")
         return data
-    def get_historic_ohlcv_as_df(self, pair: str, timeframe: str,
-                                 since_ms: int, candle_type: CandleType) -> DataFrame:
-        """
-        Minimal wrapper around get_historic_ohlcv - converting the result into a dataframe
-        :param pair: Pair to download
-        :param timeframe: Timeframe to get data for
-        :param since_ms: Timestamp in milliseconds to get history from
-        :param candle_type: Any of the enum CandleType (must match trading mode!)
-        :return: OHLCV DataFrame
-        """
-        ticks = self.get_historic_ohlcv(pair, timeframe, since_ms=since_ms, candle_type=candle_type)
-        return ohlcv_to_dataframe(ticks, timeframe, pair=pair, fill_missing=True,
-                                  drop_incomplete=self._ohlcv_partial_candle)
     async def _async_get_historic_ohlcv(self, pair: str, timeframe: str,
                                         since_ms: int, candle_type: CandleType,
                                         is_new_pair: bool = False, raise_: bool = False,
                                         until_ms: Optional[int] = None
-                                        ) -> Tuple[str, str, str, List]:
+                                        ) -> OHLCVResponse:
         """
         Download historic ohlcv
         :param is_new_pair: used by binance subclass to allow "fast" new pair downloading
@@ -1876,8 +1862,9 @@ class Exchange:
         data = sorted(data, key=lambda x: x[0])
         return pair, timeframe, candle_type, data
-    def _build_coroutine(self, pair: str, timeframe: str, candle_type: CandleType,
-                         since_ms: Optional[int], cache: bool) -> Coroutine:
+    def _build_coroutine(
+            self, pair: str, timeframe: str, candle_type: CandleType,
+            since_ms: Optional[int], cache: bool) -> Coroutine[Any, Any, OHLCVResponse]:
         not_all_data = cache and self.required_candle_call_count > 1
         if cache and (pair, timeframe, candle_type) in self._klines:
             candle_limit = self.ohlcv_candle_limit(timeframe, candle_type)
@@ -1914,7 +1901,7 @@ class Exchange:
         """
         Build Coroutines to execute as part of refresh_latest_ohlcv
         """
-        input_coroutines = []
+        input_coroutines: List[Coroutine[Any, Any, OHLCVResponse]] = []
         cached_pairs = []
         for pair, timeframe, candle_type in set(pair_list):
             if (timeframe not in self.timeframes
@@ -2025,7 +2012,7 @@ class Exchange:
         timeframe: str,
         candle_type: CandleType,
         since_ms: Optional[int] = None,
-    ) -> Tuple[str, str, str, List]:
+    ) -> OHLCVResponse:
         """
         Asynchronously get candle history data using fetch_ohlcv
         :param candle_type: '', mark, index, premiumIndex, or funding_rate

View File

@@ -1,4 +1,6 @@
-from typing import Dict, Optional, TypedDict
+from typing import Dict, List, Optional, Tuple, TypedDict
+from freqtrade.enums import CandleType
 class Ticker(TypedDict):
@@ -14,3 +16,6 @@ class Ticker(TypedDict):
 Tickers = Dict[str, Ticker]
+# pair, timeframe, candleType, OHLCV
+OHLCVResponse = Tuple[str, str, CandleType, List]
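The new alias only names the tuple shape that the candle-download coroutines already return, so the annotations changed above stay in sync with the existing return statement `return pair, timeframe, candle_type, data`. An illustrative consumer, with a hypothetical helper name:

from freqtrade.exchange.types import OHLCVResponse

# candle_count is a hypothetical helper, shown only to illustrate how a value
# typed with the new alias is meant to be unpacked.
def candle_count(res: OHLCVResponse) -> int:
    pair, timeframe, candle_type, candles = res
    # candles is the raw ccxt-style list of [timestamp, open, high, low, close, volume] rows
    return len(candles)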

View File

@@ -1177,6 +1177,7 @@ class Backtesting:
                         open_trade_count_start = self.backtest_loop(
                             row, pair, current_time, end_date, max_open_trades,
                             open_trade_count_start)
+                        continue
                     detail_data.loc[:, 'enter_long'] = row[LONG_IDX]
                     detail_data.loc[:, 'exit_long'] = row[ELONG_IDX]
                     detail_data.loc[:, 'enter_short'] = row[SHORT_IDX]
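The added `continue` matters because the lines below it prepare the detail-timeframe DataFrame; once the fallback call to backtest_loop has handled the candle on the main timeframe, that block must be skipped. A standalone sketch of the control flow, with hypothetical helpers rather than the Backtesting class:

import pandas as pd

# process_candles is a hypothetical stand-in for the backtesting loop body,
# shown only to illustrate why the fallback branch needs "continue".
def process_candles(rows, detail_frames):
    handled_on_main_timeframe = 0
    for row, detail_data in zip(rows, detail_frames):
        if detail_data is None or detail_data.empty:
            # Fallback: handle the candle on the main timeframe ...
            handled_on_main_timeframe += 1      # stand-in for self.backtest_loop(row, ...)
            continue                            # ... and skip the detail handling below
        detail_data.loc[:, 'enter_long'] = row[0]   # stand-in for row[LONG_IDX]
    return handled_on_main_timeframe

rows = [(1,), (0,)]
detail_frames = [pd.DataFrame(), pd.DataFrame({'close': [1.0, 2.0]})]
print(process_candles(rows, detail_frames))   # 1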

View File

@@ -746,9 +746,7 @@ def test_download_data_no_exchange(mocker, caplog):
         start_download_data(pargs)
-def test_download_data_no_pairs(mocker, caplog):
-    mocker.patch.object(Path, "exists", MagicMock(return_value=False))
+def test_download_data_no_pairs(mocker):
     mocker.patch('freqtrade.commands.data_commands.refresh_backtest_ohlcv_data',
                  MagicMock(return_value=["ETH/BTC", "XRP/BTC"]))
@@ -770,8 +768,6 @@
 def test_download_data_all_pairs(mocker, markets):
-    mocker.patch.object(Path, "exists", MagicMock(return_value=False))
     dl_mock = mocker.patch('freqtrade.commands.data_commands.refresh_backtest_ohlcv_data',
                            MagicMock(return_value=["ETH/BTC", "XRP/BTC"]))
     patch_exchange(mocker)

View File

@@ -1988,62 +1988,6 @@ def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name, candle_
     assert log_has_re(r"Async code raised an exception: .*", caplog)
-@pytest.mark.parametrize("exchange_name", EXCHANGES)
-@pytest.mark.parametrize('candle_type', ['mark', ''])
-def test_get_historic_ohlcv_as_df(default_conf, mocker, exchange_name, candle_type):
-    exchange = get_patched_exchange(mocker, default_conf, id=exchange_name)
-    ohlcv = [
-        [
-            arrow.utcnow().int_timestamp * 1000,  # unix timestamp ms
-            1,  # open
-            2,  # high
-            3,  # low
-            4,  # close
-            5,  # volume (in quote currency)
-        ],
-        [
-            arrow.utcnow().shift(minutes=5).int_timestamp * 1000,  # unix timestamp ms
-            1,  # open
-            2,  # high
-            3,  # low
-            4,  # close
-            5,  # volume (in quote currency)
-        ],
-        [
-            arrow.utcnow().shift(minutes=10).int_timestamp * 1000,  # unix timestamp ms
-            1,  # open
-            2,  # high
-            3,  # low
-            4,  # close
-            5,  # volume (in quote currency)
-        ]
-    ]
-    pair = 'ETH/BTC'
-
-    async def mock_candle_hist(pair, timeframe, candle_type, since_ms):
-        return pair, timeframe, candle_type, ohlcv
-
-    exchange._async_get_candle_history = Mock(wraps=mock_candle_hist)
-    # one_call calculation * 1.8 should do 2 calls
-    since = 5 * 60 * exchange.ohlcv_candle_limit('5m', CandleType.SPOT) * 1.8
-    ret = exchange.get_historic_ohlcv_as_df(
-        pair,
-        "5m",
-        int((arrow.utcnow().int_timestamp - since) * 1000),
-        candle_type=candle_type
-    )
-
-    assert exchange._async_get_candle_history.call_count == 2
-    # Returns twice the above OHLCV data
-    assert len(ret) == 2
-    assert isinstance(ret, DataFrame)
-    assert 'date' in ret.columns
-    assert 'open' in ret.columns
-    assert 'close' in ret.columns
-    assert 'high' in ret.columns
 @pytest.mark.asyncio
 @pytest.mark.parametrize("exchange_name", EXCHANGES)
 @pytest.mark.parametrize('candle_type', [CandleType.MARK, CandleType.SPOT])