Merge branch 'freqtrade:develop' into patch-1

commit d27d5624e0
Leonardo Custodio authored on 2023-01-31 12:00:00 -03:00; committed by GitHub
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 17 additions and 10 deletions

View File

@@ -9,7 +9,7 @@ from collections import deque
 from datetime import datetime, timezone
 from typing import Any, Dict, List, Optional, Tuple

-from pandas import DataFrame, to_timedelta
+from pandas import DataFrame, Timedelta, Timestamp, to_timedelta

 from freqtrade.configuration import TimeRange
 from freqtrade.constants import (FULL_DATAFRAME_THRESHOLD, Config, ListPairsWithTimeframes,
@@ -206,9 +206,11 @@ class DataProvider:
 existing_df, _ = self.__producer_pairs_df[producer_name][pair_key]

 # CHECK FOR MISSING CANDLES
-timeframe_delta = to_timedelta(timeframe)  # Convert the timeframe to a timedelta for pandas
-local_last = existing_df.iloc[-1]['date']  # We want the last date from our copy
-incoming_first = dataframe.iloc[0]['date']  # We want the first date from the incoming
+# Convert the timeframe to a timedelta for pandas
+timeframe_delta: Timedelta = to_timedelta(timeframe)
+local_last: Timestamp = existing_df.iloc[-1]['date']  # We want the last date from our copy
+# We want the first date from the incoming
+incoming_first: Timestamp = dataframe.iloc[0]['date']

 # Remove existing candles that are newer than the incoming first candle
 existing_df1 = existing_df[existing_df['date'] < incoming_first]
@@ -221,7 +223,7 @@ class DataProvider:
 # we missed some candles between our data and the incoming
 # so return False and candle_difference.
 if candle_difference > 1:
-    return (False, candle_difference)
+    return (False, int(candle_difference))
 if existing_df1.empty:
     appended_df = dataframe
 else:
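For context on the cast above: in pandas, dividing the gap between two candle dates by the timeframe Timedelta yields a float, so without int() the returned tuple would carry a float rather than a plain integer candle count. A minimal standalone sketch (the '1h' timeframe and both timestamps are invented for illustration; only pandas' to_timedelta, Timestamp and Timedelta are assumed):

```python
from pandas import Timedelta, Timestamp, to_timedelta

# A 1h timeframe and two candle dates four hours apart.
timeframe_delta: Timedelta = to_timedelta('1h')
local_last: Timestamp = Timestamp('2022-01-01 00:00', tz='UTC')
incoming_first: Timestamp = Timestamp('2022-01-01 04:00', tz='UTC')

# Timedelta / Timedelta division returns a float, not an int.
candle_difference = (incoming_first - local_last) / timeframe_delta
print(candle_difference)       # 4.0
print(int(candle_difference))  # 4 -- a plain int, as in `return (False, int(candle_difference))`
```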

View File

@@ -308,7 +308,7 @@ class IDataHandler(ABC):
     timerange=timerange_startup,
     candle_type=candle_type
 )
-if self._check_empty_df(pairdf, pair, timeframe, candle_type, warn_no_data, True):
+if self._check_empty_df(pairdf, pair, timeframe, candle_type, warn_no_data):
     return pairdf
 else:
     enddate = pairdf.iloc[-1]['date']
@@ -316,7 +316,7 @@
 if timerange_startup:
     self._validate_pairdata(pair, pairdf, timeframe, candle_type, timerange_startup)
     pairdf = trim_dataframe(pairdf, timerange_startup)
-    if self._check_empty_df(pairdf, pair, timeframe, candle_type, warn_no_data):
+    if self._check_empty_df(pairdf, pair, timeframe, candle_type, warn_no_data, True):
         return pairdf

 # incomplete candles should only be dropped if we didn't trim the end beforehand.
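The two hunks above move the trailing boolean from the pre-trim _check_empty_df call to the post-trim one. A rough sketch of the resulting control flow; the helper and flag names (check_empty, warn_price) are placeholders, not the real freqtrade signature:

```python
# Placeholder names; only the ordering of the two checks mirrors the diff above.
def load_trimmed(pairdf, trim, check_empty):
    # First check, before trimming: plain empty-data warning only.
    if check_empty(pairdf, warn_price=False):
        return pairdf
    pairdf = trim(pairdf)
    # Second check, after trimming: the extra warning flag is enabled here instead.
    if check_empty(pairdf, warn_price=True):
        return pairdf
    return pairdf
```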

View File

@@ -44,7 +44,7 @@ class SharpeHyperOptLossDaily(IHyperOptLoss):
 sum_daily = (
     results.resample(resample_freq, on='close_date').agg(
-        {"profit_ratio_after_slippage": sum}).reindex(t_index).fillna(0)
+        {"profit_ratio_after_slippage": 'sum'}).reindex(t_index).fillna(0)
 )

 total_profit = sum_daily["profit_ratio_after_slippage"] - risk_free_rate
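This file and SortinoHyperOptLossDaily below receive the same fix: the Python builtin sum is replaced by the string alias 'sum', which dispatches to pandas' own sum implementation and avoids the warning newer pandas versions emit when a builtin is passed to .agg(). A toy, self-contained example with invented data:

```python
import pandas as pd

results = pd.DataFrame({
    'close_date': pd.date_range('2022-01-01', periods=6, freq='12h', tz='UTC'),
    'profit_ratio_after_slippage': [0.01, -0.02, 0.03, 0.0, 0.01, 0.02],
})

# String alias instead of the builtin: same numbers, no deprecation warning.
sum_daily = results.resample('1D', on='close_date').agg(
    {"profit_ratio_after_slippage": 'sum'})
print(sum_daily)
```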

View File

@@ -46,7 +46,7 @@ class SortinoHyperOptLossDaily(IHyperOptLoss):
 sum_daily = (
     results.resample(resample_freq, on='close_date').agg(
-        {"profit_ratio_after_slippage": sum}).reindex(t_index).fillna(0)
+        {"profit_ratio_after_slippage": 'sum'}).reindex(t_index).fillna(0)
 )

 total_profit = sum_daily["profit_ratio_after_slippage"] - minimum_acceptable_return

View File

@@ -90,7 +90,7 @@ async def _process_consumer_request(
 elif type == RPCRequestType.ANALYZED_DF:
     # Limit the amount of candles per dataframe to 'limit' or 1500
-    limit = min(data.get('limit', 1500), 1500) if data else None
+    limit = int(min(data.get('limit', 1500), 1500)) if data else None
     pair = data.get('pair', None) if data else None

     # For every pair in the generator, send a separate message
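The added int() guards against non-integer limits: a 'limit' parsed from a JSON payload can arrive as a float, and min() would then propagate it. A tiny standalone illustration (the dict stands in for the real request data):

```python
# A client could legitimately send 500.0; JSON parsing keeps it as a float.
data = {'limit': 500.0}

limit = int(min(data.get('limit', 1500), 1500)) if data else None
print(limit, type(limit))  # 500 <class 'int'>
```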

View File

@@ -437,6 +437,7 @@ def test_dp__add_external_df(default_conf_usdt):
 # Add the same dataframe again - dataframe size shall not change.
 res = dp._add_external_df('ETH/USDT', df, last_analyzed, timeframe, CandleType.SPOT)
 assert res[0] is True
+assert isinstance(res[1], int)
 assert res[1] == 0
 df, _ = dp.get_producer_df('ETH/USDT', timeframe, CandleType.SPOT)
 assert len(df) == 24
@@ -446,6 +447,7 @@ def test_dp__add_external_df(default_conf_usdt):
 res = dp._add_external_df('ETH/USDT', df2, last_analyzed, timeframe, CandleType.SPOT)
 assert res[0] is True
+assert isinstance(res[1], int)
 assert res[1] == 0
 df, _ = dp.get_producer_df('ETH/USDT', timeframe, CandleType.SPOT)
 assert len(df) == 48
@@ -455,6 +457,7 @@ def test_dp__add_external_df(default_conf_usdt):
 res = dp._add_external_df('ETH/USDT', df3, last_analyzed, timeframe, CandleType.SPOT)
 assert res[0] is True
+assert isinstance(res[1], int)
 assert res[1] == 0
 df, _ = dp.get_producer_df('ETH/USDT', timeframe, CandleType.SPOT)
 # New length = 48 + 12 (since we have a 12 hour offset).
@@ -478,6 +481,7 @@ def test_dp__add_external_df(default_conf_usdt):
 res = dp._add_external_df('ETH/USDT', df4, last_analyzed, timeframe, CandleType.SPOT)
 assert res[0] is False
 # 36 hours - from 2022-01-03 12:00:00+00:00 to 2022-01-05 00:00:00+00:00
+assert isinstance(res[1], int)
 assert res[1] == 36
 df, _ = dp.get_producer_df('ETH/USDT', timeframe, CandleType.SPOT)
 # New length = 61 + 1
@@ -488,4 +492,5 @@ def test_dp__add_external_df(default_conf_usdt):
 res = dp._add_external_df('ETH/USDT', df4, last_analyzed, timeframe, CandleType.SPOT)
 assert res[0] is False
 # 36 hours - from 2022-01-03 12:00:00+00:00 to 2022-01-05 00:00:00+00:00
+assert isinstance(res[1], int)
 assert res[1] == 0
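The added isinstance assertions pin down the return contract established by the int() cast in dataprovider.py: _add_external_df always returns a plain (bool, int) tuple. A hedged consumer-side sketch, reusing the test's own variables, of how that pair might be acted on (the refresh step is illustrative, not the actual freqtrade consumer logic):

```python
res = dp._add_external_df('ETH/USDT', df4, last_analyzed, timeframe, CandleType.SPOT)
full_df_received, candle_difference = res  # (bool, int)
if not full_df_received and candle_difference > 0:
    # We are missing `candle_difference` candles; a consumer could ask the
    # producer for a full dataframe refresh at this point.
    print(f"missing {candle_difference} candles for ETH/USDT, requesting a refresh")
```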