minor improvements, fixes, old config+constant removal

Timothy Pogue 2022-09-04 10:22:10 -06:00
parent 1601868854
commit 07f806a314
10 changed files with 51 additions and 38 deletions

.gitignore vendored
View File

@@ -113,5 +113,3 @@ target/
 !config_examples/config_full.example.json
 !config_examples/config_kraken.example.json
 !config_examples/config_freqai.example.json
-!config_examples/config_leader.example.json
-!config_examples/config_follower.example.json

View File

@@ -175,12 +175,15 @@
         "password": "SuperSecurePassword",
         "ws_token": "a_secret_ws_token"
     },
+    // The ExternalMessageConsumer config should only be enabled on an instance
+    // that listens to outside data from another instance. This should not be enabled
+    // in your producer of data.
    "external_message_consumer": {
        "enabled": false,
        "producers": [
            {
                "name": "default",
-               "url": "ws://some.freqtrade.bot/api/v1/message/ws",
+               "url": "ws://localhost:8081/api/v1/message/ws",
                "ws_token": "a_secret_ws_token"
            }
        ],
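For orientation, this block is only read on the consuming instance; a minimal sketch of how the section is looked up, mirroring the DataProvider change further down (here `config` stands for the parsed configuration dict, and the variable names are illustrative):

# Hedged sketch: reading the external_message_consumer block (not the full consumer setup).
emc_config = config.get('external_message_consumer', {})
if emc_config.get('enabled', False):
    producers = emc_config.get('producers', [])
    num_sources = len(producers)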

View File

@@ -33,8 +33,7 @@ HYPEROPT_LOSS_BUILTIN = ['ShortTradeDurHyperOptLoss', 'OnlyProfitHyperOptLoss',
 AVAILABLE_PAIRLISTS = ['StaticPairList', 'VolumePairList',
                        'AgeFilter', 'OffsetFilter', 'PerformanceFilter',
                        'PrecisionFilter', 'PriceFilter', 'RangeStabilityFilter',
-                       'ShuffleFilter', 'SpreadFilter', 'VolatilityFilter',
-                       'ExternalPairList']
+                       'ShuffleFilter', 'SpreadFilter', 'VolatilityFilter']
 AVAILABLE_PROTECTIONS = ['CooldownPeriod', 'LowProfitPairs', 'MaxDrawdown', 'StoplossGuard']
 AVAILABLE_DATAHANDLERS = ['json', 'jsongz', 'hdf5']
 BACKTEST_BREAKDOWNS = ['day', 'week', 'month']
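AVAILABLE_PAIRLISTS is used, among other places, for config validation, so a configuration that still references the removed 'ExternalPairList' would now be rejected. A rough sketch of the kind of check involved (the schema details and the exception type are assumptions, not the actual freqtrade validation code):

# Hedged sketch of the effect of the constant removal:
pairlist_config = [{"method": "ExternalPairList"}]
for entry in pairlist_config:
    if entry["method"] not in AVAILABLE_PAIRLISTS:
        raise ValueError(f"Pairlist method {entry['method']} is no longer available")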

View File

@@ -48,8 +48,11 @@ class DataProvider:
         self.__producer_pairs: Dict[str, List[str]] = {}
         self._msg_queue: deque = deque()

+        self._default_candle_type = self._config.get('candle_type_def', CandleType.SPOT)
+        self._default_timeframe = self._config.get('timeframe', '1h')
+
         self.__msg_cache = PeriodicCache(
-            maxsize=1000, ttl=timeframe_to_seconds(self._config.get('timeframe', '1h')))
+            maxsize=1000, ttl=timeframe_to_seconds(self._default_timeframe))

         self._num_sources = len(
             self._config.get('external_message_consumer', {}).get('producers', [])
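The message-cache TTL now derives from the cached default timeframe. A small illustrative sketch of that derivation (the import location is an assumption about where freqtrade exposes the helper; the values are just examples):

from freqtrade.exchange import timeframe_to_seconds  # assumed import path

config = {}                                    # no 'timeframe' key configured
default_timeframe = config.get('timeframe', '1h')
ttl = timeframe_to_seconds(default_timeframe)  # '1h' -> 3600 seconds
# PeriodicCache(maxsize=1000, ttl=3600): cached messages expire after one candle period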
@@ -84,7 +87,7 @@ class DataProvider:
             dataframe, datetime.now(timezone.utc))

     # For multiple producers we will want to merge the pairlists instead of overwriting
-    def set_producer_pairs(self, pairlist: List[str], producer_name: str = "default"):
+    def _set_producer_pairs(self, pairlist: List[str], producer_name: str = "default"):
         """
         Set the pairs received to later be used.
         This only supports 1 Producer right now.
@@ -101,7 +104,7 @@ class DataProvider:
         """
         return self.__producer_pairs.get(producer_name, [])

-    def emit_df(
+    def _emit_df(
         self,
         pair_key: PairWithTimeframe,
         dataframe: DataFrame
@@ -123,12 +126,12 @@ class DataProvider:
             }
         )

-    def add_external_df(
+    def _add_external_df(
         self,
         pair: str,
-        timeframe: str,
         dataframe: DataFrame,
-        candle_type: CandleType,
+        timeframe: Optional[str] = None,
+        candle_type: Optional[CandleType] = None,
         producer_name: str = "default"
     ) -> None:
         """
@@ -138,18 +141,22 @@ class DataProvider:
         :param timeframe: Timeframe to get data for
         :param candle_type: Any of the enum CandleType (must match trading mode!)
         """
-        pair_key = (pair, timeframe, candle_type)
+        _timeframe = self._default_timeframe if not timeframe else timeframe
+        _candle_type = self._default_candle_type if not candle_type else candle_type
+
+        pair_key = (pair, _timeframe, _candle_type)

         if producer_name not in self.__producer_pairs_df:
             self.__producer_pairs_df[producer_name] = {}

         self.__producer_pairs_df[producer_name][pair_key] = (dataframe, datetime.now(timezone.utc))

+        logger.debug(f"External DataFrame for {pair_key} from {producer_name} added.")
+
     def get_external_df(
         self,
         pair: str,
-        timeframe: str,
-        candle_type: CandleType,
+        timeframe: Optional[str] = None,
+        candle_type: Optional[CandleType] = None,
         producer_name: str = "default"
     ) -> Tuple[DataFrame, datetime]:
         """
@@ -160,16 +167,22 @@ class DataProvider:
         :param timeframe: Timeframe to get data for
         :param candle_type: Any of the enum CandleType (must match trading mode!)
         """
-        pair_key = (pair, timeframe, candle_type)
+        _timeframe = self._default_timeframe if not timeframe else timeframe
+        _candle_type = self._default_candle_type if not candle_type else candle_type
+
+        pair_key = (pair, _timeframe, _candle_type)

+        # If we have no data from this Producer yet
         if producer_name not in self.__producer_pairs_df:
             # We don't have this data yet, return empty DataFrame and datetime (01-01-1970)
             return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))

+        # If we do have data from that Producer, but no data on this pair_key
         if pair_key not in self.__producer_pairs_df[producer_name]:
             # We don't have this data yet, return empty DataFrame and datetime (01-01-1970)
             return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))

+        # We have it, return this data
         return self.__producer_pairs_df[producer_name][pair_key]

     def add_pairlisthandler(self, pairlists) -> None:
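With timeframe and candle_type now optional, consumer-side code can rely on the configured defaults. A hedged usage sketch (assumes `dp` is a DataProvider instance on a bot with a "default" producer; 'BTC/USDT' and '5m' are placeholders):

# Hedged usage sketch, not freqtrade documentation:
dataframe, last_update = dp.get_external_df('BTC/USDT')            # falls back to config timeframe / candle type
dataframe_5m, _ = dp.get_external_df('BTC/USDT', timeframe='5m')   # explicit values still accepted

if dataframe.empty:
    # Sentinel for "nothing received yet": empty DataFrame plus epoch timestamp (01-01-1970, UTC)
    pass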

View File

@@ -199,10 +199,6 @@ class FreqtradeBot(LoggingMixin):
         strategy_safe_wrapper(self.strategy.bot_loop_start, supress_error=True)()

-        # This just means we won't broadcast dataframes if we're listening to a producer
-        # Doesn't necessarily NEED to be this way, as maybe we'd like to broadcast
-        # even if we are using external dataframes in the future.
         self.strategy.analyze(self.active_pair_whitelist)

         with self._exit_lock:

View File

@@ -50,6 +50,7 @@ def get_user_from_token(token, secret_key: str, token_type: str = "access"):

 # This should be reimplemented to better realign with the existing tools provided
 # by FastAPI regarding API Tokens
+# https://github.com/tiangolo/fastapi/blob/master/fastapi/security/api_key.py
 async def get_ws_token(
     ws: WebSocket,
     token: Union[str, None] = None,

View File

@@ -2,23 +2,30 @@ import logging
 from typing import Any, Dict

 from fastapi import APIRouter, Depends, WebSocket, WebSocketDisconnect
+# fastapi does not make this available through it, so import directly from starlette
+from starlette.websockets import WebSocketState

 from freqtrade.enums import RPCMessageType, RPCRequestType
 from freqtrade.rpc.api_server.deps import get_channel_manager, get_rpc
 from freqtrade.rpc.api_server.ws.channel import WebSocketChannel
-from freqtrade.rpc.api_server.ws.utils import is_websocket_alive
 from freqtrade.rpc.rpc import RPC

-# from typing import Any, Dict

 logger = logging.getLogger(__name__)

 # Private router, protected by API Key authentication
 router = APIRouter()


+async def is_websocket_alive(ws: WebSocket) -> bool:
+    if (
+        ws.application_state == WebSocketState.CONNECTED and
+        ws.client_state == WebSocketState.CONNECTED
+    ):
+        return True
+    return False
+
+
 async def _process_consumer_request(
     request: Dict[str, Any],
     channel: WebSocketChannel,
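The inlined helper is presumably used to guard the websocket endpoint in this same router. A simplified sketch of that guard (the route path is inferred from the example URL in the config above, the endpoint name is hypothetical, and the body is abbreviated):

# Hedged sketch, not the actual endpoint implementation:
@router.websocket("/message/ws")
async def message_endpoint(ws: WebSocket):
    if await is_websocket_alive(ws):
        # accept the connection, register the channel, then handle consumer requests
        ...
    # otherwise the connection is never serviced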

View File

@@ -205,7 +205,7 @@ class ApiServer(RPCHandler):
             # For testing, shouldn't happen when stable
             except Exception as e:
-                logger.info(f"Exception happened in background task: {e}")
+                logger.exception(f"Exception happened in background task: {e}")

     def start_api(self):
         """
@@ -244,7 +244,6 @@ class ApiServer(RPCHandler):
             if self._standalone:
                 self._server.run()
             else:
-                if self._config.get('api_server', {}).get('enable_message_ws', False):
-                    self.start_message_queue()
+                self.start_message_queue()
                 self._server.run_in_thread()
         except Exception:
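The switch from logger.info to logger.exception matters because the latter attaches the active traceback to the log record. A small standalone illustration (hypothetical snippet, not freqtrade code):

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

try:
    raise ValueError("boom")
except Exception as e:
    logger.info(f"Exception happened in background task: {e}")       # message only
    logger.exception(f"Exception happened in background task: {e}")  # message plus full traceback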

View File

@@ -289,7 +289,7 @@ class ExternalMessageConsumer:
             return

         # Add the pairlist data to the DataProvider
-        self._dp.set_producer_pairs(message_data, producer_name=producer_name)
+        self._dp._set_producer_pairs(message_data, producer_name=producer_name)

         logger.debug(f"Consumed message from {producer_name} of type RPCMessageType.WHITELIST")
@@ -309,7 +309,7 @@ class ExternalMessageConsumer:
                 dataframe = remove_entry_exit_signals(dataframe)

             # Add the dataframe to the dataprovider
-            self._dp.add_external_df(pair, timeframe, dataframe,
+            self._dp._add_external_df(pair, dataframe, timeframe,
                                      candle_type, producer_name=producer_name)

             logger.debug(
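Because the dataframe now precedes the timeframe in the new signature, positional calls are easy to get wrong when this code is edited again. A hedged alternative using keyword arguments (same names as in the diff above, functionally equivalent to the positional call):

# Hedged sketch; order-proof version of the call above:
self._dp._add_external_df(
    pair,
    dataframe,
    timeframe=timeframe,
    candle_type=candle_type,
    producer_name=producer_name,
)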

View File

@@ -700,8 +700,7 @@ class IStrategy(ABC, HyperStrategyMixin):
     def _analyze_ticker_internal(
         self,
         dataframe: DataFrame,
-        metadata: dict,
-        emit_df: bool = False
+        metadata: dict
     ) -> DataFrame:
         """
         Parses the given candle (OHLCV) data and returns a populated DataFrame
@@ -725,7 +724,7 @@ class IStrategy(ABC, HyperStrategyMixin):
             candle_type = self.config.get('candle_type_def', CandleType.SPOT)
             self.dp._set_cached_df(pair, self.timeframe, dataframe, candle_type=candle_type)
-            self.dp.emit_df((pair, self.timeframe, candle_type), dataframe)
+            self.dp._emit_df((pair, self.timeframe, candle_type), dataframe)

         else:
             logger.debug("Skipping TA Analysis for already analyzed candle")
@@ -737,8 +736,7 @@ class IStrategy(ABC, HyperStrategyMixin):
     def analyze_pair(
         self,
-        pair: str,
-        emit_df: bool = False
+        pair: str
     ) -> None:
         """
         Fetch data for this pair from dataprovider and analyze.
@@ -759,7 +757,7 @@ class IStrategy(ABC, HyperStrategyMixin):
             dataframe = strategy_safe_wrapper(
                 self._analyze_ticker_internal, message=""
-            )(dataframe, {'pair': pair}, emit_df)
+            )(dataframe, {'pair': pair})

             self.assert_df(dataframe, df_len, df_close, df_date)
         except StrategyError as error:
@@ -772,15 +770,14 @@ class IStrategy(ABC, HyperStrategyMixin):
     def analyze(
         self,
-        pairs: List[str],
-        emit_df: bool = False
+        pairs: List[str]
     ) -> None:
         """
         Analyze all pairs using analyze_pair().
         :param pairs: List of pairs to analyze
         """
         for pair in pairs:
-            self.analyze_pair(pair, emit_df)
+            self.analyze_pair(pair)

     @staticmethod
     def preserve_df(dataframe: DataFrame) -> Tuple[int, float, datetime]:
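Taken together, the interface changes drop the emit_df plumbing entirely: emission now always happens inside _analyze_ticker_internal when a fresh candle is analyzed. A condensed sketch of the resulting call chain (names from the diff; method bodies abbreviated, not the complete implementation):

# Hedged sketch of the simplified flow:
def analyze(self, pairs: List[str]) -> None:
    for pair in pairs:
        self.analyze_pair(pair)                      # no emit flag anymore

def analyze_pair(self, pair: str) -> None:
    dataframe = ...                                  # fetched from the dataprovider (abbreviated)
    dataframe = strategy_safe_wrapper(
        self._analyze_ticker_internal, message=""
    )(dataframe, {'pair': pair})                     # caching and _emit_df happen inside, unconditionally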