split initial data into separate requests
parent dccde88c83
commit eb4cd6ba82
@@ -33,4 +33,6 @@ class RPCMessageType(str, Enum):
 # Enum for parsing requests from ws consumers
 class RPCRequestType(str, Enum):
     SUBSCRIBE = 'subscribe'
-    INITIAL_DATA = 'initial_data'
+
+    WHITELIST = 'whitelist'
+    ANALYZED_DF = 'analyzed_df'
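For context, a minimal sketch of what the per-topic request messages might look like after this change. The `compose_request` helper and the exact payload layout are assumptions for illustration; the real helper is `ExternalMessageConsumer.compose_consumer_request`, which appears further down in this diff.

```python
from enum import Enum


class RPCRequestType(str, Enum):
    # Copied from the hunk above
    SUBSCRIBE = 'subscribe'

    WHITELIST = 'whitelist'
    ANALYZED_DF = 'analyzed_df'


def compose_request(request_type: RPCRequestType, data=None) -> dict:
    """Hypothetical stand-in for compose_consumer_request()."""
    return {"type": request_type.value, "data": data}


# Old behaviour: one combined request, e.g. {"type": "initial_data", "data": None}
# New behaviour: one request per subscribed topic
for topic in (RPCRequestType.WHITELIST, RPCRequestType.ANALYZED_DF):
    print(compose_request(topic))
# {'type': 'whitelist', 'data': None}
# {'type': 'analyzed_df', 'data': None}
```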
@@ -33,7 +33,7 @@ async def _process_consumer_request(
             return

         if not isinstance(data, list):
-            logger.error(f"Improper request from channel: {channel} - {request}")
+            logger.error(f"Improper subscribe request from channel: {channel} - {request}")
             return

         # If all topics passed are a valid RPCMessageType, set subscriptions on channel
@@ -42,18 +42,18 @@ async def _process_consumer_request(
             logger.debug(f"{channel} subscribed to topics: {data}")
             channel.set_subscriptions(data)

-    elif type == RPCRequestType.INITIAL_DATA:
-        # Acquire the data
-        initial_data = rpc._ws_initial_data()
-
-        # We now loop over it sending it in pieces
-        whitelist_data, analyzed_df = initial_data.get('whitelist'), initial_data.get('analyzed_df')
-
-        if whitelist_data:
-            await channel.send({"type": RPCMessageType.WHITELIST, "data": whitelist_data})
-
-        if analyzed_df:
-            for pair, message in analyzed_df.items():
-                await channel.send({"type": RPCMessageType.ANALYZED_DF, "data": message})
+    elif type == RPCRequestType.WHITELIST:
+        # They requested the whitelist
+        whitelist = rpc._ws_request_whitelist()
+
+        await channel.send({"type": RPCMessageType.WHITELIST, "data": whitelist})
+
+    elif type == RPCRequestType.ANALYZED_DF:
+        # They requested the full historical analyzed dataframes
+        analyzed_df = rpc._ws_request_analyzed_df()
+
+        # For every dataframe, send as a separate message
+        for _, message in analyzed_df.items():
+            await channel.send({"type": RPCMessageType.ANALYZED_DF, "data": message})
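A rough, self-contained sketch of the producer-side routing above, using a stub channel instead of a real WebSocket. The message payloads are placeholders, not freqtrade's actual dataframe format; the point is that the whitelist goes out as one message while analyzed dataframes are streamed one message per pair.

```python
import asyncio


class StubChannel:
    """Collects messages instead of sending them over a WebSocket."""
    def __init__(self):
        self.sent = []

    async def send(self, message):
        self.sent.append(message)


async def handle_request(request, channel, whitelist, analyzed_df):
    # Mirrors the elif chain above: one message for the whitelist,
    # one message per analyzed dataframe.
    if request["type"] == "whitelist":
        await channel.send({"type": "whitelist", "data": whitelist})
    elif request["type"] == "analyzed_df":
        for _, message in analyzed_df.items():
            await channel.send({"type": "analyzed_df", "data": message})


async def main():
    channel = StubChannel()
    whitelist = ["BTC/USDT", "ETH/USDT"]
    analyzed_df = {pair: {"pair": pair, "df": "..."} for pair in whitelist}

    await handle_request({"type": "whitelist"}, channel, whitelist, analyzed_df)
    await handle_request({"type": "analyzed_df"}, channel, whitelist, analyzed_df)

    # 1 whitelist message + 1 message per analyzed dataframe
    print(len(channel.sent))  # 3


asyncio.run(main())
```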
@@ -158,10 +158,12 @@ class ExternalMessageConsumer:
                             self.compose_consumer_request(RPCRequestType.SUBSCRIBE, self.topics)
                         )

-                        # Now request the initial data from this Producer
-                        await channel.send(
-                            self.compose_consumer_request(RPCRequestType.INITIAL_DATA)
-                        )
+                        # Now request the initial data from this Producer for every topic
+                        # we've subscribed to
+                        for topic in self.topics:
+                            # without .upper() we get KeyError
+                            request_type = RPCRequestType[topic.upper()]
+                            await channel.send(self.compose_consumer_request(request_type))

                        # Now receive data, if none is within the time limit, ping
                        while True:
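The `# without .upper() we get KeyError` comment comes down to how Python enums are looked up: subscripting an `Enum` matches the member *name* (uppercase here), while calling it matches the member *value*. A small illustration:

```python
from enum import Enum


class RPCRequestType(str, Enum):
    SUBSCRIBE = 'subscribe'
    WHITELIST = 'whitelist'
    ANALYZED_DF = 'analyzed_df'


topic = 'whitelist'  # topics are carried as lowercase enum *values*

# Subscripting looks up by member *name*, so the name must be uppercased first
print(RPCRequestType[topic.upper()] is RPCRequestType.WHITELIST)  # True

# Calling the enum looks up by *value* and would also resolve the topic
print(RPCRequestType(topic) is RPCRequestType.WHITELIST)          # True

try:
    RPCRequestType[topic]  # no member named 'whitelist' (names are uppercase)
except KeyError:
    print("KeyError without .upper()")
```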
@@ -191,9 +193,12 @@ class ExternalMessageConsumer:
                                     await asyncio.sleep(self.sleep_time)

                                     break
-            except Exception as e:
-                logger.exception(e)
-                continue
+
+            # Catch invalid ws_url, and break the loop
+            except websockets.exceptions.InvalidURI as e:
+                logger.error(f"{ws_url} is an invalid WebSocket URL - {e}")
+                break
+
             except (
                 socket.gaierror,
                 ConnectionRefusedError,
@@ -204,9 +209,9 @@ class ExternalMessageConsumer:

                continue

-            # Catch invalid ws_url, and break the loop
-            except websockets.exceptions.InvalidURI as e:
-                logger.error(f"{ws_url} is an invalid WebSocket URL - {e}")
+            except Exception as e:
+                # An unforseen error has occurred, log and stop
+                logger.exception(e)
                break

    def compose_consumer_request(
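The reordering in the two hunks above matters because Python checks `except` clauses top to bottom and runs the first match: with `except Exception` listed before the specific handlers, an invalid URL was swallowed and retried instead of breaking the loop. A minimal, self-contained illustration (the exception class here is a placeholder, not the websockets one):

```python
class InvalidURI(Exception):
    """Placeholder for websockets.exceptions.InvalidURI."""


def connect(url):
    raise InvalidURI(f"{url} isn't a valid URI")


def old_order(url):
    # Broad handler first: InvalidURI is swallowed and the caller keeps retrying.
    try:
        connect(url)
    except Exception as e:
        return f"retry ({e})"
    except InvalidURI as e:  # unreachable
        return f"break ({e})"


def new_order(url):
    # Specific, fatal error first; the catch-all only handles the unforeseen.
    try:
        connect(url)
    except InvalidURI as e:
        return f"break ({e})"
    except Exception as e:
        return f"retry ({e})"


print(old_order("not-a-url"))  # retry (...)  -> would loop forever on a bad URL
print(new_order("not-a-url"))  # break (...)  -> gives up on the bad URL
```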
@@ -1068,13 +1068,14 @@ class RPC:

         return _data

-    def _ws_initial_data(self):
-        """ Websocket friendly initial data, whitelists and all analyzed dataframes """
+    def _ws_request_analyzed_df(self):
+        """ Historical Analyzed Dataframes for WebSocket """
         whitelist = self._freqtrade.active_pair_whitelist
-        # We only get the last 500 candles, should we remove the limit?
-        analyzed_df = self._ws_all_analysed_dataframes(whitelist, 500)
+        return self._ws_all_analysed_dataframes(whitelist, 500)

-        return {"whitelist": whitelist, "analyzed_df": analyzed_df}
+    def _ws_request_whitelist(self):
+        """ Whitelist data for WebSocket """
+        return self._freqtrade.active_pair_whitelist

     @staticmethod
     def _rpc_analysed_history_full(config, pair: str, timeframe: str,
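On the consumer side, the state that the old combined `initial_data` payload delivered in one message is now reassembled from separate messages. A rough sketch of that reassembly under assumed message shapes (one whitelist message, one message per analyzed dataframe):

```python
def apply_producer_message(state: dict, message: dict) -> None:
    """Fold one incoming WebSocket message into local consumer state."""
    if message["type"] == "whitelist":
        state["whitelist"] = message["data"]
    elif message["type"] == "analyzed_df":
        # Assumed shape: each message carries a single pair's dataframe
        pair = message["data"]["pair"]
        state.setdefault("analyzed_df", {})[pair] = message["data"]


state: dict = {}
incoming = [
    {"type": "whitelist", "data": ["BTC/USDT", "ETH/USDT"]},
    {"type": "analyzed_df", "data": {"pair": "BTC/USDT", "df": "..."}},
    {"type": "analyzed_df", "data": {"pair": "ETH/USDT", "df": "..."}},
]
for message in incoming:
    apply_producer_message(state, message)

print(sorted(state["analyzed_df"]))  # ['BTC/USDT', 'ETH/USDT']
```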