Merge branch 'freqtrade:develop' into develop

commit efa321bf4f
@@ -17,7 +17,7 @@ repos:
         - types-filelock==3.2.7
         - types-requests==2.28.11.2
         - types-tabulate==0.9.0.0
-        - types-python-dateutil==2.8.19.1
+        - types-python-dateutil==2.8.19.2
         # stages: [push]

   - repo: https://github.com/pycqa/isort
@@ -53,7 +53,7 @@
             "XTZ/BTC"
         ],
         "pair_blacklist": [
-            "BNB/BTC"
+            "BNB/.*"
         ]
     },
     "pairlists": [
@@ -78,6 +78,8 @@ This function needs to return a floating point number (`float`). Smaller numbers
 To override a pre-defined space (`roi_space`, `generate_roi_table`, `stoploss_space`, `trailing_space`), define a nested class called Hyperopt and define the required spaces as follows:

 ```python
+from freqtrade.optimize.space import Categorical, Dimension, Integer, SKDecimal
+
 class MyAwesomeStrategy(IStrategy):
     class HyperOpt:
         # Define a custom stoploss space.
@@ -94,6 +96,33 @@ class MyAwesomeStrategy(IStrategy):
                 SKDecimal(0.01, 0.07, decimals=3, name='roi_p2'),
                 SKDecimal(0.01, 0.20, decimals=3, name='roi_p3'),
             ]
+
+        def generate_roi_table(params: Dict) -> Dict[int, float]:
+
+            roi_table = {}
+            roi_table[0] = params['roi_p1'] + params['roi_p2'] + params['roi_p3']
+            roi_table[params['roi_t3']] = params['roi_p1'] + params['roi_p2']
+            roi_table[params['roi_t3'] + params['roi_t2']] = params['roi_p1']
+            roi_table[params['roi_t3'] + params['roi_t2'] + params['roi_t1']] = 0
+
+            return roi_table
+
+        def trailing_space() -> List[Dimension]:
+            # All parameters here are mandatory, you can only modify their type or the range.
+            return [
+                # Fixed to true, if optimizing trailing_stop we assume to use trailing stop at all times.
+                Categorical([True], name='trailing_stop'),
+
+                SKDecimal(0.01, 0.35, decimals=3, name='trailing_stop_positive'),
+                # 'trailing_stop_positive_offset' should be greater than 'trailing_stop_positive',
+                # so this intermediate parameter is used as the value of the difference between
+                # them. The value of the 'trailing_stop_positive_offset' is constructed in the
+                # generate_trailing_params() method.
+                # This is similar to the hyperspace dimensions used for constructing the ROI tables.
+                SKDecimal(0.001, 0.1, decimals=3, name='trailing_stop_positive_offset_p1'),
+
+                Categorical([True, False], name='trailing_only_offset_is_reached'),
+            ]
 ```

 !!! Note
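Note: the `trailing_stop_positive_offset_p1` dimension above only encodes the difference; it has to be folded back into the final `trailing_stop_positive_offset`. A hedged sketch of what the `generate_trailing_params()` method referenced in the comments could look like (illustrative only; the built-in implementation may differ):

```python
from typing import Dict


def generate_trailing_params(params: Dict) -> Dict:
    # Rebuild the final trailing-stop settings from the optimized dimensions.
    return {
        'trailing_stop': params['trailing_stop'],
        'trailing_stop_positive': params['trailing_stop_positive'],
        # The offset must stay above trailing_stop_positive, so the optimized
        # difference (..._offset_p1) is added on top of it.
        'trailing_stop_positive_offset': (params['trailing_stop_positive'] +
                                          params['trailing_stop_positive_offset_p1']),
        'trailing_only_offset_is_reached': params['trailing_only_offset_is_reached'],
    }
```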
@@ -522,13 +522,13 @@ Since backtesting lacks some detailed information about what happens within a ca
 - ROI
   - exits are compared to high - but the ROI value is used (e.g. ROI = 2%, high=5% - so the exit will be at 2%)
   - exits are never "below the candle", so a ROI of 2% may result in a exit at 2.4% if low was at 2.4% profit
-  - Forceexits caused by `<N>=-1` ROI entries use low as exit value, unless N falls on the candle open (e.g. `120: -1` for 1h candles)
+  - Force-exits caused by `<N>=-1` ROI entries use low as exit value, unless N falls on the candle open (e.g. `120: -1` for 1h candles)
 - Stoploss exits happen exactly at stoploss price, even if low was lower, but the loss will be `2 * fees` higher than the stoploss price
 - Stoploss is evaluated before ROI within one candle. So you can often see more trades with the `stoploss` exit reason comparing to the results obtained with the same strategy in the Dry Run/Live Trade modes
 - Low happens before high for stoploss, protecting capital first
 - Trailing stoploss
   - Trailing Stoploss is only adjusted if it's below the candle's low (otherwise it would be triggered)
-  - On trade entry candles that trigger trailing stoploss, the "minimum offset" (`stop_positive_offset`) is assumed (instead of high) - and the stop is calculated from this point
+  - On trade entry candles that trigger trailing stoploss, the "minimum offset" (`stop_positive_offset`) is assumed (instead of high) - and the stop is calculated from this point. This rule is NOT applicable to custom-stoploss scenarios, since there's no information about the stoploss logic available.
   - High happens first - adjusting stoploss
   - Low uses the adjusted stoploss (so exits with large high-low difference are backtested correctly)
 - ROI applies before trailing-stop, ensuring profits are "top-capped" at ROI if both ROI and trailing stop applies
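Note: the ROI rules in this hunk can be condensed into a small sketch (illustration only, not freqtrade's actual backtesting code):

```python
from typing import Optional


def roi_exit_rate(open_rate: float, low: float, high: float,
                  roi: float) -> Optional[float]:
    """ROI exits are compared against high, booked at the ROI target rate,
    and never below the candle's range."""
    target = open_rate * (1 + roi)  # e.g. ROI = 2% -> exit target at +2%
    if high < target:
        return None  # ROI not reached within this candle
    # If even the low sits above the target (e.g. low at +2.4% profit),
    # the exit is booked at the low, never "below the candle".
    return max(target, low)
```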
@@ -102,6 +102,12 @@ If this happens for all pairs in the pairlist, this might indicate a recent exch

 Irrespectively of the reason, Freqtrade will fill up these candles with "empty" candles, where open, high, low and close are set to the previous candle close - and volume is empty. In a chart, this will look like a `_` - and is aligned with how exchanges usually represent 0 volume candles.

+### I'm getting "Price jump between 2 candles detected"
+
+This message is a warning that the candles had a price jump of > 30%.
+This might be a sign that the pair stopped trading, and some token exchange took place (e.g. COCOS in 2021 - where price jumped from 0.0000154 to 0.01621).
+This message is often accompanied by ["Missing data fillup"](#im-getting-missing-data-fillup-messages-in-the-log) - as trading on such pairs is often stopped for some time.
+
 ### I'm getting "Outdated history for pair xxx" in the log

 The bot is trying to tell you that it got an outdated last candle (not the last complete candle).
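Note: the check behind the new FAQ entry is the relative gap between each candle's open and the previous candle's close (see the `_check_empty_df` change further down in this commit); in essence:

```python
import pandas as pd


def max_candle_gap(df: pd.DataFrame) -> float:
    # Relative gap between the previous close and the current open.
    gaps = ((df['open'] - df['close'].shift(1)) / df['close'].shift(1)).dropna()
    return max(abs(gaps)) if len(gaps) else 0.0
```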
@@ -161,9 +161,13 @@ You can indicate to the bot that it should not train models, but instead should

 ```json
     "freqai": {
         "enabled": true,
         "follow_mode": true,
-        "identifier": "example"
+        "identifier": "example",
+        "feature_parameters": {
+            // leader bots feature_parameters inserted here
+        },
     }
 ```

-In this example, the user has a leader bot with the `"identifier": "example"`. The leader bot is already running or is launched simultaneously with the follower. The follower will load models created by the leader and inference them to obtain predictions instead of training its own models.
+In this example, the user has a leader bot with the `"identifier": "example"`. The leader bot is already running or is launched simultaneously with the follower. The follower will load models created by the leader and inference them to obtain predictions instead of training its own models. The user will also need to duplicate the `feature_parameters` parameters from the leaders freqai configuration file into the freqai section of the followers config.
@@ -1,6 +1,6 @@
 markdown==3.3.7
 mkdocs==1.4.1
-mkdocs-material==8.5.6
+mkdocs-material==8.5.7
 mdx_truly_sane_lists==1.3
-pymdown-extensions==9.6
+pymdown-extensions==9.7
 jinja2==3.1.2
@@ -159,6 +159,7 @@ The stoploss price can only ever move upwards - if the stoploss value returned f

 The method must return a stoploss value (float / number) as a percentage of the current price.
 E.g. If the `current_rate` is 200 USD, then returning `0.02` will set the stoploss price 2% lower, at 196 USD.
+During backtesting, `current_rate` (and `current_profit`) are provided against the candle's high (or low for short trades) - while the resulting stoploss is evaluated against the candle's low (or high for short trades).

 The absolute value of the return value is used (the sign is ignored), so returning `0.05` or `-0.05` have the same result, a stoploss 5% below the current price.
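Note: as a minimal sketch of the docs paragraph above, a `custom_stoploss` using the standard freqtrade callback signature that keeps the stop 2% below the current rate:

```python
from datetime import datetime

from freqtrade.persistence import Trade
from freqtrade.strategy import IStrategy


class MyStrategy(IStrategy):
    use_custom_stoploss = True

    def custom_stoploss(self, pair: str, trade: Trade, current_time: datetime,
                        current_rate: float, current_profit: float, **kwargs) -> float:
        # With current_rate == 200, returning 0.02 places the stop 2% lower, at 196.
        return 0.02
```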
@@ -303,7 +303,7 @@ class IDataHandler(ABC):
                 timerange=timerange_startup,
                 candle_type=candle_type
             )
-            if self._check_empty_df(pairdf, pair, timeframe, candle_type, warn_no_data):
+            if self._check_empty_df(pairdf, pair, timeframe, candle_type, warn_no_data, True):
                 return pairdf
             else:
                 enddate = pairdf.iloc[-1]['date']
@@ -323,8 +323,9 @@ class IDataHandler(ABC):
             self._check_empty_df(pairdf, pair, timeframe, candle_type, warn_no_data)
             return pairdf

-    def _check_empty_df(self, pairdf: DataFrame, pair: str, timeframe: str,
-                        candle_type: CandleType, warn_no_data: bool):
+    def _check_empty_df(
+            self, pairdf: DataFrame, pair: str, timeframe: str, candle_type: CandleType,
+            warn_no_data: bool, warn_price: bool = False) -> bool:
         """
         Warn on empty dataframe
         """
@@ -335,6 +336,20 @@ class IDataHandler(ABC):
                 "Use `freqtrade download-data` to download the data"
             )
             return True
+        elif warn_price:
+            candle_price_gap = 0
+            if (candle_type in (CandleType.SPOT, CandleType.FUTURES) and
+                    not pairdf.empty
+                    and 'close' in pairdf.columns and 'open' in pairdf.columns):
+                # Detect gaps between prior close and open
+                gaps = ((pairdf['open'] - pairdf['close'].shift(1)) / pairdf['close'].shift(1))
+                gaps = gaps.dropna()
+                if len(gaps):
+                    candle_price_gap = max(abs(gaps))
+            if candle_price_gap > 0.1:
+                logger.info(f"Price jump in {pair}, {timeframe}, {candle_type} between two candles "
+                            f"of {candle_price_gap:.2%} detected.")
+
         return False

     def _validate_pairdata(self, pair, pairdata: DataFrame, timeframe: str,
@@ -1996,9 +1996,9 @@ class Exchange:
         # Timeframe in seconds
         interval_in_sec = timeframe_to_seconds(timeframe)

-        return not (
+        return (
             (self._pairs_last_refresh_time.get((pair, timeframe, candle_type), 0)
-             + interval_in_sec) >= arrow.utcnow().int_timestamp
+             + interval_in_sec) < arrow.utcnow().int_timestamp
         )

     @retrier_async
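Note: this refresh-check refactor is behavior-preserving, since `not (a >= b)` is the same condition as `a < b`; a quick illustration:

```python
last_refresh, interval = 1_000, 300
for now in (1_200, 1_300, 1_400):
    assert (not (last_refresh + interval >= now)) == (last_refresh + interval < now)
```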
@@ -210,7 +210,10 @@ class FreqaiDataKitchen:
             const_cols = list((filtered_df.nunique() == 1).loc[lambda x: x].index)
             if const_cols:
                 filtered_df = filtered_df.filter(filtered_df.columns.difference(const_cols))
+                self.data['constant_features_list'] = const_cols
                 logger.warning(f"Removed features {const_cols} with constant values.")
+            else:
+                self.data['constant_features_list'] = []
             # we don't care about total row number (total no. datapoints) in training, we only care
             # about removing any row with NaNs
             # if labels has multiple columns (user wants to train multiple modelEs), we detect here
@@ -241,6 +244,7 @@ class FreqaiDataKitchen:
             self.data["filter_drop_index_training"] = drop_index

         else:
-            filtered_df = self.check_pred_labels(filtered_df)
+            if len(self.data['constant_features_list']):
+                filtered_df = self.check_pred_labels(filtered_df)
             # we are backtesting so we need to preserve row number to send back to strategy,
             # so now we use do_predict to avoid any prediction based on a NaN
@@ -464,17 +468,15 @@ class FreqaiDataKitchen:
     def check_pred_labels(self, df_predictions: DataFrame) -> DataFrame:
         """
         Check that prediction feature labels match training feature labels.
-        :params:
-        :df_predictions: incoming predictions
+        :param df_predictions: incoming predictions
         """
-        train_labels = self.data_dictionary["train_features"].columns
-        pred_labels = df_predictions.columns
-        num_diffs = len(pred_labels.difference(train_labels))
-        if num_diffs != 0:
-            df_predictions = df_predictions[train_labels]
+        constant_labels = self.data['constant_features_list']
+        df_predictions = df_predictions.filter(
+            df_predictions.columns.difference(constant_labels)
+        )
         logger.warning(
-            f"Removed {num_diffs} features from prediction features, "
-            f"these were likely considered constant values during most recent training."
+            f"Removed {len(constant_labels)} features from prediction features, "
+            f"these were considered constant values during most recent training."
         )

         return df_predictions
@@ -26,8 +26,7 @@ class XGBoostRFClassifier(BaseClassifierModel):
     def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """
         User sets up the training and test data to fit their desired model here
-        :params:
-        :data_dictionary: the dictionary constructed by DataHandler to hold
+        :param data_dictionary: the dictionary constructed by DataHandler to hold
                            all the training and test data/labels.
         """

@@ -65,7 +64,7 @@ class XGBoostRFClassifier(BaseClassifierModel):
     ) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
         """
         Filter the prediction features data and predict with it.
-        :param: unfiltered_df: Full dataframe for the current backtest period.
+        :param unfiltered_df: Full dataframe for the current backtest period.
         :return:
         :pred_df: dataframe containing the predictions
         :do_predict: np.array of 1s and 0s to indicate places where freqai needed to remove
@@ -29,6 +29,7 @@ class XGBoostRFRegressor(BaseRegressionModel):

         if self.freqai_info.get("data_split_parameters", {}).get("test_size", 0.1) == 0:
             eval_set = None
+            eval_weights = None
         else:
             eval_set = [(data_dictionary["test_features"], data_dictionary["test_labels"])]
             eval_weights = [data_dictionary['test_weights']]
@@ -29,6 +29,7 @@ class XGBoostRegressor(BaseRegressionModel):

         if self.freqai_info.get("data_split_parameters", {}).get("test_size", 0.1) == 0:
             eval_set = None
+            eval_weights = None
         else:
             eval_set = [(data_dictionary["test_features"], data_dictionary["test_labels"])]
             eval_weights = [data_dictionary['test_weights']]
@@ -1,4 +1,3 @@
-import asyncio
 import logging
 from typing import Any, Dict

@@ -11,6 +10,7 @@ from freqtrade.enums import RPCMessageType, RPCRequestType
 from freqtrade.rpc.api_server.api_auth import validate_ws_token
 from freqtrade.rpc.api_server.deps import get_channel_manager, get_rpc
 from freqtrade.rpc.api_server.ws import WebSocketChannel
+from freqtrade.rpc.api_server.ws.channel import ChannelManager
 from freqtrade.rpc.api_server.ws_schemas import (WSAnalyzedDFMessage, WSMessageSchema,
                                                  WSRequestSchema, WSWhitelistMessage)
 from freqtrade.rpc.rpc import RPC
@@ -37,7 +37,8 @@ async def is_websocket_alive(ws: WebSocket) -> bool:
 async def _process_consumer_request(
     request: Dict[str, Any],
     channel: WebSocketChannel,
-    rpc: RPC
+    rpc: RPC,
+    channel_manager: ChannelManager
 ):
     """
     Validate and handle a request from a websocket consumer
@@ -74,7 +75,7 @@ async def _process_consumer_request(
         # Format response
         response = WSWhitelistMessage(data=whitelist)
         # Send it back
-        await channel.send(response.dict(exclude_none=True))
+        await channel_manager.send_direct(channel, response.dict(exclude_none=True))

     elif type == RPCRequestType.ANALYZED_DF:
         limit = None
@@ -89,9 +90,7 @@ async def _process_consumer_request(
         # For every dataframe, send as a separate message
         for _, message in analyzed_df.items():
             response = WSAnalyzedDFMessage(data=message)
-            await channel.send(response.dict(exclude_none=True))
-            # Throttle the messages to 50/s
-            await asyncio.sleep(0.02)
+            await channel_manager.send_direct(channel, response.dict(exclude_none=True))


 @router.websocket("/message/ws")
@@ -116,7 +115,7 @@ async def message_endpoint(
             request = await channel.recv()

             # Process the request here
-            await _process_consumer_request(request, channel, rpc)
+            await _process_consumer_request(request, channel, rpc, channel_manager)

         except (WebSocketDisconnect, WebSocketException):
             # Handle client disconnects
@@ -16,6 +16,7 @@ from freqtrade.constants import Config
 from freqtrade.exceptions import OperationalException
 from freqtrade.rpc.api_server.uvicorn_threaded import UvicornServer
 from freqtrade.rpc.api_server.ws import ChannelManager
+from freqtrade.rpc.api_server.ws_schemas import WSMessageSchemaType
 from freqtrade.rpc.rpc import RPC, RPCException, RPCHandler


@@ -127,7 +128,7 @@ class ApiServer(RPCHandler):
         cls._has_rpc = False
         cls._rpc = None

-    def send_msg(self, msg: Dict[str, str]) -> None:
+    def send_msg(self, msg: Dict[str, Any]) -> None:
         if self._ws_queue:
             sync_q = self._ws_queue.sync_q
             sync_q.put(msg)
@@ -194,14 +195,10 @@ class ApiServer(RPCHandler):
             while True:
                 logger.debug("Getting queue messages...")
                 # Get data from queue
-                message = await async_queue.get()
+                message: WSMessageSchemaType = await async_queue.get()
                 logger.debug(f"Found message of type: {message.get('type')}")
                 # Broadcast it
                 await self._ws_channel_manager.broadcast(message)
-                # Limit messages per sec.
-                # Could cause problems with queue size if too low, and
-                # problems with network traffik if too high.
-                await asyncio.sleep(0.001)
         except asyncio.CancelledError:
             pass
@@ -1,7 +1,7 @@
 import asyncio
 import logging
 from threading import RLock
-from typing import Any, Dict, List, Optional, Type
+from typing import Any, Dict, List, Optional, Type, Union
 from uuid import uuid4

 from fastapi import WebSocket as FastAPIWebSocket
@@ -10,6 +10,7 @@ from freqtrade.rpc.api_server.ws.proxy import WebSocketProxy
 from freqtrade.rpc.api_server.ws.serializer import (HybridJSONWebSocketSerializer,
                                                     WebSocketSerializer)
 from freqtrade.rpc.api_server.ws.types import WebSocketType
+from freqtrade.rpc.api_server.ws_schemas import WSMessageSchemaType


 logger = logging.getLogger(__name__)
@@ -24,6 +25,8 @@ class WebSocketChannel:
         self,
         websocket: WebSocketType,
         channel_id: Optional[str] = None,
+        drain_timeout: int = 3,
+        throttle: float = 0.01,
         serializer_cls: Type[WebSocketSerializer] = HybridJSONWebSocketSerializer
     ):
@@ -34,7 +37,11 @@ class WebSocketChannel:
         # The Serializing class for the WebSocket object
         self._serializer_cls = serializer_cls

+        self.drain_timeout = drain_timeout
+        self.throttle = throttle
+
         self._subscriptions: List[str] = []
         # 32 is the size of the receiving queue in websockets package
         self.queue: asyncio.Queue[Dict[str, Any]] = asyncio.Queue(maxsize=32)
         self._relay_task = asyncio.create_task(self.relay())
@@ -47,6 +54,10 @@ class WebSocketChannel:
     def __repr__(self):
         return f"WebSocketChannel({self.channel_id}, {self.remote_addr})"

+    @property
+    def raw_websocket(self):
+        return self._websocket.raw_websocket
+
     @property
     def remote_addr(self):
         return self._websocket.remote_addr
@@ -57,11 +68,19 @@ class WebSocketChannel:
         """
         await self._wrapped_ws.send(data)

-    async def send(self, data):
+    async def send(self, data) -> bool:
         """
-        Add the data to the queue to be sent
+        Add the data to the queue to be sent.
+        :returns: True if data added to queue, False otherwise
         """
-        self.queue.put_nowait(data)
+        try:
+            await asyncio.wait_for(
+                self.queue.put(data),
+                timeout=self.drain_timeout
+            )
+            return True
+        except asyncio.TimeoutError:
+            return False

     async def recv(self):
         """
@@ -119,8 +138,8 @@ class WebSocketChannel:
                 # Limit messages per sec.
                 # Could cause problems with queue size if too low, and
                 # problems with network traffik if too high.
-                # 0.001 = 1000/s
-                await asyncio.sleep(0.001)
+                # 0.01 = 100/s
+                await asyncio.sleep(self.throttle)
             except RuntimeError:
                 # The connection was closed, just exit the task
                 return
@@ -160,6 +179,7 @@ class ChannelManager:
         with self._lock:
             channel = self.channels.get(websocket)
             if channel:
+                logger.info(f"Disconnecting channel {channel}")
                 if not channel.is_closed():
                     await channel.close()

@@ -170,36 +190,30 @@ class ChannelManager:
         Disconnect all Channels
         """
         with self._lock:
-            for websocket, channel in self.channels.copy().items():
-                if not channel.is_closed():
-                    await channel.close()
-
-            self.channels = dict()
-
-    async def broadcast(self, data):
-        """
-        Broadcast data on all Channels
-
-        :param data: The data to send
-        """
-        with self._lock:
-            message_type = data.get('type')
-            for websocket, channel in self.channels.copy().items():
-                if channel.subscribed_to(message_type):
-                    if not channel.queue.full():
-                        await channel.send(data)
-                    else:
-                        logger.info(f"Channel {channel} is too far behind, disconnecting")
-                        await self.on_disconnect(websocket)
+            for websocket in self.channels.copy().keys():
+                await self.on_disconnect(websocket)

-    async def send_direct(self, channel, data):
+    async def broadcast(self, message: WSMessageSchemaType):
         """
-        Send data directly through direct_channel only
+        Broadcast a message on all Channels

-        :param direct_channel: The WebSocketChannel object to send data through
-        :param data: The data to send
+        :param message: The message to send
         """
-        await channel.send(data)
+        with self._lock:
+            for channel in self.channels.copy().values():
+                if channel.subscribed_to(message.get('type')):
+                    await self.send_direct(channel, message)
+
+    async def send_direct(
+            self, channel: WebSocketChannel, message: Union[WSMessageSchemaType, Dict[str, Any]]):
+        """
+        Send a message directly through direct_channel only
+
+        :param direct_channel: The WebSocketChannel object to send the message through
+        :param message: The message to send
+        """
+        if not await channel.send(message):
+            await self.on_disconnect(channel.raw_websocket)

     def has_channels(self):
         """
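Note: the new `send()` replaces the unbounded `put_nowait()` with a bounded, time-limited enqueue, so a stalled consumer surfaces as a `False` return (and gets disconnected by the manager) instead of an ever-growing queue. A standalone sketch of the pattern:

```python
import asyncio


async def bounded_send(queue: asyncio.Queue, data, drain_timeout: float = 3.0) -> bool:
    # Wait up to drain_timeout for space in the bounded queue; False signals
    # a consumer that is too far behind.
    try:
        await asyncio.wait_for(queue.put(data), timeout=drain_timeout)
        return True
    except asyncio.TimeoutError:
        return False


async def main():
    q: asyncio.Queue = asyncio.Queue(maxsize=2)
    assert await bounded_send(q, "a")
    assert await bounded_send(q, "b")
    # Queue is full and nothing is consuming: this returns False after ~0.1s.
    assert not await bounded_send(q, "c", drain_timeout=0.1)

asyncio.run(main())
```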
@@ -15,6 +15,10 @@ class WebSocketProxy:
     def __init__(self, websocket: WebSocketType):
         self._websocket: Union[FastAPIWebSocket, WebSocket] = websocket

+    @property
+    def raw_websocket(self):
+        return self._websocket
+
     @property
     def remote_addr(self) -> Tuple[Any, ...]:
         if isinstance(self._websocket, WebSocket):
@@ -1,5 +1,5 @@
 from datetime import datetime
-from typing import Any, Dict, List, Optional
+from typing import Any, Dict, List, Optional, TypedDict

 from pandas import DataFrame
 from pydantic import BaseModel
@@ -18,6 +18,12 @@ class WSRequestSchema(BaseArbitraryModel):
     data: Optional[Any] = None


+class WSMessageSchemaType(TypedDict):
+    # Type for typing to avoid doing pydantic typechecks.
+    type: RPCMessageType
+    data: Optional[Dict[str, Any]]
+
+
 class WSMessageSchema(BaseArbitraryModel):
     type: RPCMessageType
     data: Optional[Any] = None
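Note: `WSMessageSchemaType` is a plain `TypedDict`, so queue consumers get static typing without instantiating a pydantic model; for example (hypothetical message values):

```python
from freqtrade.enums import RPCMessageType
from freqtrade.rpc.api_server.ws_schemas import WSMessageSchemaType

msg: WSMessageSchemaType = {
    'type': RPCMessageType.STATUS,     # checked by the type checker only
    'data': {'status': 'running'},     # no pydantic validation happens here
}
```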
@@ -270,6 +270,11 @@ class ExternalMessageConsumer:
                     logger.debug(f"Connection to {channel} still alive...")

                     continue
+                except (websockets.exceptions.ConnectionClosed):
+                    # Just eat the error and continue reconnecting
+                    logger.warning(f"Disconnection in {channel} - retrying in {self.sleep_time}s")
+                    await asyncio.sleep(self.sleep_time)
+                    break
                 except Exception as e:
                     logger.warning(f"Ping error {channel} - retrying in {self.sleep_time}s")
                     logger.debug(e, exc_info=e)
@@ -1072,26 +1072,26 @@ class IStrategy(ABC, HyperStrategyMixin):
             trade.stop_loss > (high or current_rate)
         )

+        # Make sure current_profit is calculated using high for backtesting.
+        bound = (low if trade.is_short else high)
+        bound_profit = current_profit if not bound else trade.calc_profit_ratio(bound)
         if self.use_custom_stoploss and dir_correct:
             stop_loss_value = strategy_safe_wrapper(self.custom_stoploss, default_retval=None
                                                     )(pair=trade.pair, trade=trade,
                                                       current_time=current_time,
-                                                      current_rate=current_rate,
-                                                      current_profit=current_profit)
+                                                      current_rate=(bound or current_rate),
+                                                      current_profit=bound_profit)
             # Sanity check - error cases will return None
             if stop_loss_value:
-                # logger.info(f"{trade.pair} {stop_loss_value=} {current_profit=}")
-                trade.adjust_stop_loss(current_rate, stop_loss_value)
+                # logger.info(f"{trade.pair} {stop_loss_value=} {bound_profit=}")
+                trade.adjust_stop_loss(bound or current_rate, stop_loss_value)
             else:
                 logger.warning("CustomStoploss function did not return valid stoploss")

         if self.trailing_stop and dir_correct:
             # trailing stoploss handling
             sl_offset = self.trailing_stop_positive_offset

-            # Make sure current_profit is calculated using high for backtesting.
-            bound = low if trade.is_short else high
-            bound_profit = current_profit if not bound else trade.calc_profit_ratio(bound)
-
             # Don't update stoploss if trailing_only_offset_is_reached is true.
             if not (self.trailing_only_offset_is_reached and bound_profit < sl_offset):
@@ -14,6 +14,7 @@ from freqtrade.configuration import Configuration
 from freqtrade.constants import PROCESS_THROTTLE_SECS, RETRY_TIMEOUT, Config
 from freqtrade.enums import State
 from freqtrade.exceptions import OperationalException, TemporaryError
+from freqtrade.exchange import timeframe_to_next_date
 from freqtrade.freqtradebot import FreqtradeBot


@@ -35,7 +36,6 @@ class Worker:
         self._config = config
         self._init(False)

-        self.last_throttle_start_time: float = 0
         self._heartbeat_msg: float = 0

         # Tell systemd that we completed initialization phase
@@ -112,7 +112,10 @@ class Worker:
             # Ping systemd watchdog before throttling
             self._notify("WATCHDOG=1\nSTATUS=State: RUNNING.")

-            self._throttle(func=self._process_running, throttle_secs=self._throttle_secs)
+            # Use an offset of 1s to ensure a new candle has been issued
+            self._throttle(func=self._process_running, throttle_secs=self._throttle_secs,
+                           timeframe=self._config['timeframe'] if self._config else None,
+                           timeframe_offset=1)

         if self._heartbeat_interval:
             now = time.time()
@@ -127,24 +130,42 @@ class Worker:

         return state

-    def _throttle(self, func: Callable[..., Any], throttle_secs: float, *args, **kwargs) -> Any:
+    def _throttle(self, func: Callable[..., Any], throttle_secs: float,
+                  timeframe: Optional[str] = None, timeframe_offset: float = 1.0,
+                  *args, **kwargs) -> Any:
         """
         Throttles the given callable that it
         takes at least `min_secs` to finish execution.
         :param func: Any callable
         :param throttle_secs: throttling interation execution time limit in seconds
+        :param timeframe: ensure iteration is executed at the beginning of the next candle.
+        :param timeframe_offset: offset in seconds to apply to the next candle time.
         :return: Any (result of execution of func)
         """
-        self.last_throttle_start_time = time.time()
+        last_throttle_start_time = time.time()
         logger.debug("========================================")
         result = func(*args, **kwargs)
-        time_passed = time.time() - self.last_throttle_start_time
-        sleep_duration = max(throttle_secs - time_passed, 0.0)
+        time_passed = time.time() - last_throttle_start_time
+        sleep_duration = throttle_secs - time_passed
+        if timeframe:
+            next_tf = timeframe_to_next_date(timeframe)
+            # Maximum throttling should be until new candle arrives
+            # Offset of 0.2s is added to ensure a new candle has been issued.
+            next_tf_with_offset = next_tf.timestamp() - time.time() + timeframe_offset
+            sleep_duration = min(sleep_duration, next_tf_with_offset)
+        sleep_duration = max(sleep_duration, 0.0)
+        # next_iter = datetime.now(timezone.utc) + timedelta(seconds=sleep_duration)
+
         logger.debug(f"Throttling with '{func.__name__}()': sleep for {sleep_duration:.2f} s, "
                      f"last iteration took {time_passed:.2f} s.")
-        time.sleep(sleep_duration)
+        self._sleep(sleep_duration)
         return result

+    @staticmethod
+    def _sleep(sleep_duration: float) -> None:
+        """Local sleep method - to improve testability"""
+        time.sleep(sleep_duration)
+
     def _process_stopped(self) -> None:
         self.freqtrade.process_stopped()
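Note: the reworked `_throttle` caps the sleep so the next iteration starts at the next candle boundary plus `timeframe_offset`. The bounding logic in isolation (assuming `timeframe_to_next_date` from `freqtrade.exchange`, as imported above):

```python
import time
from typing import Optional

from freqtrade.exchange import timeframe_to_next_date


def bounded_sleep(throttle_secs: float, time_passed: float,
                  timeframe: Optional[str] = None, timeframe_offset: float = 1.0) -> float:
    sleep_duration = throttle_secs - time_passed
    if timeframe:
        # Never sleep past the next candle boundary (plus a small offset so
        # the exchange has actually issued the new candle).
        next_tf = timeframe_to_next_date(timeframe)
        sleep_duration = min(sleep_duration,
                             next_tf.timestamp() - time.time() + timeframe_offset)
    return max(sleep_duration, 0.0)
```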
@@ -11,7 +11,7 @@ flake8-tidy-imports==4.8.0
 mypy==0.982
 pre-commit==2.20.0
 pytest==7.1.3
-pytest-asyncio==0.19.0
+pytest-asyncio==0.20.1
 pytest-cov==4.0.0
 pytest-mock==3.10.0
 pytest-random-order==1.0.4
@@ -27,4 +27,4 @@ types-cachetools==5.2.1
 types-filelock==3.2.7
 types-requests==2.28.11.2
 types-tabulate==0.9.0.0
-types-python-dateutil==2.8.19.1
+types-python-dateutil==2.8.19.2
@@ -2,8 +2,8 @@
 -r requirements.txt

 # Required for hyperopt
-scipy==1.9.2
+scipy==1.9.3
 scikit-learn==1.1.2
 scikit-optimize==0.9.0
 filelock==3.8.0
-progressbar2==4.0.0
+progressbar2==4.1.1
@@ -1,10 +1,10 @@
 numpy==1.23.4
-pandas==1.5.0; platform_machine != 'armv7l'
+pandas==1.5.1; platform_machine != 'armv7l'
 # Piwheels doesn't have 1.5.0 yet.
 pandas==1.4.3; platform_machine == 'armv7l'
 pandas-ta==0.3.14b

-ccxt==2.0.25
+ccxt==2.0.58
 # Pin cryptography for now due to rust build errors with piwheels
 cryptography==38.0.1
 aiohttp==3.8.3
@@ -29,7 +29,7 @@ pyarrow==9.0.0; platform_machine != 'armv7l'
 py_find_1st==1.1.5

 # Load ticker files 30% faster
-python-rapidjson==1.8
+python-rapidjson==1.9
 # Properly format api responses
 orjson==3.8.0

@@ -38,9 +38,9 @@ sdnotify==0.3.2

 # API Server
 fastapi==0.85.1
-pydantic>=1.8.0
+pydantic==1.10.2
 uvicorn==0.18.3
-pyjwt==2.5.0
+pyjwt==2.6.0
 aiofiles==22.1.0
 psutil==5.9.2
@@ -15,7 +15,7 @@ from freqtrade.data.history.idatahandler import IDataHandler, get_datahandler, g
 from freqtrade.data.history.jsondatahandler import JsonDataHandler, JsonGzDataHandler
 from freqtrade.data.history.parquetdatahandler import ParquetDataHandler
 from freqtrade.enums import CandleType, TradingMode
-from tests.conftest import log_has
+from tests.conftest import log_has, log_has_re


 def test_datahandler_ohlcv_get_pairs(testdatadir):
@@ -154,6 +154,85 @@ def test_jsondatahandler_ohlcv_load(testdatadir, caplog):
     assert df.columns.equals(df1.columns)


+def test_datahandler__check_empty_df(testdatadir, caplog):
+    dh = JsonDataHandler(testdatadir)
+    expected_text = r"Price jump in UNITTEST/USDT, 1h, spot between"
+    df = DataFrame([
+        [
+            1511686200000,  # 8:50:00
+            8.794,  # open
+            8.948,  # high
+            8.794,  # low
+            8.88,  # close
+            2255,  # volume (in quote currency)
+        ],
+        [
+            1511686500000,  # 8:55:00
+            8.88,
+            8.942,
+            8.88,
+            8.893,
+            9911,
+        ],
+        [
+            1511687100000,  # 9:05:00
+            8.891,
+            8.893,
+            8.875,
+            8.877,
+            2251
+        ],
+        [
+            1511687400000,  # 9:10:00
+            8.877,
+            8.883,
+            8.895,
+            8.817,
+            123551
+        ]
+    ], columns=['date', 'open', 'high', 'low', 'close', 'volume'])
+
+    dh._check_empty_df(df, 'UNITTEST/USDT', '1h', CandleType.SPOT, True, True)
+    assert not log_has_re(expected_text, caplog)
+    df = DataFrame([
+        [
+            1511686200000,  # 8:50:00
+            8.794,  # open
+            8.948,  # high
+            8.794,  # low
+            8.88,  # close
+            2255,  # volume (in quote currency)
+        ],
+        [
+            1511686500000,  # 8:55:00
+            8.88,
+            8.942,
+            8.88,
+            8.893,
+            9911,
+        ],
+        [
+            1511687100000,  # 9:05:00
+            889.1,  # Price jump by several decimals
+            889.3,
+            887.5,
+            887.7,
+            2251
+        ],
+        [
+            1511687400000,  # 9:10:00
+            8.877,
+            8.883,
+            8.895,
+            8.817,
+            123551
+        ]
+    ], columns=['date', 'open', 'high', 'low', 'close', 'volume'])
+
+    dh._check_empty_df(df, 'UNITTEST/USDT', '1h', CandleType.SPOT, True, True)
+    assert log_has_re(expected_text, caplog)
+
+
 @pytest.mark.parametrize('datahandler', ['feather', 'parquet'])
 def test_datahandler_trades_not_supported(datahandler, testdatadir, ):
     dh = get_datahandler(testdatadir, datahandler)
@@ -125,7 +125,8 @@ def test_normalize_data(mocker, freqai_conf):
     freqai = make_data_dictionary(mocker, freqai_conf)
     data_dict = freqai.dk.data_dictionary
     freqai.dk.normalize_data(data_dict)
+    assert len(freqai.dk.data) == 32
     assert any('_max' in entry for entry in freqai.dk.data.keys())
     assert any('_min' in entry for entry in freqai.dk.data.keys())


 def test_filter_features(mocker, freqai_conf):
@@ -1,7 +1,10 @@
 import logging
 import time
+from datetime import timedelta
 from unittest.mock import MagicMock, PropertyMock

+import time_machine
+
 from freqtrade.data.dataprovider import DataProvider
 from freqtrade.enums import State
 from freqtrade.worker import Worker
@@ -59,13 +62,58 @@ def test_throttle(mocker, default_conf, caplog) -> None:
     end = time.time()

     assert result == 42
-    assert end - start > 0.1
+    assert 0.3 > end - start > 0.1
     assert log_has_re(r"Throttling with 'throttled_func\(\)': sleep for \d\.\d{2} s.*", caplog)

     result = worker._throttle(throttled_func, throttle_secs=-1)
     assert result == 42


+def test_throttle_sleep_time(mocker, default_conf, caplog) -> None:
+
+    caplog.set_level(logging.DEBUG)
+    worker = get_patched_worker(mocker, default_conf)
+    sleep_mock = mocker.patch("freqtrade.worker.Worker._sleep")
+    with time_machine.travel("2022-09-01 05:00:00 +00:00") as t:
+        def throttled_func(x=1):
+            t.shift(timedelta(seconds=x))
+            return 42
+
+        assert worker._throttle(throttled_func, throttle_secs=5) == 42
+        # This moves the clock by 1 second
+        assert sleep_mock.call_count == 1
+        assert 3.8 < sleep_mock.call_args[0][0] < 4.1
+
+        sleep_mock.reset_mock()
+        # This moves the clock by 1 second
+        assert worker._throttle(throttled_func, throttle_secs=10) == 42
+        assert sleep_mock.call_count == 1
+        assert 8.8 < sleep_mock.call_args[0][0] < 9.1
+
+        sleep_mock.reset_mock()
+        # This moves the clock by 5 second, so we only throttle by 5s
+        assert worker._throttle(throttled_func, throttle_secs=10, x=5) == 42
+        assert sleep_mock.call_count == 1
+        assert 4.8 < sleep_mock.call_args[0][0] < 5.1
+
+        t.move_to("2022-09-01 05:01:00 +00:00")
+        sleep_mock.reset_mock()
+        # Throttle for more than 5m (1 timeframe)
+        assert worker._throttle(throttled_func, throttle_secs=400, x=5) == 42
+        assert sleep_mock.call_count == 1
+        assert 394.8 < sleep_mock.call_args[0][0] < 395.1
+
+        t.move_to("2022-09-01 05:01:00 +00:00")
+
+        sleep_mock.reset_mock()
+        # Throttle for more than 5m (1 timeframe)
+        assert worker._throttle(throttled_func, throttle_secs=400, timeframe='5m',
+                                timeframe_offset=0.4, x=5) == 42
+        assert sleep_mock.call_count == 1
+        # 300 (5m) - 60 (1m - see set time above) - 5 (duration of throttled_func) = 235
+        assert 235.2 < sleep_mock.call_args[0][0] < 235.6
+
+
 def test_throttle_with_assets(mocker, default_conf) -> None:
     def throttled_func(nb_assets=-1):
         return nb_assets