merge develop into track-current-candle
Commit 444a068481
@@ -286,6 +286,18 @@ Min price precision for SHITCOIN/BTC is 8 decimals. If its price is 0.00000011 -
 
 Shuffles (randomizes) pairs in the pairlist. It can be used to prevent the bot from trading some of the pairs more frequently than others when you want all pairs to be treated with the same priority.
 
+By default, ShuffleFilter will shuffle pairs once per candle.
+To shuffle on every iteration, set `"shuffle_frequency"` to `"iteration"` instead of the default of `"candle"`.
+
+``` json
+    {
+        "method": "ShuffleFilter",
+        "shuffle_frequency": "candle",
+        "seed": 42
+    }
+
+```
+
 !!! Tip
     You may set the `seed` value for this Pairlist to obtain reproducible results, which can be useful for repeated backtesting sessions. If `seed` is not set, the pairs are shuffled in a non-repeatable random order. ShuffleFilter will automatically detect runmodes and apply the `seed` only for backtesting modes - if a `seed` value is set.
 
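For comparison, the same filter configured to reshuffle on every bot iteration rather than once per candle. This is a sketch in Python dict form (the keys are the documented ones from the JSON above; the surrounding `pairlists` wrapper is assumed from the wider pairlist docs):

``` python
# Hypothetical pairlists configuration, reshuffling on every iteration.
pairlists = [
    {"method": "StaticPairList"},
    {
        "method": "ShuffleFilter",
        "shuffle_frequency": "iteration",  # reshuffle on every pairlist refresh
        "seed": 42,                        # omit for non-reproducible ordering
    },
]
```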
@@ -1316,14 +1316,16 @@ class FreqaiDataKitchen:
         append_df = pd.read_hdf(self.backtesting_results_path)
         return append_df
 
-    def check_if_backtest_prediction_exists(
-        self
+    def check_if_backtest_prediction_is_valid(
+        self,
+        length_backtesting_dataframe: int
     ) -> bool:
         """
-        Check if a backtesting prediction already exists
-        :param dk: FreqaiDataKitchen
+        Check if a backtesting prediction already exists and if the predictions
+        to append have the same length as the backtesting dataframe slice
+        :param length_backtesting_dataframe: Length of backtesting dataframe slice
         :return:
-        :boolean: whether the prediction file exists or not.
+        :boolean: whether the prediction file is valid.
         """
         path_to_predictionfile = Path(self.full_path /
                                       self.backtest_predictions_folder /
@@ -1331,13 +1333,21 @@ class FreqaiDataKitchen:
         self.backtesting_results_path = path_to_predictionfile
 
         file_exists = path_to_predictionfile.is_file()
 
         if file_exists:
-            logger.info(f"Found backtesting prediction file at {path_to_predictionfile}")
+            append_df = self.get_backtesting_prediction()
+            if len(append_df) == length_backtesting_dataframe:
+                logger.info(f"Found backtesting prediction file at {path_to_predictionfile}")
+                return True
+            else:
+                logger.info("A new backtesting prediction file is required. "
+                            "(Number of predictions is different from dataframe length).")
+                return False
         else:
             logger.info(
                 f"Could not find backtesting prediction file at {path_to_predictionfile}"
             )
-        return file_exists
+            return False
 
     def remove_special_chars_from_feature_names(self, dataframe: pd.DataFrame) -> pd.DataFrame:
         """
@@ -277,7 +277,7 @@ class IFreqaiModel(ABC):
 
             dk.set_new_model_names(pair, trained_timestamp)
 
-            if dk.check_if_backtest_prediction_exists():
+            if dk.check_if_backtest_prediction_is_valid(len(dataframe_backtest)):
                 self.dd.load_metadata(dk)
                 dk.find_features(dataframe_train)
                 self.check_if_feature_list_matches_strategy(dk)
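A minimal sketch of why the call site now passes the slice length (hypothetical data, not freqtrade code): cached predictions are only reused when their row count matches the backtest slice, since a mismatch would silently misalign them when appended.

``` python
import pandas as pd

# Hypothetical shapes: predictions cached by an earlier run vs a backtest
# slice produced with a different timerange.
cached_predictions = pd.DataFrame({"prediction": [0.1, 0.2, 0.3]})
dataframe_backtest = pd.DataFrame({"close": [1.0, 1.1, 1.2, 1.3]})

# Mirrors the new validity rule: reuse only on an exact length match.
if len(cached_predictions) == len(dataframe_backtest):
    print("Reusing cached predictions")
else:
    print("A new backtesting prediction file is required.")
```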
@@ -36,7 +36,6 @@ class IPairList(LoggingMixin, ABC):
         self._pairlistconfig = pairlistconfig
         self._pairlist_pos = pairlist_pos
         self.refresh_period = self._pairlistconfig.get('refresh_period', 1800)
-        self._last_refresh = 0
         LoggingMixin.__init__(self, logger, self.refresh_period)
 
     @property
@@ -3,16 +3,20 @@ Shuffle pair list filter
 """
 import logging
 import random
-from typing import Any, Dict, List
+from typing import Any, Dict, List, Literal
 
 from freqtrade.constants import Config
 from freqtrade.enums import RunMode
+from freqtrade.exchange import timeframe_to_seconds
 from freqtrade.exchange.types import Tickers
 from freqtrade.plugins.pairlist.IPairList import IPairList
+from freqtrade.util.periodic_cache import PeriodicCache
 
 
 logger = logging.getLogger(__name__)
 
+ShuffleValues = Literal['candle', 'iteration']
+
 
 class ShuffleFilter(IPairList):
 
@@ -31,6 +35,9 @@ class ShuffleFilter(IPairList):
             logger.info(f"Backtesting mode detected, applying seed value: {self._seed}")
 
         self._random = random.Random(self._seed)
+        self._shuffle_freq: ShuffleValues = pairlistconfig.get('shuffle_frequency', 'candle')
+        self.__pairlist_cache = PeriodicCache(
+            maxsize=1000, ttl=timeframe_to_seconds(self._config['timeframe']))
 
     @property
     def needstickers(self) -> bool:
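The cache above expires once per candle: `ttl` is the timeframe length in seconds, so a cached shuffle survives until the candle rolls over. A rough stand-in using a plain TTL cache (an assumption for illustration — freqtrade's `PeriodicCache` additionally aligns expiry to period boundaries rather than to insertion time):

``` python
from cachetools import TTLCache

timeframe_seconds = 300  # assumed "5m" timeframe

# Entries live for roughly one candle length after insertion.
pairlist_cache: TTLCache = TTLCache(maxsize=1000, ttl=timeframe_seconds)

key = ("BTC/USDT", "ETH/USDT")        # a pairlist is hashable as a tuple
pairlist_cache[key] = ["ETH/USDT", "BTC/USDT"]
assert pairlist_cache.get(key) == ["ETH/USDT", "BTC/USDT"]
```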
@@ -45,7 +52,7 @@ class ShuffleFilter(IPairList):
         """
         Short whitelist method description - used for startup-messages
         """
-        return (f"{self.name} - Shuffling pairs" +
+        return (f"{self.name} - Shuffling pairs every {self._shuffle_freq}" +
                 (f", seed = {self._seed}." if self._seed is not None else "."))
 
     def filter_pairlist(self, pairlist: List[str], tickers: Tickers) -> List[str]:
@@ -56,7 +63,13 @@ class ShuffleFilter(IPairList):
         :param tickers: Tickers (from exchange.get_tickers). May be cached.
         :return: new whitelist
         """
+        pairlist_bef = tuple(pairlist)
+        pairlist_new = self.__pairlist_cache.get(pairlist_bef)
+        if pairlist_new and self._shuffle_freq == 'candle':
+            # Use cached pairlist.
+            return pairlist_new
         # Shuffle is done inplace
         self._random.shuffle(pairlist)
+        self.__pairlist_cache[pairlist_bef] = pairlist
 
         return pairlist
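A self-contained sketch of the observable behavior (simplified from the filter above; a plain dict stands in for the per-candle `PeriodicCache`): with `'candle'` the same shuffled order is returned for as long as the cache entry lives, while `'iteration'` reshuffles on every call.

``` python
import random


class MiniShuffle:
    """Simplified stand-in for ShuffleFilter's caching logic."""

    def __init__(self, freq: str, seed: int = 42) -> None:
        self._random = random.Random(seed)
        self._freq = freq
        self._cache: dict = {}  # no TTL here, unlike the real PeriodicCache

    def filter(self, pairlist: list) -> list:
        key = tuple(pairlist)           # keyed by the pre-shuffle order
        cached = self._cache.get(key)
        if cached and self._freq == 'candle':
            return cached
        self._random.shuffle(pairlist)  # shuffle is done in place
        self._cache[key] = pairlist
        return pairlist


pairs = ["A/USDT", "B/USDT", "C/USDT", "D/USDT"]
candle = MiniShuffle('candle')
# Stable within a "candle": the cached order is returned on the second call.
assert candle.filter(list(pairs)) == candle.filter(list(pairs))
# With 'iteration', every call reshuffles, so the order will usually differ.
```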
@@ -127,13 +127,6 @@ async def message_endpoint(
             except Exception as e:
                 logger.info(f"Consumer connection failed - {channel}: {e}")
                 logger.debug(e, exc_info=e)
-            finally:
-                await channel_manager.on_disconnect(ws)
-
-        else:
-            if channel:
-                await channel_manager.on_disconnect(ws)
-            await ws.close()
 
     except RuntimeError:
         # WebSocket was closed
@@ -144,4 +137,5 @@ async def message_endpoint(
         # Log tracebacks to keep track of what errors are happening
         logger.exception(e)
     finally:
-        await channel_manager.on_disconnect(ws)
+        if channel:
+            await channel_manager.on_disconnect(ws)
@@ -197,6 +197,7 @@ class ApiServer(RPCHandler):
                 # Get data from queue
                 message: WSMessageSchemaType = await async_queue.get()
                 logger.debug(f"Found message of type: {message.get('type')}")
+                async_queue.task_done()
                 # Broadcast it
                 await self._ws_channel_manager.broadcast(message)
         except asyncio.CancelledError:
@@ -210,6 +211,9 @@ class ApiServer(RPCHandler):
             # Disconnect channels and stop the loop on cancel
             await self._ws_channel_manager.disconnect_all()
             self._ws_loop.stop()
+            # Avoid adding more items to the queue if they aren't
+            # going to get broadcasted.
+            self._ws_queue = None
 
     def start_api(self):
         """
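Clearing `self._ws_queue` on shutdown implies that producers must treat the queue as optional. A hedged sketch of the producer side (the real `send_msg` is not shown in this diff; the class and method below are illustrative):

``` python
from typing import Optional
import queue


class Sender:
    """Illustrative producer that must tolerate a shut-down queue."""

    def __init__(self) -> None:
        # Set to None once the broadcast loop has stopped.
        self._ws_queue: Optional[queue.Queue] = queue.Queue()

    def send_msg(self, msg: dict) -> None:
        # Drop messages after shutdown instead of queueing data
        # that will never be broadcast.
        if self._ws_queue is not None:
            self._ws_queue.put(msg)
```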
@@ -1,5 +1,6 @@
 import asyncio
 import logging
+import time
 from threading import RLock
 from typing import Any, Dict, List, Optional, Type, Union
 from uuid import uuid4
@@ -46,7 +47,7 @@ class WebSocketChannel:
         self._relay_task = asyncio.create_task(self.relay())
 
         # Internal event to signify a closed websocket
-        self._closed = False
+        self._closed = asyncio.Event()
 
         # Wrap the WebSocket in the Serializing class
         self._wrapped_ws = self._serializer_cls(self._websocket)
@@ -73,15 +74,26 @@ class WebSocketChannel:
         Add the data to the queue to be sent.
         :returns: True if data added to queue, False otherwise
         """
+
+        # This block only runs if the queue is full; it waits up to
+        # self.drain_timeout for the relay to drain the outgoing queue.
+        # We can't use asyncio.wait_for here because the queue may have
+        # been created with a different eventloop.
+        start = time.time()
+        while self.queue.full():
+            await asyncio.sleep(1)
+            if (time.time() - start) > self.drain_timeout:
+                return False
+
+        # If for some reason the queue is still full, just return False
         try:
-            await asyncio.wait_for(
-                self.queue.put(data),
-                timeout=self.drain_timeout
-            )
-            return True
-        except asyncio.TimeoutError:
+            self.queue.put_nowait(data)
+        except asyncio.QueueFull:
             return False
 
+        # If we got here everything is ok
+        return True
+
     async def recv(self):
         """
         Receive data on the wrapped websocket
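The polling loop replaces `asyncio.wait_for(self.queue.put(data), ...)` because awaiting a queue that was created on a different event loop fails. The pattern in isolation (a sketch; the 1-second poll interval and the drain timeout mirror the diff):

``` python
import asyncio
import time


async def put_with_drain_timeout(q: asyncio.Queue, item, drain_timeout: float) -> bool:
    """Poll for free space instead of awaiting a possibly foreign-loop queue."""
    start = time.time()
    while q.full():
        await asyncio.sleep(1)
        if (time.time() - start) > drain_timeout:
            return False
    try:
        # put_nowait never awaits, so no cross-eventloop await is involved.
        q.put_nowait(item)
    except asyncio.QueueFull:
        return False
    return True
```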
@@ -99,14 +111,19 @@ class WebSocketChannel:
         Close the WebSocketChannel
         """
 
-        self._closed = True
+        try:
+            await self.raw_websocket.close()
+        except Exception:
+            pass
+
+        self._closed.set()
         self._relay_task.cancel()
 
     def is_closed(self) -> bool:
         """
         Closed flag
         """
-        return self._closed
+        return self._closed.is_set()
 
     def set_subscriptions(self, subscriptions: List[str] = []) -> None:
         """
@@ -129,7 +146,7 @@ class WebSocketChannel:
         Relay messages from the channel's queue and send them out. This is started
         as a task.
         """
-        while True:
+        while not self._closed.is_set():
             message = await self.queue.get()
             try:
                 await self._send(message)
@@ -264,10 +264,10 @@ class ExternalMessageConsumer:
                 # We haven't received data yet. Check the connection and continue.
                 try:
                     # ping
-                    ping = await channel.ping()
+                    pong = await channel.ping()
+                    latency = (await asyncio.wait_for(pong, timeout=self.ping_timeout) * 1000)
 
-                    await asyncio.wait_for(ping, timeout=self.ping_timeout)
-                    logger.debug(f"Connection to {channel} still alive...")
+                    logger.info(f"Connection to {channel} still alive, latency: {latency}ms")
 
                     continue
                 except (websockets.exceptions.ConnectionClosed):
@@ -276,7 +276,7 @@ class ExternalMessageConsumer:
                     await asyncio.sleep(self.sleep_time)
                     break
                 except Exception as e:
-                    logger.warning(f"Ping error {channel} - retrying in {self.sleep_time}s")
+                    logger.warning(f"Ping error {channel} - {e} - retrying in {self.sleep_time}s")
                     logger.debug(e, exc_info=e)
                     await asyncio.sleep(self.sleep_time)
 
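The latency value comes from the `websockets` library: `ping()` returns a waiter that, in recent websockets releases (10.3+, an assumption about the pinned version), resolves to the round-trip time in seconds, which the code above converts to milliseconds. A standalone sketch (the URL is a placeholder):

``` python
import asyncio

import websockets


async def measure_latency(url: str, ping_timeout: float = 10.0) -> float:
    """Connect and return the round-trip ping latency in milliseconds."""
    async with websockets.connect(url) as ws:
        pong = await ws.ping()  # returns a waiter for the matching pong
        return (await asyncio.wait_for(pong, timeout=ping_timeout)) * 1000


# Example (placeholder endpoint):
# print(asyncio.run(measure_latency("ws://127.0.0.1:8080/api/v1/message/ws")))
```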
@@ -18,7 +18,6 @@ import orjson
 import pandas
 import rapidjson
 import websockets
-from dateutil.relativedelta import relativedelta
 
 
 logger = logging.getLogger("WebSocketClient")
@@ -28,7 +27,7 @@ logger = logging.getLogger("WebSocketClient")
 
 def setup_logging(filename: str):
     logging.basicConfig(
-        level=logging.INFO,
+        level=logging.DEBUG,
         format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
         handlers=[
             logging.FileHandler(filename),
@@ -75,16 +74,15 @@ def load_config(configfile):
 
 def readable_timedelta(delta):
     """
-    Convert a dateutil.relativedelta to a readable format
+    Convert a millisecond delta to a readable format
 
-    :param delta: A dateutil.relativedelta
+    :param delta: A delta between two timestamps in milliseconds
     :returns: The readable time difference string
     """
-    attrs = ['years', 'months', 'days', 'hours', 'minutes', 'seconds', 'microseconds']
-    return ", ".join([
-        '%d %s' % (getattr(delta, attr), attr if getattr(delta, attr) > 0 else attr[:-1])
-        for attr in attrs if getattr(delta, attr)
-    ])
+    seconds, milliseconds = divmod(delta, 1000)
+    minutes, seconds = divmod(seconds, 60)
+
+    return f"{int(minutes)}:{int(seconds)}.{int(milliseconds)}"
 
 # ----------------------------------------------------------------------------
 
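A quick worked example of the new format (values chosen for illustration): 65432 ms splits into 65 s + 432 ms, then 1 min + 5 s, rendered as minutes:seconds.milliseconds.

``` python
delta = 65432                                # milliseconds
seconds, milliseconds = divmod(delta, 1000)  # 65, 432
minutes, seconds = divmod(seconds, 60)       # 1, 5
assert f"{int(minutes)}:{int(seconds)}.{int(milliseconds)}" == "1:5.432"
```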
@@ -170,8 +168,8 @@ class ClientProtocol:
 
     def _calculate_time_difference(self):
         old_last_received_at = self._LAST_RECEIVED_AT
-        self._LAST_RECEIVED_AT = time.time() * 1e6
-        time_delta = relativedelta(microseconds=(self._LAST_RECEIVED_AT - old_last_received_at))
+        self._LAST_RECEIVED_AT = time.time() * 1e3
+        time_delta = self._LAST_RECEIVED_AT - old_last_received_at
 
         return readable_timedelta(time_delta)
 
@@ -242,12 +240,10 @@ async def create_client(
             ):
                 # Try pinging
                 try:
-                    pong = ws.ping()
-                    await asyncio.wait_for(
-                        pong,
-                        timeout=ping_timeout
-                    )
-                    logger.info("Connection still alive...")
+                    pong = await ws.ping()
+                    latency = (await asyncio.wait_for(pong, timeout=ping_timeout) * 1000)
+
+                    logger.info(f"Connection still alive, latency: {latency}ms")
 
                     continue
 
@@ -272,6 +268,7 @@ async def create_client(
                 websockets.exceptions.ConnectionClosedError,
                 websockets.exceptions.ConnectionClosedOK
             ):
+                logger.info("Connection was closed")
                 # Just keep trying to connect again indefinitely
                 await asyncio.sleep(sleep_time)
 
|
@ -27,13 +27,13 @@ def is_mac() -> bool:
|
|||||||
return "Darwin" in machine
|
return "Darwin" in machine
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('model', [
|
@pytest.mark.parametrize('model, pca, dbscan', [
|
||||||
'LightGBMRegressor',
|
('LightGBMRegressor', True, False),
|
||||||
'XGBoostRegressor',
|
('XGBoostRegressor', False, True),
|
||||||
'XGBoostRFRegressor',
|
('XGBoostRFRegressor', False, False),
|
||||||
'CatboostRegressor',
|
('CatboostRegressor', False, False),
|
||||||
])
|
])
|
||||||
def test_extract_data_and_train_model_Standard(mocker, freqai_conf, model):
|
def test_extract_data_and_train_model_Standard(mocker, freqai_conf, model, pca, dbscan):
|
||||||
if is_arm() and model == 'CatboostRegressor':
|
if is_arm() and model == 'CatboostRegressor':
|
||||||
pytest.skip("CatBoost is not supported on ARM")
|
pytest.skip("CatBoost is not supported on ARM")
|
||||||
|
|
||||||
@@ -41,6 +41,8 @@ def test_extract_data_and_train_model_Standard(mocker, freqai_conf, model):
     freqai_conf.update({"freqaimodel": model})
     freqai_conf.update({"timerange": "20180110-20180130"})
     freqai_conf.update({"strategy": "freqai_test_strat"})
+    freqai_conf['freqai']['feature_parameters'].update({"principal_component_analysis": pca})
+    freqai_conf['freqai']['feature_parameters'].update({"use_DBSCAN_to_remove_outliers": dbscan})
 
     strategy = get_patched_freqai_strategy(mocker, freqai_conf)
     exchange = get_patched_exchange(mocker, freqai_conf)
|
@ -2,6 +2,8 @@
|
|||||||
|
|
||||||
import logging
|
import logging
|
||||||
import time
|
import time
|
||||||
|
from copy import deepcopy
|
||||||
|
from datetime import timedelta
|
||||||
from unittest.mock import MagicMock, PropertyMock
|
from unittest.mock import MagicMock, PropertyMock
|
||||||
|
|
||||||
import pandas as pd
|
import pandas as pd
|
||||||
@@ -719,15 +721,26 @@ def test_PerformanceFilter_error(mocker, whitelist_conf, caplog) -> None:
 def test_ShuffleFilter_init(mocker, whitelist_conf, caplog) -> None:
     whitelist_conf['pairlists'] = [
         {"method": "StaticPairList"},
-        {"method": "ShuffleFilter", "seed": 42}
+        {"method": "ShuffleFilter", "seed": 43}
     ]
 
     exchange = get_patched_exchange(mocker, whitelist_conf)
-    PairListManager(exchange, whitelist_conf)
-    assert log_has("Backtesting mode detected, applying seed value: 42", caplog)
+    plm = PairListManager(exchange, whitelist_conf)
+    assert log_has("Backtesting mode detected, applying seed value: 43", caplog)
 
+    with time_machine.travel("2021-09-01 05:01:00 +00:00") as t:
+        plm.refresh_pairlist()
+        pl1 = deepcopy(plm.whitelist)
+        plm.refresh_pairlist()
+        assert plm.whitelist == pl1
+
+        t.shift(timedelta(minutes=10))
+        plm.refresh_pairlist()
+        assert plm.whitelist != pl1
+
     caplog.clear()
     whitelist_conf['runmode'] = RunMode.DRY_RUN
-    PairListManager(exchange, whitelist_conf)
+    plm = PairListManager(exchange, whitelist_conf)
     assert not log_has("Backtesting mode detected, applying seed value: 42", caplog)
     assert log_has("Live mode detected, not applying seed.", caplog)
 
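The new assertions drive the candle-expiry behavior with the `time_machine` library: freeze time, confirm the cached order is stable across refreshes, then shift past the timeframe TTL and confirm a reshuffle. The pattern in isolation (a sketch; assumes `time-machine` is installed):

``` python
import time
from datetime import timedelta

import time_machine

with time_machine.travel("2021-09-01 05:01:00 +00:00") as t:
    before = time.time()
    t.shift(timedelta(minutes=10))  # jump forward past a 5m candle TTL
    assert time.time() - before >= 600
```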