Merge branch 'freqtrade:develop' into develop

commit 14f8a1a2b3
@@ -225,7 +225,7 @@ Mandatory parameters are marked as **Required**, which means that they are requi
 | `webhook.webhookexitcancel` | Payload to send on exit order cancel. Only required if `webhook.enabled` is `true`. See the [webhook documentation](webhook-config.md) for more details. <br> **Datatype:** String |
 | `webhook.webhookexitfill` | Payload to send on exit order filled. Only required if `webhook.enabled` is `true`. See the [webhook documentation](webhook-config.md) for more details. <br> **Datatype:** String |
 | `webhook.webhookstatus` | Payload to send on status calls. Only required if `webhook.enabled` is `true`. See the [webhook documentation](webhook-config.md) for more details. <br> **Datatype:** String |
-| | **Rest API / FreqUI / External Signals** |
+| | **Rest API / FreqUI / Producer-Consumer** |
 | `api_server.enabled` | Enable usage of API Server. See the [API Server documentation](rest-api.md) for more details. <br> **Datatype:** Boolean |
 | `api_server.listen_ip_address` | Bind IP address. See the [API Server documentation](rest-api.md) for more details. <br> **Datatype:** IPv4 |
 | `api_server.listen_port` | Bind Port. See the [API Server documentation](rest-api.md) for more details. <br>**Datatype:** Integer between 1024 and 65535 |
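
The only change in this docs hunk is the renamed table section header, aligning it with the producer/consumer terminology used by the new external-message features. For orientation, the `api_server.*` parameters listed above live in the bot's config.json; a minimal sketch of that block, written here as a Python dict with illustrative values (placeholders, not defaults taken from this commit):

    # Hypothetical excerpt covering the three documented api_server keys.
    config = {
        "api_server": {
            "enabled": True,                   # api_server.enabled (Boolean)
            "listen_ip_address": "127.0.0.1",  # bind address (IPv4)
            "listen_port": 8080,               # integer between 1024 and 65535
        }
    }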

@@ -1,4 +1,5 @@
 import logging
+from time import time
 from typing import Any, Tuple

 import numpy as np

@@ -32,7 +33,9 @@ class BaseClassifierModel(IFreqaiModel):
         :model: Trained model which can be used to inference (self.predict)
         """

-        logger.info("-------------------- Starting training " f"{pair} --------------------")
+        logger.info(f"-------------------- Starting training {pair} --------------------")
+
+        start_time = time()

         # filter the features requested by user in the configuration file and elegantly handle NaNs
         features_filtered, labels_filtered = dk.filter_features(

@@ -45,10 +48,10 @@ class BaseClassifierModel(IFreqaiModel):
         start_date = unfiltered_df["date"].iloc[0].strftime("%Y-%m-%d")
         end_date = unfiltered_df["date"].iloc[-1].strftime("%Y-%m-%d")
         logger.info(f"-------------------- Training on data from {start_date} to "
-                    f"{end_date}--------------------")
+                    f"{end_date} --------------------")
         # split data into train/test data.
         data_dictionary = dk.make_train_test_datasets(features_filtered, labels_filtered)
-        if not self.freqai_info.get('fit_live_predictions', 0) or not self.live:
+        if not self.freqai_info.get("fit_live_predictions", 0) or not self.live:
             dk.fit_labels()
         # normalize all data based on train_dataset only
         data_dictionary = dk.normalize_data(data_dictionary)

@@ -57,13 +60,16 @@ class BaseClassifierModel(IFreqaiModel):
         self.data_cleaning_train(dk)

         logger.info(
-            f'Training model on {len(dk.data_dictionary["train_features"].columns)}' " features"
+            f"Training model on {len(dk.data_dictionary['train_features'].columns)} features"
         )
-        logger.info(f'Training model on {len(data_dictionary["train_features"])} data points')
+        logger.info(f"Training model on {len(data_dictionary['train_features'])} data points")

         model = self.fit(data_dictionary, dk)

-        logger.info(f"--------------------done training {pair}--------------------")
+        end_time = time()
+
+        logger.info(f"-------------------- Done training {pair} "
+                    f"({end_time - start_time:.2f} secs) --------------------")

         return model
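
A note on the quote swaps in the logging lines above: before Python 3.12, an f-string cannot reuse its own outer quote character inside a replacement field, so a dict subscript forces the opposite quote style. The new lines standardize on double quotes outside with single-quoted keys, replacing the old mix of single-quoted f-strings and implicit string concatenation. A standalone illustration, not taken from this commit:

    d = {"train_features": [[0.1, 0.2], [0.3, 0.4]]}

    # New style used above: double quotes outside, single-quoted key inside.
    msg = f"Training model on {len(d['train_features'])} data points"

    # Pre-3.12, reusing the outer quote inside the braces is a SyntaxError:
    # msg = f"Training model on {len(d["train_features"])} data points"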

@@ -1,4 +1,5 @@
 import logging
+from time import time
 from typing import Any, Tuple

 import numpy as np

@@ -31,7 +32,9 @@ class BaseRegressionModel(IFreqaiModel):
         :model: Trained model which can be used to inference (self.predict)
         """

-        logger.info("-------------------- Starting training " f"{pair} --------------------")
+        logger.info(f"-------------------- Starting training {pair} --------------------")
+
+        start_time = time()

         # filter the features requested by user in the configuration file and elegantly handle NaNs
         features_filtered, labels_filtered = dk.filter_features(

@@ -44,10 +47,10 @@ class BaseRegressionModel(IFreqaiModel):
         start_date = unfiltered_df["date"].iloc[0].strftime("%Y-%m-%d")
         end_date = unfiltered_df["date"].iloc[-1].strftime("%Y-%m-%d")
         logger.info(f"-------------------- Training on data from {start_date} to "
-                    f"{end_date}--------------------")
+                    f"{end_date} --------------------")
         # split data into train/test data.
         data_dictionary = dk.make_train_test_datasets(features_filtered, labels_filtered)
-        if not self.freqai_info.get('fit_live_predictions', 0) or not self.live:
+        if not self.freqai_info.get("fit_live_predictions", 0) or not self.live:
             dk.fit_labels()
         # normalize all data based on train_dataset only
         data_dictionary = dk.normalize_data(data_dictionary)

@@ -56,13 +59,16 @@ class BaseRegressionModel(IFreqaiModel):
         self.data_cleaning_train(dk)

         logger.info(
-            f'Training model on {len(dk.data_dictionary["train_features"].columns)}' " features"
+            f"Training model on {len(dk.data_dictionary['train_features'].columns)} features"
         )
-        logger.info(f'Training model on {len(data_dictionary["train_features"])} data points')
+        logger.info(f"Training model on {len(data_dictionary['train_features'])} data points")

         model = self.fit(data_dictionary, dk)

-        logger.info(f"--------------------done training {pair}--------------------")
+        end_time = time()
+
+        logger.info(f"-------------------- Done training {pair} "
+                    f"({end_time - start_time:.2f} secs) --------------------")

         return model
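
The same instrumentation is applied verbatim in BaseClassifierModel, BaseRegressionModel, and BaseTensorFlowModel: import time(), stamp the start of train(), and report the elapsed seconds in the closing log line. Stripped of diff markers, the pattern reduces to this sketch (the helper name and body are illustrative only):

    import logging
    from time import time

    logger = logging.getLogger(__name__)

    def timed_train(pair: str) -> None:
        # Hypothetical stand-in for the base models' train() bodies.
        start_time = time()
        # ... filter features, split train/test, normalize, fit ...
        end_time = time()
        logger.info(f"-------------------- Done training {pair} "
                    f"({end_time - start_time:.2f} secs) --------------------")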

@@ -1,4 +1,5 @@
 import logging
+from time import time
 from typing import Any

 from pandas import DataFrame

@@ -28,7 +29,9 @@ class BaseTensorFlowModel(IFreqaiModel):
         :model: Trained model which can be used to inference (self.predict)
         """

-        logger.info("-------------------- Starting training " f"{pair} --------------------")
+        logger.info(f"-------------------- Starting training {pair} --------------------")
+
+        start_time = time()

         # filter the features requested by user in the configuration file and elegantly handle NaNs
         features_filtered, labels_filtered = dk.filter_features(

@@ -41,10 +44,10 @@ class BaseTensorFlowModel(IFreqaiModel):
         start_date = unfiltered_df["date"].iloc[0].strftime("%Y-%m-%d")
         end_date = unfiltered_df["date"].iloc[-1].strftime("%Y-%m-%d")
         logger.info(f"-------------------- Training on data from {start_date} to "
-                    f"{end_date}--------------------")
+                    f"{end_date} --------------------")
         # split data into train/test data.
         data_dictionary = dk.make_train_test_datasets(features_filtered, labels_filtered)
-        if not self.freqai_info.get('fit_live_predictions', 0) or not self.live:
+        if not self.freqai_info.get("fit_live_predictions", 0) or not self.live:
             dk.fit_labels()
         # normalize all data based on train_dataset only
         data_dictionary = dk.normalize_data(data_dictionary)

@@ -53,12 +56,15 @@ class BaseTensorFlowModel(IFreqaiModel):
         self.data_cleaning_train(dk)

         logger.info(
-            f'Training model on {len(dk.data_dictionary["train_features"].columns)}' " features"
+            f"Training model on {len(dk.data_dictionary['train_features'].columns)} features"
         )
-        logger.info(f'Training model on {len(data_dictionary["train_features"])} data points')
+        logger.info(f"Training model on {len(data_dictionary['train_features'])} data points")

         model = self.fit(data_dictionary, dk)

-        logger.info(f"--------------------done training {pair}--------------------")
+        end_time = time()
+
+        logger.info(f"-------------------- Done training {pair} "
+                    f"({end_time - start_time:.2f} secs) --------------------")

         return model

@@ -1,4 +1,3 @@
-
 from joblib import Parallel
 from sklearn.multioutput import MultiOutputRegressor, _fit_estimator
 from sklearn.utils.fixes import delayed

@@ -244,7 +244,8 @@ class IFreqaiModel(ABC):
         # following tr_train. Both of these windows slide through the
         # entire backtest
         for tr_train, tr_backtest in zip(dk.training_timeranges, dk.backtesting_timeranges):
-            (_, _, _) = self.dd.get_pair_dict_info(metadata["pair"])
+            pair = metadata["pair"]
+            (_, _, _) = self.dd.get_pair_dict_info(pair)
             train_it += 1
             total_trains = len(dk.backtesting_timeranges)
             self.training_timerange = tr_train

@@ -266,12 +267,10 @@ class IFreqaiModel(ABC):

             trained_timestamp_int = int(trained_timestamp.stopts)
             dk.data_path = Path(
-                dk.full_path
-                /
-                f"sub-train-{metadata['pair'].split('/')[0]}_{trained_timestamp_int}"
+                dk.full_path / f"sub-train-{pair.split('/')[0]}_{trained_timestamp_int}"
             )

-            dk.set_new_model_names(metadata["pair"], trained_timestamp)
+            dk.set_new_model_names(pair, trained_timestamp)

             if dk.check_if_backtest_prediction_exists():
                 append_df = dk.get_backtesting_prediction()

@@ -281,15 +280,15 @@ class IFreqaiModel(ABC):
                 metadata["pair"], dk, trained_timestamp=trained_timestamp_int
             ):
                 dk.find_features(dataframe_train)
-                self.model = self.train(dataframe_train, metadata["pair"], dk)
-                self.dd.pair_dict[metadata["pair"]]["trained_timestamp"] = int(
+                self.model = self.train(dataframe_train, pair, dk)
+                self.dd.pair_dict[pair]["trained_timestamp"] = int(
                     trained_timestamp.stopts)

                 if self.save_backtest_models:
                     logger.info('Saving backtest model to disk.')
-                    self.dd.save_data(self.model, metadata["pair"], dk)
+                    self.dd.save_data(self.model, pair, dk)
                 else:
-                    self.model = self.dd.load_data(metadata["pair"], dk)
+                    self.model = self.dd.load_data(pair, dk)

                 self.check_if_feature_list_matches_strategy(dataframe_train, dk)
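
The freqai_interface changes are a pure extract-variable refactor: the repeated metadata["pair"] lookups inside the backtesting loop are hoisted into one local, which also lets the sub-train Path expression collapse onto a single line. The shape of the refactor, in a generic sketch (names are illustrative, not freqtrade APIs):

    # Before: the same dict lookup repeated at every use site.
    def describe(metadata: dict) -> str:
        return metadata["pair"] + " base: " + metadata["pair"].split("/")[0]

    # After: one lookup, one short name, shorter call sites.
    def describe_refactored(metadata: dict) -> str:
        pair = metadata["pair"]
        return pair + " base: " + pair.split("/")[0]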

@@ -217,11 +217,14 @@ class ExternalMessageConsumer:
             ) as e:
                 logger.error(f"Connection Refused - {e} retrying in {self.sleep_time}s")
                 await asyncio.sleep(self.sleep_time)

                 continue

-            except websockets.exceptions.ConnectionClosedOK:
-                # Successfully closed, just keep trying to connect again indefinitely
+            except (
+                websockets.exceptions.ConnectionClosedError,
+                websockets.exceptions.ConnectionClosedOK
+            ):
+                # Just keep trying to connect again indefinitely
+                await asyncio.sleep(self.sleep_time)
                 continue

             except Exception as e:
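
Functionally, this routes an abnormal close (ConnectionClosedError) through the same back-off-and-retry path as a clean close (ConnectionClosedOK), rather than letting it fall through to the generic Exception handler. A self-contained sketch of that reconnect shape using the websockets library (the URL, sleep time, and recv loop are placeholders; handling for refused connections is omitted):

    import asyncio
    import websockets

    async def consume_forever(url: str = "ws://127.0.0.1:8080/ws",
                              sleep_time: float = 10.0) -> None:
        while True:
            try:
                async with websockets.connect(url) as ws:
                    while True:
                        # recv() raises ConnectionClosed* once the peer closes.
                        message = await ws.recv()
                        print(message)
            except (
                websockets.exceptions.ConnectionClosedError,
                websockets.exceptions.ConnectionClosedOK,
            ):
                # Closed cleanly or not: wait, then reconnect indefinitely.
                await asyncio.sleep(sleep_time)
                continue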

@@ -200,43 +200,60 @@ async def test_emc_create_connection_success(default_conf, caplog, mocker):
         emc.shutdown()


-# async def test_emc_create_connection_invalid(default_conf, caplog, mocker):
-#     default_conf.update({
-#         "external_message_consumer": {
-#             "enabled": True,
-#             "producers": [
-#                 {
-#                     "name": "default",
-#                     "host": _TEST_WS_HOST,
-#                     "port": _TEST_WS_PORT,
-#                     "ws_token": _TEST_WS_TOKEN
-#                 }
-#             ],
-#             "wait_timeout": 60,
-#             "ping_timeout": 60,
-#             "sleep_timeout": 60
-#         }
-#     })
-#
-#     mocker.patch('freqtrade.rpc.external_message_consumer.ExternalMessageConsumer.start',
-#                  MagicMock())
-#
-#     test_producer = default_conf['external_message_consumer']['producers'][0]
-#     lock = asyncio.Lock()
-#
-#     dp = DataProvider(default_conf, None, None, None)
-#     emc = ExternalMessageConsumer(default_conf, dp)
-#
-#     try:
-#         # Test invalid URL
-#         test_producer['url'] = "tcp://null:8080/api/v1/message/ws"
-#         emc._running = True
-#         await emc._create_connection(test_producer, lock)
-#         emc._running = False
-#
-#         assert log_has_re(r".+is an invalid WebSocket URL.+", caplog)
-#     finally:
-#         emc.shutdown()
+async def test_emc_create_connection_invalid_port(default_conf, caplog, mocker):
+    default_conf.update({
+        "external_message_consumer": {
+            "enabled": True,
+            "producers": [
+                {
+                    "name": "default",
+                    "host": _TEST_WS_HOST,
+                    "port": -1,
+                    "ws_token": _TEST_WS_TOKEN
+                }
+            ],
+            "wait_timeout": 60,
+            "ping_timeout": 60,
+            "sleep_timeout": 60
+        }
+    })
+
+    dp = DataProvider(default_conf, None, None, None)
+    emc = ExternalMessageConsumer(default_conf, dp)
+
+    try:
+        await asyncio.sleep(0.01)
+        assert log_has_re(r".+ is an invalid WebSocket URL .+", caplog)
+    finally:
+        emc.shutdown()
+
+
+async def test_emc_create_connection_invalid_host(default_conf, caplog, mocker):
+    default_conf.update({
+        "external_message_consumer": {
+            "enabled": True,
+            "producers": [
+                {
+                    "name": "default",
+                    "host": "10000.1241..2121/",
+                    "port": _TEST_WS_PORT,
+                    "ws_token": _TEST_WS_TOKEN
+                }
+            ],
+            "wait_timeout": 60,
+            "ping_timeout": 60,
+            "sleep_timeout": 60
+        }
+    })
+
+    dp = DataProvider(default_conf, None, None, None)
+    emc = ExternalMessageConsumer(default_conf, dp)
+
+    try:
+        await asyncio.sleep(0.01)
+        assert log_has_re(r".+ is an invalid WebSocket URL .+", caplog)
+    finally:
+        emc.shutdown()
+
+
 async def test_emc_create_connection_error(default_conf, caplog, mocker):
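
The rewritten tests replace the commented-out draft with two live cases, one per invalid field (port -1, malformed host). Both lean on the same pattern: per these tests, constructing ExternalMessageConsumer kicks off the connection attempt in the background, so the test merely yields with await asyncio.sleep(0.01) before asserting the "invalid WebSocket URL" log line. Distilled to a toy coroutine (not the freqtrade API):

    import asyncio
    import logging

    logger = logging.getLogger(__name__)

    async def background_connect() -> None:
        # Stand-in for the consumer's failing connection attempt.
        logger.error("ws://bad-host:-1 is an invalid WebSocket URL ")

    async def test_pattern(caplog) -> None:
        task = asyncio.create_task(background_connect())
        await asyncio.sleep(0.01)  # yield so the background task runs and logs
        assert "invalid WebSocket URL" in caplog.text
        task.cancel()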

@@ -376,7 +393,7 @@ async def test_emc_receive_messages_timeout(default_conf, caplog, mocker):
                    "ws_token": _TEST_WS_TOKEN
                }
            ],
-            "wait_timeout": 1,
+            "wait_timeout": 0.1,
            "ping_timeout": 1,
            "sleep_time": 1
        }

@@ -396,7 +413,7 @@ async def test_emc_receive_messages_timeout(default_conf, caplog, mocker):

     class TestChannel:
         async def recv(self, *args, **kwargs):
-            await asyncio.sleep(10)
+            await asyncio.sleep(0.2)

         async def ping(self, *args, **kwargs):
             return asyncio.Future()
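
These two timing tweaks cut the test's wall-clock cost without changing what it proves: recv now stalls for 0.2 s against a 0.1 s wait_timeout, so the receive path's timeout still fires reliably while the test finishes in a fraction of the former time. In isolation, the presumed asyncio.wait_for mechanism looks like this sketch (not the consumer's exact code):

    import asyncio

    async def slow_recv() -> str:
        await asyncio.sleep(0.2)  # stand-in for TestChannel.recv
        return "message"

    async def demo() -> None:
        try:
            await asyncio.wait_for(slow_recv(), timeout=0.1)
        except asyncio.TimeoutError:
            print("receive timed out")  # the branch the test exercises

    asyncio.run(demo())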