From 0a55753faf54fb3e7e741041617de6a44c9e37aa Mon Sep 17 00:00:00 2001
From: Yinon Polak
Date: Mon, 20 Mar 2023 19:40:36 +0200
Subject: [PATCH] move default attributes of pytorch classifier to initializer,
 to prevent mypy from complaining

---
 .../prediction_models/PyTorchMLPClassifier.py | 16 +++++++++++-----
 .../prediction_models/PyTorchMLPRegressor.py  | 16 +++++++++++-----
 2 files changed, 22 insertions(+), 10 deletions(-)

diff --git a/freqtrade/freqai/prediction_models/PyTorchMLPClassifier.py b/freqtrade/freqai/prediction_models/PyTorchMLPClassifier.py
index ce8fbd336..edba75c2a 100644
--- a/freqtrade/freqai/prediction_models/PyTorchMLPClassifier.py
+++ b/freqtrade/freqai/prediction_models/PyTorchMLPClassifier.py
@@ -41,12 +41,18 @@ class PyTorchMLPClassifier(PyTorchClassifier):
 
     """
 
-    def __init__(self, **kwargs):
+    def __init__(
+        self,
+        learning_rate: float = 3e-4,
+        model_kwargs: Dict[str, Any] = {},
+        trainer_kwargs: Dict[str, Any] = {},
+        **kwargs
+    ):
         super().__init__(**kwargs)
-        model_training_params = self.freqai_info.get("model_training_parameters", {})
-        self.learning_rate: float = model_training_params.get("learning_rate", 3e-4)
-        self.model_kwargs: Dict[str, any] = model_training_params.get("model_kwargs", {})
-        self.trainer_kwargs: Dict[str, any] = model_training_params.get("trainer_kwargs", {})
+        config = self.freqai_info.get("model_training_parameters", {})
+        self.learning_rate: float = config.get("learning_rate", learning_rate)
+        self.model_kwargs: Dict[str, Any] = config.get("model_kwargs", model_kwargs)
+        self.trainer_kwargs: Dict[str, Any] = config.get("trainer_kwargs", trainer_kwargs)
 
     def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """
diff --git a/freqtrade/freqai/prediction_models/PyTorchMLPRegressor.py b/freqtrade/freqai/prediction_models/PyTorchMLPRegressor.py
index 4685c332a..2118c27e1 100644
--- a/freqtrade/freqai/prediction_models/PyTorchMLPRegressor.py
+++ b/freqtrade/freqai/prediction_models/PyTorchMLPRegressor.py
@@ -41,12 +41,18 @@ class PyTorchMLPRegressor(PyTorchRegressor):
 
     """
 
-    def __init__(self, **kwargs):
+    def __init__(
+        self,
+        learning_rate: float = 3e-4,
+        model_kwargs: Dict[str, Any] = {},
+        trainer_kwargs: Dict[str, Any] = {},
+        **kwargs
+    ):
         super().__init__(**kwargs)
-        model_training_params = self.freqai_info.get("model_training_parameters", {})
-        self.learning_rate: float = model_training_params.get("learning_rate", 3e-4)
-        self.model_kwargs: Dict[str, any] = model_training_params.get("model_kwargs", {})
-        self.trainer_kwargs: Dict[str, any] = model_training_params.get("trainer_kwargs", {})
+        config = self.freqai_info.get("model_training_parameters", {})
+        self.learning_rate: float = config.get("learning_rate", learning_rate)
+        self.model_kwargs: Dict[str, Any] = config.get("model_kwargs", model_kwargs)
+        self.trainer_kwargs: Dict[str, Any] = config.get("trainer_kwargs", trainer_kwargs)
 
     def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """