move default attributes of pytorch classifier to initializer

to prevent mypy from complaining
Yinon Polak 2023-03-20 19:40:36 +02:00
parent 6b4d9f97c1
commit 0a55753faf
2 changed files with 22 additions and 10 deletions


@@ -41,12 +41,18 @@ class PyTorchMLPClassifier(PyTorchClassifier):
     """

-    def __init__(self, **kwargs):
+    def __init__(
+        self,
+        learning_rate: float = 3e-4,
+        model_kwargs: Dict[str, Any] = {},
+        trainer_kwargs: Dict[str, Any] = {},
+        **kwargs
+    ):
         super().__init__(**kwargs)
-        model_training_params = self.freqai_info.get("model_training_parameters", {})
-        self.learning_rate: float = model_training_params.get("learning_rate", 3e-4)
-        self.model_kwargs: Dict[str, any] = model_training_params.get("model_kwargs", {})
-        self.trainer_kwargs: Dict[str, any] = model_training_params.get("trainer_kwargs", {})
+        config = self.freqai_info.get("model_training_parameters", {})
+        self.learning_rate: float = config.get("learning_rate", learning_rate)
+        self.model_kwargs: Dict[str, any] = config.get("model_kwargs", model_kwargs)
+        self.trainer_kwargs: Dict[str, any] = config.get("trainer_kwargs", trainer_kwargs)

     def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """


@@ -41,12 +41,18 @@ class PyTorchMLPRegressor(PyTorchRegressor):
     """

-    def __init__(self, **kwargs):
+    def __init__(
+        self,
+        learning_rate: float = 3e-4,
+        model_kwargs: Dict[str, Any] = {},
+        trainer_kwargs: Dict[str, Any] = {},
+        **kwargs
+    ):
         super().__init__(**kwargs)
-        model_training_params = self.freqai_info.get("model_training_parameters", {})
-        self.learning_rate: float = model_training_params.get("learning_rate", 3e-4)
-        self.model_kwargs: Dict[str, any] = model_training_params.get("model_kwargs", {})
-        self.trainer_kwargs: Dict[str, any] = model_training_params.get("trainer_kwargs", {})
+        config = self.freqai_info.get("model_training_parameters", {})
+        self.learning_rate: float = config.get("learning_rate", learning_rate)
+        self.model_kwargs: Dict[str, any] = config.get("model_kwargs", model_kwargs)
+        self.trainer_kwargs: Dict[str, any] = config.get("trainer_kwargs", trainer_kwargs)

     def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """