2023-03-19 12:38:49 +00:00
|
|
|
from typing import Any, Dict, Optional
|
|
|
|
|
2023-03-19 13:09:50 +00:00
|
|
|
import torch
|
|
|
|
|
2023-03-19 12:38:49 +00:00
|
|
|
from freqtrade.freqai.base_models.PyTorchModelTrainer import PyTorchModelTrainer
|
|
|
|
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
|
2023-03-20 09:54:17 +00:00
|
|
|
from freqtrade.freqai.prediction_models.PyTorchClassifier import PyTorchClassifier
|
2023-03-19 12:38:49 +00:00
|
|
|
from freqtrade.freqai.prediction_models.PyTorchMLPModel import PyTorchMLPModel
|
|
|
|
|
|
|
|
|
2023-03-20 09:54:17 +00:00
|
|
|
class PyTorchMLPClassifier(PyTorchClassifier):
    """
    This class implements the fit method of IFreqaiModel.
    In the fit method we initialize the model and trainer objects.
    The only requirement from the model is to be aligned to PyTorchClassifier's
    predict method, which expects the model to predict a tensor of type long.

    Parameters are passed via `model_training_parameters` under the freqai
    section in the config file. e.g:
    {
        ...
        "freqai": {
            ...
            "model_training_parameters": {
                "learning_rate": 3e-4,
                "trainer_kwargs": {
                    "max_iters": 5000,
                    "batch_size": 64,
                    "max_n_eval_batches": null
                },
                "model_kwargs": {
                    "hidden_dim": 512,
                    "dropout_percent": 0.2,
                    "n_layer": 1
                }
            }
        }
    }
    """

    def __init__(
        self,
        learning_rate: float = 3e-4,
        model_kwargs: Optional[Dict[str, Any]] = None,
        trainer_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs
    ):
        """
        :param learning_rate: fallback learning rate, used only when the user
            config does not provide one.
        :param model_kwargs: fallback kwargs forwarded to PyTorchMLPModel.
            Defaults to None (treated as an empty dict) to avoid the
            mutable-default-argument pitfall.
        :param trainer_kwargs: fallback kwargs forwarded to PyTorchModelTrainer.
            Defaults to None (treated as an empty dict).
        """
        super().__init__(**kwargs)
        # Values from the user config take precedence over constructor defaults.
        config = self.freqai_info.get("model_training_parameters", {})
        self.learning_rate: float = config.get("learning_rate", learning_rate)
        self.model_kwargs: Dict[str, Any] = config.get("model_kwargs", model_kwargs or {})
        self.trainer_kwargs: Dict[str, Any] = config.get("trainer_kwargs", trainer_kwargs or {})

    def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
        """
        User sets up the training and test data to fit their desired model here.

        :param data_dictionary: the dictionary constructed by DataHandler to hold
            all the training and test data/labels.
        :param dk: the FreqaiDataKitchen object for the current pair.
        :raises ValueError: If self.class_names is not defined in the parent class.
        :return: the fitted PyTorchModelTrainer.
        """
        class_names = self.get_class_names()
        # CrossEntropyLoss expects integer class indices, not string labels.
        self.convert_label_column_to_int(data_dictionary, dk, class_names)
        n_features = data_dictionary["train_features"].shape[-1]
        model = PyTorchMLPModel(
            input_dim=n_features,
            output_dim=len(class_names),
            **self.model_kwargs
        )
        model.to(self.device)
        optimizer = torch.optim.AdamW(model.parameters(), lr=self.learning_rate)
        criterion = torch.nn.CrossEntropyLoss()
        # Continue training from a previously saved model for this pair, if any.
        init_model = self.get_init_model(dk.pair)
        trainer = PyTorchModelTrainer(
            model=model,
            optimizer=optimizer,
            criterion=criterion,
            model_meta_data={"class_names": class_names},
            device=self.device,
            init_model=init_model,
            # Classification targets must be long tensors for CrossEntropyLoss.
            target_tensor_type=torch.long,
            **self.trainer_kwargs,
        )
        trainer.fit(data_dictionary)
        return trainer
|