Merge branch 'freqtrade:develop' into develop

Authored by lolong on 2022-10-23 10:09:08 +02:00, committed by GitHub
44 changed files with 561 additions and 178 deletions


@@ -51,7 +51,7 @@ class BaseClassifierModel(IFreqaiModel):
                     f"{end_date} --------------------")
         # split data into train/test data.
         data_dictionary = dk.make_train_test_datasets(features_filtered, labels_filtered)
-        if not self.freqai_info.get("fit_live_predictions", 0) or not self.live:
+        if not self.freqai_info.get("fit_live_predictions_candles", 0) or not self.live:
             dk.fit_labels()
         # normalize all data based on train_dataset only
         data_dictionary = dk.normalize_data(data_dictionary)


@@ -50,7 +50,7 @@ class BaseRegressionModel(IFreqaiModel):
                     f"{end_date} --------------------")
         # split data into train/test data.
         data_dictionary = dk.make_train_test_datasets(features_filtered, labels_filtered)
-        if not self.freqai_info.get("fit_live_predictions", 0) or not self.live:
+        if not self.freqai_info.get("fit_live_predictions_candles", 0) or not self.live:
             dk.fit_labels()
         # normalize all data based on train_dataset only
         data_dictionary = dk.normalize_data(data_dictionary)


@@ -47,7 +47,7 @@ class BaseTensorFlowModel(IFreqaiModel):
                     f"{end_date} --------------------")
         # split data into train/test data.
         data_dictionary = dk.make_train_test_datasets(features_filtered, labels_filtered)
-        if not self.freqai_info.get("fit_live_predictions", 0) or not self.live:
+        if not self.freqai_info.get("fit_live_predictions_candles", 0) or not self.live:
             dk.fit_labels()
         # normalize all data based on train_dataset only
         data_dictionary = dk.normalize_data(data_dictionary)
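
The same one-line fix appears in all three base models above: the config lookup now targets "fit_live_predictions_candles", the key FreqAI configurations actually define, so the .get() presumably no longer falls back to 0 on every run. A minimal sketch of the relevant config section (values are illustrative, not taken from this commit):

config = {
    "freqai": {
        # number of historical candles used to fit label statistics while live
        "fit_live_predictions_candles": 300,
        "feature_parameters": {
            "DI_threshold": 0.9,
        },
    },
}

# self.freqai_info holds this "freqai" sub-dict, so with the corrected key the
# check in the hunks above evaluates to False during live/dry runs and
# dk.fit_labels() is skipped in favour of fitting label statistics from live
# predictions.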


@@ -971,6 +971,9 @@ class FreqaiDataKitchen:
             append_df[f"{label}_mean"] = self.data["labels_mean"][label]
             append_df[f"{label}_std"] = self.data["labels_std"][label]
 
+        for extra_col in self.data["extra_returns_per_train"]:
+            append_df[f"{extra_col}"] = self.data["extra_returns_per_train"][extra_col]
+
         append_df["do_predict"] = do_predict
         if self.freqai_config["feature_parameters"].get("DI_threshold", 0) > 0:
             append_df["DI_values"] = self.DI_values
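
The added loop copies any values a prediction model has stashed in dk.data["extra_returns_per_train"] into append_df, so they reach the strategy as ordinary dataframe columns. A hedged sketch of how that dictionary is typically filled (the subclass name and metric are illustrative, and the fit() signature is assumed to match the Catboost models elsewhere in this commit):

class MyCatboostRegressor(CatboostRegressor):
    def fit(self, data_dictionary, dk, **kwargs):
        model = super().fit(data_dictionary, dk, **kwargs)
        # anything stored here becomes a column of the same name in the
        # prediction dataframe assembled above
        dk.data["extra_returns_per_train"]["my_custom_metric"] = 0.42
        return model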


@@ -1,4 +1,5 @@
 import logging
+import sys
 from pathlib import Path
 from typing import Any, Dict
@@ -48,6 +49,7 @@ class CatboostClassifier(BaseClassifierModel):
         init_model = self.get_init_model(dk.pair)
-        cbr.fit(X=train_data, eval_set=test_data, init_model=init_model)
+        cbr.fit(X=train_data, eval_set=test_data, init_model=init_model,
+                log_cout=sys.stdout, log_cerr=sys.stderr)
         return cbr


@@ -1,4 +1,5 @@
 import logging
+import sys
 from pathlib import Path
 from typing import Any, Dict
@@ -47,6 +48,7 @@ class CatboostRegressor(BaseRegressionModel):
             **self.model_training_parameters,
         )
-        model.fit(X=train_data, eval_set=test_data, init_model=init_model)
+        model.fit(X=train_data, eval_set=test_data, init_model=init_model,
+                  log_cout=sys.stdout, log_cerr=sys.stderr)
         return model
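
log_cout and log_cerr are CatBoost fit() arguments that accept file-like objects for the training log; passing sys.stdout and sys.stderr, as both Catboost models above now do, keeps that output on the process streams rather than CatBoost's internal default. A standalone sketch of the mechanism (synthetic data, nothing freqtrade-specific):

import sys
import numpy as np
from catboost import CatBoostRegressor

X = np.random.rand(100, 4)
y = np.random.rand(100)

model = CatBoostRegressor(iterations=10, verbose=2)
# the per-iteration training log is written to the streams handed in here
model.fit(X, y, log_cout=sys.stdout, log_cerr=sys.stderr)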


@@ -1,4 +1,5 @@
 import logging
+import sys
 from pathlib import Path
 from typing import Any, Dict
@@ -58,8 +59,10 @@ class CatboostRegressorMultiTarget(BaseRegressionModel):
         fit_params = []
         for i in range(len(eval_sets)):
-            fit_params.append(
-                {'eval_set': eval_sets[i], 'init_model': init_models[i]})
+            fit_params.append({
+                'eval_set': eval_sets[i], 'init_model': init_models[i],
+                'log_cout': sys.stdout, 'log_cerr': sys.stderr,
+            })
 
         model = FreqaiMultiOutputRegressor(estimator=cbr)
         thread_training = self.freqai_info.get('multitarget_parallel_training', False)
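
Because the multi-target model trains one CatBoost estimator per label, the same log redirection is threaded through fit_params instead: one kwargs dict per target, which FreqaiMultiOutputRegressor is expected to expand into that target's fit() call. A hedged, self-contained sketch of the pattern (synthetic data, with a plain loop standing in for the freqtrade wrapper):

import sys
import numpy as np
from catboost import CatBoostRegressor
from sklearn.base import clone

X = np.random.rand(200, 4)
Y = np.random.rand(200, 2)          # two regression targets
base = CatBoostRegressor(iterations=10, verbose=5)

# one kwargs dict per target, mirroring the fit_params list built above
fit_params = [
    {'log_cout': sys.stdout, 'log_cerr': sys.stderr},
    {'log_cout': sys.stdout, 'log_cerr': sys.stderr},
]

models = []
for i in range(Y.shape[1]):
    est = clone(base)               # CatBoost estimators are scikit-learn compatible
    est.fit(X, Y[:, i], **fit_params[i])
    models.append(est)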