paying closer attention to managing live retraining on a separate thread without affecting prediction of other coins on the master thread

robcaulk
2022-05-24 12:01:01 +02:00
parent b0d2d13eb1
commit 059c285425
4 changed files with 139 additions and 118 deletions
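
The commit message describes letting one pair retrain on a background thread while the master thread keeps serving predictions for the other pairs. The diff below only shows the data-kitchen plumbing for that change; as context, a minimal sketch of the general pattern (not freqtrade's actual implementation; `train_fn`, `predict_fn`, and `RetrainManager` are hypothetical names) could look like this:

    import threading

    class RetrainManager:
        """Illustrative only: retrain per-pair models off the main thread,
        keep predicting with the last finished model in the meantime."""

        def __init__(self):
            self.models = {}              # pair -> most recently trained model
            self.lock = threading.Lock()  # guards models and training state
            self.training = set()         # pairs currently retraining

        def maybe_retrain(self, pair, train_fn, data):
            # Start a worker thread only if this pair is idle, so the
            # master thread never blocks on training.
            with self.lock:
                if pair in self.training:
                    return
                self.training.add(pair)
            threading.Thread(target=self._retrain,
                             args=(pair, train_fn, data),
                             daemon=True).start()

        def _retrain(self, pair, train_fn, data):
            model = train_fn(data)        # long-running training off the main thread
            with self.lock:
                self.models[pair] = model # swap in the fresh model atomically
                self.training.discard(pair)

        def predict(self, pair, features, predict_fn):
            # Master thread: always use the latest completed model for this pair.
            with self.lock:
                model = self.models.get(pair)
            return predict_fn(model, features) if model is not None else None
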


@@ -91,14 +91,15 @@ class FreqaiDataKitchen:
         assert config.get('freqai', {}).get('feature_parameters'), ("No Freqai feature_parameters"
                                                                     "found in config file.")
 
-    def set_paths(self, trained_timestamp: int = None) -> None:
+    def set_paths(self, metadata: dict, trained_timestamp: int = None,) -> None:
         self.full_path = Path(self.config['user_data_dir'] /
                               "models" /
                               str(self.freqai_config.get('live_full_backtestrange') +
                                   self.freqai_config.get('identifier')))
-        self.data_path = Path(self.full_path / str("sub-train" + "-" + self.pair.split("/")[0] +
-                                                   str(trained_timestamp)))
+        self.data_path = Path(self.full_path / str("sub-train" + "-" +
+                                                   metadata['pair'].split("/")[0] +
+                                                   str(trained_timestamp)))
 
         return
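
The visible change is that set_paths now takes the strategy's metadata dict and derives the pair from metadata['pair'] instead of a stored self.pair attribute, so one kitchen instance no longer depends on thread-local pair state. A hedged sketch of a call site (illustrative only; the constructor arguments and timestamp value are assumed, not taken from this commit):

    # Assumed example call: the caller passes the strategy's metadata dict.
    metadata = {'pair': 'BTC/USDT'}                       # as provided by the strategy
    dk = FreqaiDataKitchen(config)                        # constructor args assumed
    dk.set_paths(metadata, trained_timestamp=1653379261)  # example timestamp
    # dk.data_path now ends in "sub-train-BTC1653379261" under the models folder
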