Update docstring format

Matthias 2022-10-10 12:15:30 +00:00
parent 60de192d47
commit eaae9c9e03
6 changed files with 44 additions and 52 deletions

View File

@@ -78,7 +78,7 @@ class BaseClassifierModel(IFreqaiModel):
     ) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
         """
         Filter the prediction features data and predict with it.
-        :param: unfiltered_df: Full dataframe for the current backtest period.
+        :param unfiltered_df: Full dataframe for the current backtest period.
         :return:
         :pred_df: dataframe containing the predictions
         :do_predict: np.array of 1s and 0s to indicate places where freqai needed to remove
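The same `(pred_df, do_predict)` convention appears in several docstrings in this commit. As a rough illustration only (the `model` and `feature_columns` names are hypothetical, not freqai internals), the pattern could be sketched like this:

```python
import pandas as pd


def predict_with_mask(unfiltered_df: pd.DataFrame, model, feature_columns: list):
    """Illustrative sketch of the (pred_df, do_predict) return pattern described above."""
    features = unfiltered_df[feature_columns]
    # do_predict is 1 for clean rows and 0 for rows that contained a NaN, so callers
    # can shield the user from predictions made on filled-in data.
    do_predict = (~features.isnull().any(axis=1)).to_numpy().astype(int)
    # Predictions are still produced for every row; NaNs are replaced with 0s first.
    preds = model.predict(features.fillna(0))
    pred_df = pd.DataFrame({"prediction": preds}, index=unfiltered_df.index)
    return pred_df, do_predict
```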

View File

@@ -77,7 +77,7 @@ class BaseRegressionModel(IFreqaiModel):
     ) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
         """
         Filter the prediction features data and predict with it.
-        :param: unfiltered_df: Full dataframe for the current backtest period.
+        :param unfiltered_df: Full dataframe for the current backtest period.
         :return:
         :pred_df: dataframe containing the predictions
         :do_predict: np.array of 1s and 0s to indicate places where freqai needed to remove

View File

@@ -412,9 +412,8 @@ class FreqaiDataDrawer:
     def save_data(self, model: Any, coin: str, dk: FreqaiDataKitchen) -> None:
         """
         Saves all data associated with a model for a single sub-train time range
-        :params:
-        :model: User trained model which can be reused for inferencing to generate
-        predictions
+        :param model: User trained model which can be reused for inferencing to generate
+                      predictions
         """
         if not dk.data_path.is_dir():
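Not the actual freqai code path, but a minimal sketch of the save pattern this docstring describes, assuming joblib for the model artifact and a JSON file for the metadata (both assumptions):

```python
import json
from pathlib import Path

import joblib


def save_model_artifacts(model, metadata: dict, data_path: Path, coin: str) -> None:
    """Hypothetical sketch: persist a trained model plus metadata for one sub-train window."""
    data_path.mkdir(parents=True, exist_ok=True)
    # The fitted model can later be reloaded and reused for inferencing without retraining.
    joblib.dump(model, data_path / f"{coin}_model.joblib")
    # Keep whatever is needed to reproduce the prediction pipeline (feature list, train dates, ...).
    with open(data_path / f"{coin}_metadata.json", "w") as fp:
        json.dump(metadata, fp, default=str)
```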
@@ -532,8 +531,7 @@ class FreqaiDataDrawer:
         Append new candles to our stores historic data (in memory) so that
         we do not need to load candle history from disk and we dont need to
         pinging exchange multiple times for the same candle.
-        :params:
-        dataframe: DataFrame = strategy provided dataframe
+        :param dataframe: DataFrame = strategy provided dataframe
         """
         feat_params = self.freqai_info["feature_parameters"]
         with self.history_lock:
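A rough sketch of the in-memory candle append described above, assuming freqtrade-style OHLCV frames with a `date` column (the function name is hypothetical):

```python
import pandas as pd


def append_new_candles(historic_df: pd.DataFrame, incoming_df: pd.DataFrame) -> pd.DataFrame:
    """Hypothetical sketch: keep the in-memory candle cache current without re-downloading history."""
    # Only candles newer than what is already cached get appended.
    last_cached = historic_df["date"].iloc[-1]
    new_rows = incoming_df[incoming_df["date"] > last_cached]
    # Concatenate and reset the index so the cache stays one contiguous OHLCV frame.
    return pd.concat([historic_df, new_rows], axis=0, ignore_index=True)
```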
@@ -579,9 +577,8 @@ class FreqaiDataDrawer:
         """
         Load pair histories for all whitelist and corr_pairlist pairs.
         Only called once upon startup of bot.
-        :params:
-        timerange: TimeRange = full timerange required to populate all indicators
-        for training according to user defined train_period_days
+        :param timerange: TimeRange = full timerange required to populate all indicators
+                          for training according to user defined train_period_days
         """
         history_data = self.historic_data
@@ -604,10 +601,9 @@ class FreqaiDataDrawer:
         """
         Searches through our historic_data in memory and returns the dataframes relevant
         to the present pair.
-        :params:
-        timerange: TimeRange = full timerange required to populate all indicators
-        for training according to user defined train_period_days
-        metadata: dict = strategy furnished pair metadata
+        :param timerange: TimeRange = full timerange required to populate all indicators
+                          for training according to user defined train_period_days
+        :param metadata: dict = strategy furnished pair metadata
         """
         with self.history_lock:
             corr_dataframes: Dict[Any, Any] = {}
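The two hunks above both revolve around pulling per-pair views out of the in-memory history for a training timerange. A hedged sketch, assuming the store is a plain nested dict keyed by pair and timeframe (an assumption, not freqai's actual layout):

```python
import pandas as pd


def slice_by_timerange(df: pd.DataFrame, start: pd.Timestamp, stop: pd.Timestamp) -> pd.DataFrame:
    """Hypothetical sketch: return only the candles that fall inside the requested timerange."""
    mask = (df["date"] >= start) & (df["date"] <= stop)
    return df.loc[mask].reset_index(drop=True)


# Usage idea (store layout is an assumption):
# base_df = slice_by_timerange(historic_data["BTC/USDT"]["5m"], start, stop)
```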

View File

@@ -107,9 +107,8 @@ class FreqaiDataKitchen:
     ) -> None:
         """
         Set the paths to the data for the present coin/botloop
-        :params:
-        metadata: dict = strategy furnished pair metadata
-        trained_timestamp: int = timestamp of most recent training
+        :param metadata: dict = strategy furnished pair metadata
+        :param trained_timestamp: int = timestamp of most recent training
         """
         self.full_path = Path(
             self.config["user_data_dir"] / "models" / str(self.freqai_config.get("identifier"))
@@ -129,8 +128,8 @@ class FreqaiDataKitchen:
         Given the dataframe for the full history for training, split the data into
         training and test data according to user specified parameters in configuration
         file.
-        :filtered_dataframe: cleaned dataframe ready to be split.
-        :labels: cleaned labels ready to be split.
+        :param filtered_dataframe: cleaned dataframe ready to be split.
+        :param labels: cleaned labels ready to be split.
         """
         feat_dict = self.freqai_config["feature_parameters"]
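As a sketch only (freqai has its own splitting logic), the train/test split this docstring describes could be expressed with scikit-learn, with the ratio and shuffle flag standing in for the user's configuration values:

```python
import pandas as pd
from sklearn.model_selection import train_test_split


def split_train_test(features: pd.DataFrame, labels: pd.DataFrame,
                     test_size: float = 0.25, shuffle: bool = False):
    """Hypothetical sketch: split cleaned features/labels using config-style parameters."""
    # shuffle=False preserves chronological order, which usually matters for time-series data;
    # in freqai the ratio and shuffle flag come from the user's configuration file.
    return train_test_split(features, labels, test_size=test_size, shuffle=shuffle)
```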
@@ -189,13 +188,14 @@ class FreqaiDataKitchen:
         remove all NaNs. Any row with a NaN is removed from training dataset or replaced with
         0s in the prediction dataset. However, prediction dataset do_predict will reflect any
         row that had a NaN and will shield user from that prediction.
-        :params:
-        :unfiltered_df: the full dataframe for the present training period
-        :training_feature_list: list, the training feature list constructed by
-        self.build_feature_list() according to user specified parameters in the configuration file.
-        :labels: the labels for the dataset
-        :training_filter: boolean which lets the function know if it is training data or
-        prediction data to be filtered.
+        :param unfiltered_df: the full dataframe for the present training period
+        :param training_feature_list: list, the training feature list constructed by
+                                      self.build_feature_list() according to user specified
+                                      parameters in the configuration file.
+        :param labels: the labels for the dataset
+        :param training_filter: boolean which lets the function know if it is training data or
+                                prediction data to be filtered.
         :returns:
         :filtered_df: dataframe cleaned of NaNs and only containing the user
         requested feature set.
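A compact, hypothetical illustration of the NaN filtering and `do_predict` bookkeeping this docstring describes (not the real `filter_features` implementation):

```python
import pandas as pd


def filter_prediction_features(unfiltered_df: pd.DataFrame, feature_list: list):
    """Hypothetical sketch of the NaN handling described in the docstring above."""
    filtered_df = unfiltered_df[feature_list]
    nan_rows = filtered_df.isnull().any(axis=1)
    # 1 where the row was clean, 0 where a NaN was found and replaced with 0s,
    # so downstream code can shield the user from those predictions.
    do_predict = (~nan_rows).to_numpy().astype(int)
    return filtered_df.fillna(0), do_predict
```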
@@ -285,8 +285,8 @@ class FreqaiDataKitchen:
     def normalize_data(self, data_dictionary: Dict) -> Dict[Any, Any]:
         """
         Normalize all data in the data_dictionary according to the training dataset
-        :params:
-        :data_dictionary: dictionary containing the cleaned and split training/test data/labels
+        :param data_dictionary: dictionary containing the cleaned and
+                                split training/test data/labels
         :returns:
         :data_dictionary: updated dictionary with standardized values.
         """
@@ -516,8 +516,7 @@ class FreqaiDataKitchen:
     def pca_transform(self, filtered_dataframe: DataFrame) -> None:
         """
         Use an existing pca transform to transform data into components
-        :params:
-        filtered_dataframe: DataFrame = the cleaned dataframe
+        :param filtered_dataframe: DataFrame = the cleaned dataframe
         """
         pca_components = self.pca.transform(filtered_dataframe)
         self.data_dictionary["prediction_features"] = pd.DataFrame(
@@ -561,8 +560,7 @@ class FreqaiDataKitchen:
         """
         Build/inference a Support Vector Machine to detect outliers
         in training data and prediction
-        :params:
-        predict: bool = If true, inference an existing SVM model, else construct one
+        :param predict: bool = If true, inference an existing SVM model, else construct one
         """
         if self.keras:
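A hedged sketch of the one-class SVM outlier idea using scikit-learn's SGDOneClassSVM; the actual freqai hyperparameters and wiring differ:

```python
import numpy as np
from sklearn.linear_model import SGDOneClassSVM

# Random stand-in for the cleaned training feature matrix.
train_features = np.random.rand(500, 8)

# Training path (predict=False): construct and fit a one-class SVM on the training data.
svm = SGDOneClassSVM(nu=0.1).fit(train_features)

# predict() returns +1 for inliers and -1 for points the model treats as outliers.
is_outlier = svm.predict(train_features) == -1
print(f"{is_outlier.mean():.1%} of training points flagged as outliers")

# Prediction path (predict=True): the already fitted `svm` would simply be applied
# to the new, cleaned feature rows instead of being refit.
```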
@@ -647,11 +645,11 @@ class FreqaiDataKitchen:
         Use DBSCAN to cluster training data and remove "noisy" data (read outliers).
         User controls this via the config param `DBSCAN_outlier_pct` which indicates the
         pct of training data that they want to be considered outliers.
-        :params:
-        predict: bool = If False (training), iterate to find the best hyper parameters to match
-        user requested outlier percent target. If True (prediction), use the parameters
-        determined from the previous training to estimate if the current prediction point
-        is an outlier.
+        :param predict: bool = If False (training), iterate to find the best hyper parameters
+                               to match user requested outlier percent target.
+                               If True (prediction), use the parameters determined from
+                               the previous training to estimate if the current prediction point
+                               is an outlier.
         """
         if predict:
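And a similar sketch for the DBSCAN path: points labelled -1 are treated as outliers, and `eps` is the kind of knob one would iterate on to hit a requested outlier percentage (the iteration itself is omitted here):

```python
import numpy as np
from sklearn.cluster import DBSCAN

# Random stand-in for the cleaned training feature matrix.
train_features = np.random.rand(500, 8)

# Points DBSCAN labels -1 are "noise", i.e. the outliers to drop from training data.
clustering = DBSCAN(eps=0.5, min_samples=10).fit(train_features)
is_outlier = clustering.labels_ == -1
print(f"{is_outlier.mean():.1%} of training points flagged as outliers")

# In training mode, eps (and/or min_samples) would be iterated until the flagged
# percentage matches the user's requested outlier target; in prediction mode, the
# parameters found during training would be reused to judge the incoming point.
```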
@@ -1118,15 +1116,13 @@ class FreqaiDataKitchen:
         prediction_dataframe: DataFrame = pd.DataFrame(),
     ) -> DataFrame:
         """
-        Use the user defined strategy for populating indicators during
-        retrain
-        :params:
-        strategy: IStrategy = user defined strategy object
-        corr_dataframes: dict = dict containing the informative pair dataframes
-        (for user defined timeframes)
-        base_dataframes: dict = dict containing the current pair dataframes
-        (for user defined timeframes)
-        metadata: dict = strategy furnished pair metadata
+        Use the user defined strategy for populating indicators during retrain
+        :param strategy: IStrategy = user defined strategy object
+        :param corr_dataframes: dict = dict containing the informative pair dataframes
+                                (for user defined timeframes)
+        :param base_dataframes: dict = dict containing the current pair dataframes
+                                (for user defined timeframes)
+        :param metadata: dict = strategy furnished pair metadata
         :returns:
         dataframe: DataFrame = dataframe containing populated indicators
         """

View File

@@ -603,11 +603,11 @@ class IFreqaiModel(ABC):
         If the user reuses an identifier on a subsequent instance,
         this function will not be called. In that case, "real" predictions
         will be appended to the loaded set of historic predictions.
-        :param: df: DataFrame = the dataframe containing the training feature data
-        :param: model: Any = A model which was `fit` using a common library such as
+        :param df: DataFrame = the dataframe containing the training feature data
+        :param model: Any = A model which was `fit` using a common library such as
         catboost or lightgbm
-        :param: dk: FreqaiDataKitchen = object containing methods for data analysis
-        :param: pair: str = current pair
+        :param dk: FreqaiDataKitchen = object containing methods for data analysis
+        :param pair: str = current pair
         """
         self.dd.historic_predictions[pair] = pred_df

View File

@@ -64,7 +64,7 @@ class XGBoostClassifier(BaseClassifierModel):
     ) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
         """
         Filter the prediction features data and predict with it.
-        :param: unfiltered_df: Full dataframe for the current backtest period.
+        :param unfiltered_df: Full dataframe for the current backtest period.
         :return:
         :pred_df: dataframe containing the predictions
         :do_predict: np.array of 1s and 0s to indicate places where freqai needed to remove