From f7a5b2cb71b8a766cdb003877f4b5f1137d5ef56 Mon Sep 17 00:00:00 2001
From: Italo <45588475+italodamato@users.noreply.github.com>
Date: Tue, 25 Jan 2022 11:43:40 +0000
Subject: [PATCH] explicit dimensions, added **kwargs, updated docs

---
 docs/advanced-hyperopt.md                | 26 ++++++++++++++++++++++++--
 freqtrade/optimize/hyperopt.py           |  2 +-
 freqtrade/optimize/hyperopt_auto.py      |  4 ++--
 freqtrade/optimize/hyperopt_interface.py |  2 +-
 4 files changed, 28 insertions(+), 6 deletions(-)

diff --git a/docs/advanced-hyperopt.md b/docs/advanced-hyperopt.md
index 9ac31bf16..dff8dde1d 100644
--- a/docs/advanced-hyperopt.md
+++ b/docs/advanced-hyperopt.md
@@ -105,7 +105,7 @@ You can define your own estimator for Hyperopt by implementing `generate_estimator`
 ```python
 class MyAwesomeStrategy(IStrategy):
     class HyperOpt:
-        def generate_estimator():
+        def generate_estimator(dimensions, **kwargs):
             return "RF"
 
 ```
@@ -119,13 +119,35 @@ Example for `ExtraTreesRegressor` ("ET") with additional parameters:
 ```python
 class MyAwesomeStrategy(IStrategy):
     class HyperOpt:
-        def generate_estimator():
+        def generate_estimator(dimensions, **kwargs):
             from skopt.learning import ExtraTreesRegressor
             # Corresponds to "ET" - but allows additional parameters.
             return ExtraTreesRegressor(n_estimators=100)
 
 ```
 
+The `dimensions` parameter is the list of `skopt.space.Dimension` objects corresponding to the parameters to be optimized. It can be used to create anisotropic kernels for the `skopt.learning.GaussianProcessRegressor` estimator. Here's an example:
+
+```python
+class MyAwesomeStrategy(IStrategy):
+    class HyperOpt:
+        def generate_estimator(dimensions, **kwargs):
+            import numpy as np
+            from skopt.utils import cook_estimator
+            from skopt.learning.gaussian_process.kernels import (Matern, ConstantKernel)
+            kernel_bounds = (0.0001, 10000)
+            kernel = (
+                ConstantKernel(1.0, kernel_bounds) *
+                Matern(length_scale=np.ones(len(dimensions)), length_scale_bounds=[kernel_bounds for d in dimensions], nu=2.5)
+            )
+            kernel += (
+                ConstantKernel(1.0, kernel_bounds) *
+                Matern(length_scale=np.ones(len(dimensions)), length_scale_bounds=[kernel_bounds for d in dimensions], nu=1.5)
+            )
+
+            return cook_estimator("GP", space=dimensions, kernel=kernel, n_restarts_optimizer=2)
+```
+
 !!! Note
     While custom estimators can be provided, it's up to you as User to do research on possible parameters and analyze / understand which ones should be used. If you're unsure about this, best use one of the Defaults (`"ET"` has proven to be the most versatile) without further parameters.
 
diff --git a/freqtrade/optimize/hyperopt.py b/freqtrade/optimize/hyperopt.py
index 209edd157..9664e6f07 100644
--- a/freqtrade/optimize/hyperopt.py
+++ b/freqtrade/optimize/hyperopt.py
@@ -367,7 +367,7 @@ class Hyperopt:
         }
 
     def get_optimizer(self, dimensions: List[Dimension], cpu_count) -> Optimizer:
-        estimator = self.custom_hyperopt.generate_estimator(dimensions)
+        estimator = self.custom_hyperopt.generate_estimator(dimensions=dimensions)
 
         acq_optimizer = "sampling"
         if isinstance(estimator, str):
diff --git a/freqtrade/optimize/hyperopt_auto.py b/freqtrade/optimize/hyperopt_auto.py
index e7843ff55..5bc0af42b 100644
--- a/freqtrade/optimize/hyperopt_auto.py
+++ b/freqtrade/optimize/hyperopt_auto.py
@@ -91,5 +91,5 @@ class HyperOptAuto(IHyperOpt):
     def trailing_space(self) -> List['Dimension']:
         return self._get_func('trailing_space')()
 
-    def generate_estimator(self, dimensions: List['Dimension']) -> EstimatorType:
-        return self._get_func('generate_estimator')(dimensions)
+    def generate_estimator(self, dimensions: List['Dimension'], **kwargs) -> EstimatorType:
+        return self._get_func('generate_estimator')(dimensions=dimensions, **kwargs)
diff --git a/freqtrade/optimize/hyperopt_interface.py b/freqtrade/optimize/hyperopt_interface.py
index 53b4f087c..1611970db 100644
--- a/freqtrade/optimize/hyperopt_interface.py
+++ b/freqtrade/optimize/hyperopt_interface.py
@@ -40,7 +40,7 @@ class IHyperOpt(ABC):
         IHyperOpt.ticker_interval = str(config['timeframe'])  # DEPRECATED
         IHyperOpt.timeframe = str(config['timeframe'])
 
-    def generate_estimator(self) -> EstimatorType:
+    def generate_estimator(self, dimensions, **kwargs) -> EstimatorType:
         """
         Return base_estimator.
         Can be any of "GP", "RF", "ET", "GBRT" or an instance of a class
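For strategy authors, the practical effect of this patch is that `generate_estimator` is now called with `dimensions` as a keyword argument, while `**kwargs` only reserves room for future extensions (the call sites above pass nothing extra yet). A minimal sketch of an override compatible with the new calling convention; the `RandomForestRegressor` choice and its parameters are illustrative assumptions, not part of the patch:

```python
from freqtrade.strategy import IStrategy


class MyAwesomeStrategy(IStrategy):
    class HyperOpt:
        def generate_estimator(dimensions, **kwargs):
            # `dimensions` is the list of skopt.space.Dimension objects being optimized.
            # No extra keyword arguments are passed yet; **kwargs keeps the signature
            # forward-compatible with later freqtrade versions.
            from skopt.learning import RandomForestRegressor

            # Illustrative only: the "RF" estimator with explicit parameters
            # instead of the plain string shortcut.
            return RandomForestRegressor(n_estimators=100, n_jobs=-1)
```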