explicit dimensions, added **kwargs, updated docs

Italo 2022-01-25 11:43:40 +00:00
parent a4dbdb549d
commit f7a5b2cb71
4 changed files with 27 additions and 6 deletions

View File

@@ -105,7 +105,7 @@ You can define your own estimator for Hyperopt by implementing `generate_estimator`
```python
class MyAwesomeStrategy(IStrategy):
    class HyperOpt:
-        def generate_estimator():
+        def generate_estimator(dimensions, **kwargs):
            return "RF"
```
@@ -119,13 +119,34 @@ Example for `ExtraTreesRegressor` ("ET") with additional parameters:
```python
class MyAwesomeStrategy(IStrategy):
    class HyperOpt:
-        def generate_estimator():
+        def generate_estimator(dimensions, **kwargs):
            from skopt.learning import ExtraTreesRegressor
            # Corresponds to "ET" - but allows additional parameters.
            return ExtraTreesRegressor(n_estimators=100)
```
+The `dimensions` parameter is the list of `skopt.space.Dimension` objects corresponding to the parameters to be optimized. It can be used to create isotropic kernels for the `skopt.learning.GaussianProcessRegressor` estimator. Here's an example:
+
+```python
+class MyAwesomeStrategy(IStrategy):
+    class HyperOpt:
+        def generate_estimator(dimensions, **kwargs):
+            import numpy as np
+            from skopt.utils import cook_estimator
+            from skopt.learning.gaussian_process.kernels import (Matern, ConstantKernel)
+            kernel_bounds = (0.0001, 10000)
+            kernel = (
+                ConstantKernel(1.0, kernel_bounds) *
+                Matern(length_scale=np.ones(len(dimensions)), length_scale_bounds=[kernel_bounds for d in dimensions], nu=2.5)
+            )
+            kernel += (
+                ConstantKernel(1.0, kernel_bounds) *
+                Matern(length_scale=np.ones(len(dimensions)), length_scale_bounds=[kernel_bounds for d in dimensions], nu=1.5)
+            )
+            return cook_estimator("GP", space=dimensions, kernel=kernel, n_restarts_optimizer=2)
+```
!!! Note
    While custom estimators can be provided, it is up to you as the user to research the possible parameters and to understand which ones should be used.
    If you're unsure about this, it is best to use one of the defaults (`"ET"` has proven to be the most versatile) without further parameters.
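
For illustration only (this is not part of the commit's diff), the sketch below uses the new `generate_estimator(dimensions, **kwargs)` signature to pick an estimator based on the size of the search space. The 20-dimension cutoff and the estimator choices are arbitrary assumptions; any of the documented shortcuts ("GP", "RF", "ET", "GBRT") or a regressor instance can be returned.

```python
class MyAwesomeStrategy(IStrategy):
    class HyperOpt:
        def generate_estimator(dimensions, **kwargs):
            # `dimensions` is the list of skopt.space.Dimension objects being optimized.
            # Accepting **kwargs keeps this hook compatible with future optional arguments.
            from skopt.learning import ExtraTreesRegressor

            if len(dimensions) > 20:
                # Arbitrary cutoff: tree ensembles stay cheap to fit on large spaces.
                return ExtraTreesRegressor(n_estimators=100)
            # Smaller spaces: fall back to the built-in Gaussian Process estimator.
            return "GP"
```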

View File

@@ -367,7 +367,7 @@ class Hyperopt:
        }

    def get_optimizer(self, dimensions: List[Dimension], cpu_count) -> Optimizer:
-        estimator = self.custom_hyperopt.generate_estimator(dimensions)
+        estimator = self.custom_hyperopt.generate_estimator(dimensions=dimensions)
        acq_optimizer = "sampling"
        if isinstance(estimator, str):

View File

@@ -91,5 +91,5 @@ class HyperOptAuto(IHyperOpt):
    def trailing_space(self) -> List['Dimension']:
        return self._get_func('trailing_space')()

-    def generate_estimator(self, dimensions: List['Dimension']) -> EstimatorType:
-        return self._get_func('generate_estimator')(dimensions)
+    def generate_estimator(self, dimensions: List['Dimension'], **kwargs) -> EstimatorType:
+        return self._get_func('generate_estimator')(dimensions=dimensions, **kwargs)

View File

@@ -40,7 +40,7 @@ class IHyperOpt(ABC):
        IHyperOpt.ticker_interval = str(config['timeframe'])  # DEPRECATED
        IHyperOpt.timeframe = str(config['timeframe'])

-    def generate_estimator(self) -> EstimatorType:
+    def generate_estimator(self, dimensions, **kwargs) -> EstimatorType:
        """
        Return base_estimator.
        Can be any of "GP", "RF", "ET", "GBRT" or an instance of a class
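
As a hedged sketch (not part of this commit), a subclass overriding the updated hook would now accept `dimensions` plus `**kwargs`. The class name and import paths below are assumptions for illustration only.

```python
from typing import List

from skopt.space import Dimension

# Import path assumed; adjust to wherever IHyperOpt and EstimatorType are defined.
from freqtrade.optimize.hyperopt_interface import EstimatorType, IHyperOpt


class MyLegacyHyperOpt(IHyperOpt):
    def generate_estimator(self, dimensions: List[Dimension], **kwargs) -> EstimatorType:
        # Extra keyword arguments are accepted and ignored so that new optional
        # parameters passed by the caller do not break this override.
        return "GBRT"
```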