Add possibility to override estimator from within hyperopt

Matthias 2021-09-15 21:36:53 +02:00
parent 90ad178932
commit c0811ae896
4 changed files with 54 additions and 4 deletions


@@ -98,6 +98,38 @@ class MyAwesomeStrategy(IStrategy):
!!! Note
    All overrides are optional and can be mixed/matched as necessary.
### Overriding Base estimator
You can define your own estimator for Hyperopt by implementing `generate_estimator()` in the Hyperopt subclass.
```python
class MyAwesomeStrategy(IStrategy):
    class HyperOpt:
        def generate_estimator():
            return "RF"
```
Possible values are either one of "GP", "RF", "ET", "GBRT" (details can be found in the [scikit-optimize documentation](https://scikit-optimize.github.io/)), or "an instance of a class that inherits from `RegressorMixin` (from sklearn) and where the `predict` method has an optional `return_std` argument, which returns `std(Y | x)` along with `E[Y | x]`".
Some research will be necessary to find additional regressors.
Example for `ExtraTreesRegressor` ("ET") with additional parameters:
```python
class MyAwesomeStrategy(IStrategy):
    class HyperOpt:
        def generate_estimator():
            from skopt.learning import ExtraTreesRegressor
            # Corresponds to "ET" - but allows additional parameters.
            return ExtraTreesRegressor(n_estimators=100)
```
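For the second option mentioned above - returning an instance of a class inheriting from `RegressorMixin` - a minimal sketch (not part of the official documentation, assuming skopt's `GaussianProcessRegressor`, whose `predict` supports `return_std`) could look like this:
```python
class MyAwesomeStrategy(IStrategy):
    class HyperOpt:
        def generate_estimator():
            # Sketch only: any sklearn-style regressor whose `predict`
            # accepts `return_std=True` should be usable here.
            from skopt.learning import GaussianProcessRegressor
            from sklearn.gaussian_process.kernels import Matern
            return GaussianProcessRegressor(kernel=Matern(length_scale=1.0))
```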
!!! Note
    While custom estimators can be provided, it's up to you as the user to do research on possible parameters and to analyze / understand which ones should be used.
    If you're unsure about this, it's best to use one of the defaults (`"ET"` has proven to be the most versatile) without further parameters.
## Space options
For the additional spaces, scikit-optimize (in combination with Freqtrade) provides the following space types:


```diff
@@ -365,10 +365,14 @@ class Hyperopt:
         }
 
     def get_optimizer(self, dimensions: List[Dimension], cpu_count) -> Optimizer:
+        estimator = self.custom_hyperopt.generate_estimator()
+        logger.info(f"Using estimator {estimator}.")
+        # TODO: Impact of changing acq_optimizer to "sampling" is unclear
+        # (other than that it fails with other optimizers when using custom sklearn regressors)
         return Optimizer(
             dimensions,
-            base_estimator="ET",
-            acq_optimizer="auto",
+            base_estimator=estimator,
+            acq_optimizer="sampling",
             n_initial_points=INITIAL_POINTS,
             acq_optimizer_kwargs={'n_jobs': cpu_count},
             random_state=self.random_state,
```

```diff
@@ -12,7 +12,7 @@ from freqtrade.exceptions import OperationalException
 with suppress(ImportError):
     from skopt.space import Dimension
 
-from freqtrade.optimize.hyperopt_interface import IHyperOpt
+from freqtrade.optimize.hyperopt_interface import EstimatorType, IHyperOpt
 
 
 def _format_exception_message(space: str) -> str:
@@ -79,3 +79,6 @@ class HyperOptAuto(IHyperOpt):
 
     def trailing_space(self) -> List['Dimension']:
         return self._get_func('trailing_space')()
+
+    def generate_estimator(self) -> EstimatorType:
+        return self._get_func('generate_estimator')()
```


```diff
@@ -5,8 +5,9 @@ This module defines the interface to apply for hyperopt
 import logging
 import math
 from abc import ABC
-from typing import Dict, List
+from typing import Dict, List, Union
 
+from sklearn.base import RegressorMixin
 from skopt.space import Categorical, Dimension, Integer
 
 from freqtrade.exchange import timeframe_to_minutes
@@ -17,6 +18,8 @@ from freqtrade.strategy import IStrategy
 
 logger = logging.getLogger(__name__)
 
+EstimatorType = Union[RegressorMixin, str]
+
 
 class IHyperOpt(ABC):
     """
@@ -37,6 +40,14 @@ class IHyperOpt(ABC):
         IHyperOpt.ticker_interval = str(config['timeframe'])  # DEPRECATED
         IHyperOpt.timeframe = str(config['timeframe'])
 
+    def generate_estimator(self) -> EstimatorType:
+        """
+        Return base_estimator.
+        Can be any of "GP", "RF", "ET", "GBRT" or an instance of a class
+        inheriting from RegressorMixin (from sklearn).
+        """
+        return 'ET'
+
     def generate_roi_table(self, params: Dict) -> Dict[int, float]:
         """
         Create a ROI table.
```