Merge branch 'develop' into hyperopt-jobs

hroff-1902 2019-04-24 10:31:03 +03:00 committed by GitHub
commit 2898067318
3 changed files with 28 additions and 3 deletions


@@ -317,6 +317,14 @@ class Arguments(object):
             type=int,
             metavar='JOBS',
         )
+        parser.add_argument(
+            '--random-state',
+            help='Set random state to some positive integer for reproducible hyperopt results.',
+            dest='hyperopt_random_state',
+            default=None,
+            type=Arguments.check_int_positive,
+            metavar='INT',
+        )
 
     def _build_subcommands(self) -> None:
         """
@@ -387,6 +395,18 @@ class Arguments(object):
                 return TimeRange(stype[0], stype[1], start, stop)
         raise Exception('Incorrect syntax for timerange "%s"' % text)
 
+    @staticmethod
+    def check_int_positive(value) -> int:
+        try:
+            uint = int(value)
+            if uint <= 0:
+                raise ValueError
+        except ValueError:
+            raise argparse.ArgumentTypeError(
+                f"{value} is invalid for this parameter, should be a positive integer value"
+            )
+        return uint
+
     def scripts_options(self) -> None:
         """
         Parses given arguments for scripts.
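
The new check_int_positive helper is wired in through argparse's type= hook: the callable receives the raw string, and raising argparse.ArgumentTypeError makes argparse report a clean usage error instead of a traceback. Below is a minimal, standalone sketch of the same pattern; the parser built here is illustrative only, not the freqtrade Arguments class.

import argparse


def check_int_positive(value) -> int:
    # Mirror of the validator added above: convert, then reject zero and negatives.
    try:
        uint = int(value)
        if uint <= 0:
            raise ValueError
    except ValueError:
        raise argparse.ArgumentTypeError(
            f"{value} is invalid for this parameter, should be a positive integer value"
        )
    return uint


parser = argparse.ArgumentParser()
parser.add_argument('--random-state', dest='hyperopt_random_state',
                    default=None, type=check_int_positive, metavar='INT')

print(parser.parse_args(['--random-state', '42']).hyperopt_random_state)  # 42
print(parser.parse_args([]).hyperopt_random_state)                        # None
# parser.parse_args(['--random-state', '0']) exits with a usage error:
#   argument --random-state: 0 is invalid for this parameter, should be a positive integer value

Because the default is None, omitting the flag leaves the option unset, which the Configuration handling in the next file relies on.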


@@ -201,8 +201,7 @@ class Configuration(object):
         :return: configuration as dictionary
         """
 
-        # If -i/--ticker-interval is used we override the configuration parameter
-        # (that will override the strategy configuration)
+        # This will override the strategy configuration
         if 'ticker_interval' in self.args and self.args.ticker_interval:
             config.update({'ticker_interval': self.args.ticker_interval})
             logger.info('Parameter -i/--ticker-interval detected ...')
@@ -318,6 +317,11 @@ class Configuration(object):
             config.update({'refresh_pairs': True})
             logger.info('Parameter -r/--refresh-pairs-cached detected ...')
 
+        if 'hyperopt_random_state' in self.args and self.args.hyperopt_random_state is not None:
+            config.update({'hyperopt_random_state': self.args.hyperopt_random_state})
+            logger.info("Parameter --random-state detected: %s",
+                        config.get('hyperopt_random_state'))
+
         return config
 
     def _validate_config_schema(self, conf: Dict[str, Any]) -> Dict[str, Any]:
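
The guard above only copies the value into the configuration when the attribute exists on the parsed arguments and is not None, so leaving --random-state off the command line does not overwrite anything already in the config. A reduced sketch of that pattern follows, using a bare argparse.Namespace in place of freqtrade's parsed arguments; the helper name apply_random_state is made up for illustration.

import logging
from argparse import Namespace

logger = logging.getLogger(__name__)


def apply_random_state(args: Namespace, config: dict) -> dict:
    # Same guard as in the diff: only override when the option was actually supplied.
    if 'hyperopt_random_state' in args and args.hyperopt_random_state is not None:
        config.update({'hyperopt_random_state': args.hyperopt_random_state})
        logger.info("Parameter --random-state detected: %s",
                    config.get('hyperopt_random_state'))
    return config


print(apply_random_state(Namespace(hyperopt_random_state=42), {}))    # {'hyperopt_random_state': 42}
print(apply_random_state(Namespace(hyperopt_random_state=None), {}))  # {}  (flag not given)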


@@ -235,7 +235,8 @@ class Hyperopt(Backtesting):
             base_estimator="ET",
             acq_optimizer="auto",
             n_initial_points=30,
-            acq_optimizer_kwargs={'n_jobs': cpu_count}
+            acq_optimizer_kwargs={'n_jobs': cpu_count},
+            random_state=self.config.get('hyperopt_random_state', None)
         )
 
     def run_optimizer_parallel(self, parallel, asked) -> List:
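
Passing random_state through to the scikit-optimize Optimizer seeds its internal sampling, so two hyperopt runs configured the same way ask for the same sequence of candidate parameter sets. The standalone sketch below demonstrates that behaviour; it assumes scikit-optimize is installed, and the toy search space and dimension names are invented rather than freqtrade's real hyperopt space.

from skopt import Optimizer
from skopt.space import Integer, Real

space = [Integer(5, 50, name='window'), Real(0.01, 0.10, name='threshold')]


def first_candidates(seed, n=3):
    # Same estimator settings as above, minus the parallelism kwargs, on a toy space.
    opt = Optimizer(space, base_estimator="ET", acq_optimizer="auto",
                    n_initial_points=30, random_state=seed)
    points = []
    for _ in range(n):
        x = opt.ask()      # next candidate parameter set
        opt.tell(x, 0.0)   # dummy objective value, just to advance the optimizer
        points.append(x)
    return points


assert first_candidates(42) == first_candidates(42)  # fixed seed, identical candidates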