optimize hyperopt
commit a8399533e2
parent f204af173d
@@ -3,11 +3,11 @@
 """
 This module contains the hyperopt logic
 """

 import logging
 import multiprocessing
 import os
 import sys

 from argparse import Namespace
 from math import exp
 from operator import itemgetter
@@ -28,10 +28,13 @@ from freqtrade.optimize.backtesting import Backtesting
 from freqtrade.state import RunMode
 from freqtrade.resolvers import HyperOptResolver


 logger = logging.getLogger(__name__)


 MAX_LOSS = 100000  # just a big enough number to be bad result in loss optimization
 TICKERDATA_PICKLE = os.path.join('user_data', 'hyperopt_tickerdata.pkl')
+
+EVALS_FRAME = 100


 class Hyperopt(Backtesting):
@@ -111,21 +114,22 @@ class Hyperopt(Backtesting):
         logger.info('ROI table:')
         pprint(self.custom_hyperopt.generate_roi_table(best_result['params']), indent=4)

-    def log_results(self, results) -> None:
+    def log_results_immediate(self) -> None:
+        print('.', end='')
+        sys.stdout.flush()
+
+    def log_results(self, f_val, frame_start, total_tries) -> None:
         """
         Log results if it is better than any previous evaluation
         """
-        if results['loss'] < self.current_best_loss:
-            current = results['current_tries']
-            total = results['total_tries']
-            res = results['result']
-            loss = results['loss']
-            self.current_best_loss = results['loss']
-            log_msg = f'\n{current:5d}/{total}: {res}. Loss {loss:.5f}'
-            print(log_msg)
-        else:
-            print('.', end='')
-            sys.stdout.flush()
+        for i, v in enumerate(f_val):
+            if v['loss'] < self.current_best_loss:
+                current = frame_start + i + 1
+                res = v['result']
+                loss = v['loss']
+                self.current_best_loss = v['loss']
+                log_msg = f'\n{current:5d}/{total_tries}: {res}. Loss {loss:.5f}'
+                print(log_msg)

     def calculate_loss(self, total_profit: float, trade_count: int, trade_duration: float) -> float:
         """
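For reference, the logging is now split in two: log_results_immediate() prints a progress dot after every finished evaluation, while log_results() scans one whole frame of results and prints a full line only for results that beat the best loss seen so far. A minimal standalone sketch of that behaviour, with illustrative frame data (the loss and result strings are made up):

# Sketch of the reworked per-frame logging; values are illustrative only.
f_val = [
    {'loss': 3.2, 'result': 'avg profit 0.10%'},
    {'loss': 1.7, 'result': 'avg profit 0.80%'},   # beats current best -> printed
    {'loss': 2.5, 'result': 'avg profit 0.30%'},
]
current_best_loss = 2.0
frame_start, total_tries = 100, 500

for i, v in enumerate(f_val):
    if v['loss'] < current_best_loss:
        current = frame_start + i + 1
        current_best_loss = v['loss']
        print(f'\n{current:5d}/{total_tries}: {v["result"]}. Loss {v["loss"]:.5f}')
# Prints one line: "  102/500: avg profit 0.80%. Loss 1.70000"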
@@ -238,9 +242,17 @@ class Hyperopt(Backtesting):
             acq_optimizer_kwargs={'n_jobs': cpu_count}
         )

-    def run_optimizer_parallel(self, parallel, asked) -> List:
-        return parallel(delayed(
-            wrap_non_picklable_objects(self.generate_optimizer))(v) for v in asked)
+    def run_optimizer_parallel(self, parallel, opt, tries: int, first_try: int) -> List:
+        result = parallel(delayed(
+            wrap_non_picklable_objects(self.parallel_tell_and_log))
+            (opt, i, opt.ask()) for i in range(first_try, first_try + tries))
+        return result
+
+    def parallel_tell_and_log(self, opt, i, asked):
+        f_val = self.generate_optimizer(asked)
+        opt.tell(asked, f_val['loss'])
+        self.log_results_immediate()
+        return f_val

     def load_previous_results(self):
         """ read trials file if we have one """
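For reference, the helpers above build on the optimizer's ask/tell protocol (the Optimizer here comes from scikit-optimize): each joblib job takes one asked point, evaluates it via generate_optimizer(), feeds the loss back with opt.tell(), and prints a progress dot. A minimal standalone sketch of that protocol in sequential form, assuming scikit-optimize is installed; the objective and search space are illustrative stand-ins, not freqtrade code:

# Sequential ask/evaluate/tell loop with an illustrative objective.
from skopt import Optimizer
from skopt.space import Real

def objective(point):
    # Stand-in for Hyperopt.generate_optimizer(): one loss per candidate point.
    x, y = point
    return (x - 1.0) ** 2 + (y + 2.0) ** 2

opt = Optimizer([Real(-5, 5, name='x'), Real(-5, 5, name='y')], random_state=1)

for i in range(20):
    asked = opt.ask()        # one candidate point
    loss = objective(asked)  # evaluate it
    opt.tell(asked, loss)    # feed the loss back so later ask() calls improve
    print(f'{i + 1:3d}: loss={loss:.5f}')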
@@ -272,22 +284,25 @@ class Hyperopt(Backtesting):
         logger.info(f'Found {cpus} CPU cores. Let\'s make them scream!')

         opt = self.get_optimizer(cpus)
-        EVALS = max(self.total_tries // cpus, 1)
-        try:
-            with Parallel(n_jobs=cpus) as parallel:
-                for i in range(EVALS):
-                    asked = opt.ask(n_points=cpus)
-                    f_val = self.run_optimizer_parallel(parallel, asked)
-                    opt.tell(asked, [i['loss'] for i in f_val])

+        frames = self.total_tries // EVALS_FRAME
+        last_frame_len = self.total_tries % EVALS_FRAME
+
+        try:
+            with Parallel(n_jobs=cpus, verbose=0) as parallel:
+                for frame in range(frames + 1):
+                    frame_start = frame * EVALS_FRAME
+                    frame_len = last_frame_len if frame == frames else EVALS_FRAME
+                    print(f"\n{frame_start+1}-{frame_start+frame_len}"
+                          f"/{self.total_tries}: ", end='')
+                    f_val = self.run_optimizer_parallel(
+                        parallel, opt,
+                        frame_len,
+                        frame_start
+                    )
                     self.trials += f_val
-                    for j in range(cpus):
-                        self.log_results({
-                            'loss': f_val[j]['loss'],
-                            'current_tries': i * cpus + j,
-                            'total_tries': self.total_tries,
-                            'result': f_val[j]['result'],
-                        })
+                    self.log_results(f_val, frame_start, self.total_tries)
         except KeyboardInterrupt:
             print('User interrupted..')

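For reference, a worked example of the frame arithmetic used in start(), with the EVALS_FRAME = 100 constant introduced by this commit and an illustrative total_tries of 250:

# Frame boundaries as computed in start(); total_tries is illustrative.
EVALS_FRAME = 100
total_tries = 250

frames = total_tries // EVALS_FRAME          # 2 full frames
last_frame_len = total_tries % EVALS_FRAME   # 50 evaluations in the final frame

for frame in range(frames + 1):
    frame_start = frame * EVALS_FRAME
    frame_len = last_frame_len if frame == frames else EVALS_FRAME
    print(f"{frame_start + 1}-{frame_start + frame_len}/{total_tries}")
# Prints: 1-100/250, 101-200/250, 201-250/250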
@@ -142,12 +142,12 @@ def test_loss_calculation_has_limited_profit(hyperopt) -> None:
 def test_log_results_if_loss_improves(hyperopt, capsys) -> None:
     hyperopt.current_best_loss = 2
     hyperopt.log_results(
-        {
+        [{
             'loss': 1,
-            'current_tries': 1,
-            'total_tries': 2,
             'result': 'foo'
-        }
+        }],
+        0,
+        2
     )
     out, err = capsys.readouterr()
     assert '    1/2: foo. Loss 1.00000' in out
@@ -156,9 +156,11 @@ def test_log_results_if_loss_improves(hyperopt, capsys) -> None:
 def test_no_log_if_loss_does_not_improve(hyperopt, caplog) -> None:
     hyperopt.current_best_loss = 2
     hyperopt.log_results(
-        {
+        [{
             'loss': 3,
-        }
+        }],
+        0,
+        1
     )
     assert caplog.record_tuples == []
