Revert "Merge branch 'plot_hyperopt_stats' into opt-ask-force-new-points"

This reverts commit 4eb9cc6e8b, reversing
changes made to a3b401a762.
This commit is contained in:
Italo
2022-03-30 09:39:07 +01:00
parent 4eb9cc6e8b
commit 9f171193ef
15 changed files with 169 additions and 184 deletions

View File

@@ -1,14 +1,27 @@
""" Freqtrade bot """
__version__ = 'develop'
if 'dev' in __version__:
if __version__ == 'develop':
try:
import subprocess
__version__ = __version__ + '-' + subprocess.check_output(
__version__ = 'develop-' + subprocess.check_output(
['git', 'log', '--format="%h"', '-n 1'],
stderr=subprocess.DEVNULL).decode("utf-8").rstrip().strip('"')
# from datetime import datetime
# last_release = subprocess.check_output(
# ['git', 'tag']
# ).decode('utf-8').split()[-1].split(".")
# # Releases are in the format "2020.1" - we increment the latest version for dev.
# prefix = f"{last_release[0]}.{int(last_release[1]) + 1}"
# dev_version = int(datetime.now().timestamp() // 1000)
# __version__ = f"{prefix}.dev{dev_version}"
# subprocess.check_output(
# ['git', 'log', '--format="%h"', '-n 1'],
# stderr=subprocess.DEVNULL).decode("utf-8").rstrip().strip('"')
except Exception: # pragma: no cover
# git not available, ignore
try:

View File

@@ -32,24 +32,20 @@ from freqtrade.optimize.hyperopt_loss_interface import IHyperOptLoss # noqa: F4
from freqtrade.optimize.hyperopt_tools import HyperoptTools, hyperopt_serializer
from freqtrade.optimize.optimize_reports import generate_strategy_stats
from freqtrade.resolvers.hyperopt_resolver import HyperOptLossResolver
import matplotlib.pyplot as plt
import numpy as np
import random
# Suppress scikit-learn FutureWarnings from skopt
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=FutureWarning)
from skopt import Optimizer
from skopt.space import Dimension
from sklearn.model_selection import cross_val_score
from skopt.plots import plot_convergence, plot_regret, plot_evaluations, plot_objective
progressbar.streams.wrap_stderr()
progressbar.streams.wrap_stdout()
logger = logging.getLogger(__name__)
INITIAL_POINTS = 32
INITIAL_POINTS = 30
# Keep no more than SKOPT_MODEL_QUEUE_SIZE models
# in the skopt model queue, to optimize memory consumption
@@ -413,35 +409,6 @@ class Hyperopt:
f'({(self.max_date - self.min_date).days} days)..')
# Store non-trimmed data - will be trimmed after signal generation.
dump(preprocessed, self.data_pickle_file)
def get_asked_points(self, n_points: int) -> List[List[Any]]:
    '''
    Return up to `n_points` candidate points that the optimizer has not evaluated yet.

    Strategy:
    1. Ask the optimizer for a batch (5x oversampled) and keep only unseen points.
    2. For the first 3 attempts, clear the optimizer's ask-cache and re-ask.
    3. Afterwards, fall back to random samples drawn from the search space.
    4. Stop once enough unique points are collected or 100 attempts are exhausted.
    5. If nothing unseen was found at all, return a plain `self.opt.ask` result.
    '''
    candidates: List[List[Any]] = []
    for attempt in range(100):
        if len(candidates) >= n_points:
            break
        if attempt < 3:
            # Drop the cached suggestion so `ask` can produce fresh points.
            self.opt.cache_ = {}
            batch = self.opt.ask(n_points=n_points * 5)
        else:
            # Optimizer keeps suggesting known points - sample the space randomly.
            batch = self.opt.space.rvs(n_samples=n_points * 5)
        for point in batch:
            if point not in self.opt.Xi and point not in candidates:
                candidates.append(point)
    if not candidates:
        # Could not find any unseen point; accept possibly-repeated suggestions.
        return self.opt.ask(n_points=n_points)
    return candidates[:n_points]
def get_asked_points(self, n_points: int) -> Tuple[List[List[Any]], List[bool]]:
'''
@@ -548,13 +515,7 @@ class Hyperopt:
asked, is_random = self.get_asked_points(n_points=current_jobs)
f_val = self.run_optimizer_parallel(parallel, asked, i)
res = self.opt.tell(asked, [v['loss'] for v in f_val])
self.plot_optimizer(res, path='user_data/scripts', convergence=False, regret=False, r2=False, objective=True, jobs=jobs)
if res.models and hasattr(res.models[-1], "kernel_"):
print(f'kernel: {res.models[-1].kernel_}')
print(datetime.now())
self.opt.tell(asked, [v['loss'] for v in f_val])
# Calculate progressbar outputs
for j, val in enumerate(f_val):
@@ -600,47 +561,3 @@ class Hyperopt:
# This is printed when Ctrl+C is pressed quickly, before first epochs have
# a chance to be evaluated.
print("No epochs evaluated yet, no best result.")
def plot_r2(self, res, ax, jobs):
    """
    Append the surrogate model's cross-validated R2 score for the current
    epoch to `self.r2_list` and plot the whole history on `ax`.

    :param res: skopt result object (uses `x_iters`, `func_vals`, `models`).
    :param ax: matplotlib Axes to draw the R2 curve on.
    :param jobs: parallel job count; also the epoch step between scores.
    """
    # Too few evaluated points for a meaningful 5-fold CV score.
    if len(res.x_iters) < 10:
        return
    # Lazily create the history list on first use.
    if not hasattr(self, 'r2_list'):
        self.r2_list = []
    surrogate = res.models[-1]
    surrogate.criterion = 'squared_error'
    score = cross_val_score(surrogate, X=res.x_iters, y=res.func_vals,
                            scoring='r2', cv=5, n_jobs=jobs).mean()
    # Clamp very poor scores so they do not flatten the plot's y-scale.
    score = score if score > -5 else -5
    self.r2_list.append(score)
    epochs = range(INITIAL_POINTS, INITIAL_POINTS + jobs * len(self.r2_list), jobs)
    ax.plot(epochs, self.r2_list, label='R2', marker=".", markersize=12, lw=2)
def plot_optimizer(self, res, path, jobs, convergence=True, regret=True, evaluations=True, objective=True, r2=True):
    """
    Save diagnostic plots of the optimizer state to `path`.

    Each flag toggles one plot (convergence.png, regret.png, evaluations.png,
    objective.png, r2.png). Figures are explicitly closed after saving: this
    method runs once per optimizer iteration, and un-closed matplotlib figures
    accumulate and leak memory over a long hyperopt run.

    :param res: skopt result object.
    :param path: directory (str or Path) where the .png files are written.
    :param jobs: parallel job count, forwarded to `plot_r2`.
    """
    path = Path(path)
    if convergence:
        ax = plot_convergence(res)
        fig = ax.flatten()[0].figure
        fig.savefig(path / 'convergence.png')
        plt.close(fig)
    if regret:
        ax = plot_regret(res)
        fig = ax.flatten()[0].figure
        fig.savefig(path / 'regret.png')
        plt.close(fig)
    if evaluations:
        ax = plot_evaluations(res)
        fig = ax.flatten()[0].figure
        fig.savefig(path / 'evaluations.png')
        plt.close(fig)
    # Objective and R2 plots need a fitted surrogate model to exist.
    if objective and res.models:
        ax = plot_objective(res, sample_source='result', n_samples=50, n_points=10)
        fig = ax.flatten()[0].figure
        fig.savefig(path / 'objective.png')
        plt.close(fig)
    if r2 and res.models:
        fig, ax = plt.subplots()
        ax.set_ylabel('R2')
        ax.set_xlabel('Epoch')
        ax.set_title('R2')
        # plot_r2 draws onto `ax` in place and returns None.
        self.plot_r2(res, ax, jobs)
        fig.savefig(path / 'r2.png')
        plt.close(fig)

View File

@@ -6,7 +6,6 @@ This module load custom objects
import importlib.util
import inspect
import logging
import sys
from pathlib import Path
from typing import Any, Dict, Iterator, List, Optional, Tuple, Type, Union
@@ -16,22 +15,6 @@ from freqtrade.exceptions import OperationalException
logger = logging.getLogger(__name__)
class PathModifier:
    """
    Context manager that temporarily prepends a directory to `sys.path`,
    so a module inside it can be imported with relative imports.
    The entry is removed again on exit.
    """

    def __init__(self, path: Path):
        # Directory to make importable while the context is active.
        self.path = path

    def __enter__(self):
        """Prepend the directory to sys.path and return self."""
        sys.path.insert(0, str(self.path))
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Remove the directory from sys.path again, if still present."""
        entry = str(self.path)
        if entry in sys.path:
            sys.path.remove(entry)
class IResolver:
"""
This class contains all the logic to load custom classes
@@ -74,32 +57,27 @@ class IResolver:
# Generate spec based on absolute path
# Pass object_name as first argument to have logging print a reasonable name.
with PathModifier(module_path.parent):
module_name = module_path.stem or ""
spec = importlib.util.spec_from_file_location(module_name, str(module_path))
if not spec:
spec = importlib.util.spec_from_file_location(object_name or "", str(module_path))
if not spec:
return iter([None])
module = importlib.util.module_from_spec(spec)
try:
spec.loader.exec_module(module) # type: ignore # importlib does not use typehints
except (ModuleNotFoundError, SyntaxError, ImportError, NameError) as err:
# Catch errors in case a specific module is not installed
logger.warning(f"Could not import {module_path} due to '{err}'")
if enum_failed:
return iter([None])
module = importlib.util.module_from_spec(spec)
try:
spec.loader.exec_module(module) # type: ignore # importlib does not use typehints
except (ModuleNotFoundError, SyntaxError, ImportError, NameError) as err:
# Catch errors in case a specific module is not installed
logger.warning(f"Could not import {module_path} due to '{err}'")
if enum_failed:
return iter([None])
valid_objects_gen = (
(obj, inspect.getsource(module)) for
name, obj in inspect.getmembers(
module, inspect.isclass) if ((object_name is None or object_name == name)
and issubclass(obj, cls.object_type)
and obj is not cls.object_type
and obj.__module__ == module_name
)
)
# The __module__ check ensures we only use strategies that are defined in this folder.
return valid_objects_gen
valid_objects_gen = (
(obj, inspect.getsource(module)) for
name, obj in inspect.getmembers(
module, inspect.isclass) if ((object_name is None or object_name == name)
and issubclass(obj, cls.object_type)
and obj is not cls.object_type)
)
return valid_objects_gen
@classmethod
def _search_object(cls, directory: Path, *, object_name: str, add_source: bool = False