Ensure the same timestamp is used for backtest and signal export
parent 01a68e1060
commit 3c62df6b86
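In short, the backtesting run now generates one timestamp string and hands it to both export helpers, so the stats file and the signal-candles file carry an identical suffix instead of two timestamps taken milliseconds apart. A minimal sketch of the pattern outside of freqtrade (the helper bodies below are illustrative stand-ins, not the real implementations):

from datetime import datetime
from pathlib import Path
from typing import Dict


def store_stats(recordfilename: Path, stats: Dict, dtappendix: str) -> Path:
    # Stand-in: only builds the target filename, does not write anything.
    return recordfilename / f"backtest-result-{dtappendix}.json"


def store_signal_candles(recordfilename: Path, candles: Dict, dtappendix: str) -> Path:
    # Stand-in: same shared timestamp, different suffix.
    return recordfilename / f"backtest-result-{dtappendix}_signals.pkl"


# Compute the timestamp once per run, then pass it to every export:
dt_appendix = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
stats_file = store_stats(Path("user_data/backtest_results"), {}, dt_appendix)
signals_file = store_signal_candles(Path("user_data/backtest_results"), {}, dt_appendix)
# Both filenames embed the exact same dt_appendix, so the signals file
# can always be matched to its corresponding stats file.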
--- a/freqtrade/optimize/backtesting.py
+++ b/freqtrade/optimize/backtesting.py
@@ -1264,13 +1264,14 @@ class Backtesting:
                 self.results['strategy_comparison'].extend(results['strategy_comparison'])
             else:
                 self.results = results
-
+            dt_appendix = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
             if self.config.get('export', 'none') in ('trades', 'signals'):
-                store_backtest_stats(self.config['exportfilename'], self.results)
+                store_backtest_stats(self.config['exportfilename'], self.results, dt_appendix)

             if (self.config.get('export', 'none') == 'signals' and
                     self.dataprovider.runmode == RunMode.BACKTEST):
-                store_backtest_signal_candles(self.config['exportfilename'], self.processed_dfs)
+                store_backtest_signal_candles(
+                    self.config['exportfilename'], self.processed_dfs, dt_appendix)

         # Results may be mixed up now. Sort them so they follow --strategy-list order.
         if 'strategy_list' in self.config and len(self.results) > 0:
--- a/freqtrade/optimize/optimize_reports.py
+++ b/freqtrade/optimize/optimize_reports.py
@@ -17,21 +17,21 @@ from freqtrade.optimize.backtest_caching import get_backtest_metadata_filename
 logger = logging.getLogger(__name__)


-def store_backtest_stats(recordfilename: Path, stats: Dict[str, DataFrame]) -> None:
+def store_backtest_stats(
+        recordfilename: Path, stats: Dict[str, DataFrame], dtappendix: str) -> None:
     """
     Stores backtest results
     :param recordfilename: Path object, which can either be a filename or a directory.
         Filenames will be appended with a timestamp right before the suffix
         while for directories, <directory>/backtest-result-<datetime>.json will be used as filename
     :param stats: Dataframe containing the backtesting statistics
+    :param dtappendix: Datetime to use for the filename
     """
     if recordfilename.is_dir():
-        filename = (recordfilename /
-                    f'backtest-result-{datetime.now().strftime("%Y-%m-%d_%H-%M-%S")}.json')
+        filename = (recordfilename / f'backtest-result-{dtappendix}.json')
     else:
         filename = Path.joinpath(
-            recordfilename.parent,
-            f'{recordfilename.stem}-{datetime.now().strftime("%Y-%m-%d_%H-%M-%S")}'
+            recordfilename.parent, f'{recordfilename.stem}-{dtappendix}'
         ).with_suffix(recordfilename.suffix)

     # Store metadata separately.
@@ -44,7 +44,8 @@ def store_backtest_stats(recordfilename: Path, stats: Dict[str, DataFrame]) -> None:
     file_dump_json(latest_filename, {'latest_backtest': str(filename.name)})


-def store_backtest_signal_candles(recordfilename: Path, candles: Dict[str, Dict]) -> Path:
+def store_backtest_signal_candles(
+        recordfilename: Path, candles: Dict[str, Dict], dtappendix: str) -> Path:
     """
     Stores backtest trade signal candles
     :param recordfilename: Path object, which can either be a filename or a directory.
@@ -52,14 +53,13 @@ def store_backtest_signal_candles(recordfilename: Path, candles: Dict[str, Dict]) -> Path:
         while for directories, <directory>/backtest-result-<datetime>_signals.pkl will be used
         as filename
     :param stats: Dict containing the backtesting signal candles
+    :param dtappendix: Datetime to use for the filename
     """
     if recordfilename.is_dir():
-        filename = (recordfilename /
-                    f'backtest-result-{datetime.now().strftime("%Y-%m-%d_%H-%M-%S")}_signals.pkl')
+        filename = (recordfilename / f'backtest-result-{dtappendix}_signals.pkl')
     else:
         filename = Path.joinpath(
-            recordfilename.parent,
-            f'{recordfilename.stem}-{datetime.now().strftime("%Y-%m-%d_%H-%M-%S")}_signals.pkl'
+            recordfilename.parent, f'{recordfilename.stem}-{dtappendix}_signals.pkl'
         )

     file_dump_joblib(filename, candles)
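For reference, the two branches above now produce matching names. A quick illustration of the naming with hypothetical paths and an example timestamp (the format string and the joinpath construction mirror the diff):

from pathlib import Path

dtappendix = "2022-01-01_15-05-13"  # hypothetical value from strftime("%Y-%m-%d_%H-%M-%S")

# Directory export: <directory>/backtest-result-<datetime>[.json | _signals.pkl]
print(Path("backtest_results") / f"backtest-result-{dtappendix}.json")
print(Path("backtest_results") / f"backtest-result-{dtappendix}_signals.pkl")

# File export: the shared timestamp is inserted right before the suffix
record = Path("backtest_results/my-run.json")  # hypothetical export filename
print(Path.joinpath(record.parent, f"{record.stem}-{dtappendix}").with_suffix(record.suffix))
# -> backtest_results/my-run-2022-01-01_15-05-13.json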
--- a/freqtrade/rpc/api_server/api_backtest.py
+++ b/freqtrade/rpc/api_server/api_backtest.py
@@ -1,6 +1,7 @@
 import asyncio
 import logging
 from copy import deepcopy
+from datetime import datetime
 from typing import Any, Dict, List

 from fastapi import APIRouter, BackgroundTasks, Depends
@@ -102,7 +103,10 @@ async def api_start_backtest(bt_settings: BacktestRequest, background_tasks: BackgroundTasks,
                 min_date=min_date, max_date=max_date)

             if btconfig.get('export', 'none') == 'trades':
-                store_backtest_stats(btconfig['exportfilename'], ApiServer._bt.results)
+                store_backtest_stats(
+                    btconfig['exportfilename'], ApiServer._bt.results,
+                    datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
+                )

             logger.info("Backtest finished.")

--- a/tests/optimize/test_optimize_reports.py
+++ b/tests/optimize/test_optimize_reports.py
@@ -171,7 +171,7 @@ def test_generate_backtest_stats(default_conf, testdatadir, tmpdir):
     _backup_file(filename_last, copy_file=True)
     assert not filename.is_file()

-    store_backtest_stats(filename, stats)
+    store_backtest_stats(filename, stats, '2022_01_01_15_05_13')

     # get real Filename (it's btresult-<date>.json)
     last_fn = get_latest_backtest_filename(filename_last.parent)
@@ -194,7 +194,7 @@ def test_store_backtest_stats(testdatadir, mocker):

     dump_mock = mocker.patch('freqtrade.optimize.optimize_reports.file_dump_json')

-    store_backtest_stats(testdatadir, {'metadata': {}})
+    store_backtest_stats(testdatadir, {'metadata': {}}, '2022_01_01_15_05_13')

     assert dump_mock.call_count == 3
     assert isinstance(dump_mock.call_args_list[0][0][0], Path)
@@ -202,7 +202,7 @@ def test_store_backtest_stats(testdatadir, mocker):

     dump_mock.reset_mock()
     filename = testdatadir / 'testresult.json'
-    store_backtest_stats(filename, {'metadata': {}})
+    store_backtest_stats(filename, {'metadata': {}}, '2022_01_01_15_05_13')
     assert dump_mock.call_count == 3
     assert isinstance(dump_mock.call_args_list[0][0][0], Path)
     # result will be testdatadir / testresult-<timestamp>.json
@@ -216,7 +216,7 @@ def test_store_backtest_candles(testdatadir, mocker):
     candle_dict = {'DefStrat': {'UNITTEST/BTC': pd.DataFrame()}}

     # mock directory exporting
-    store_backtest_signal_candles(testdatadir, candle_dict)
+    store_backtest_signal_candles(testdatadir, candle_dict, '2022_01_01_15_05_13')

     assert dump_mock.call_count == 1
     assert isinstance(dump_mock.call_args_list[0][0][0], Path)
@@ -225,7 +225,7 @@ def test_store_backtest_candles(testdatadir, mocker):
     dump_mock.reset_mock()
     # mock file exporting
     filename = Path(testdatadir / 'testresult')
-    store_backtest_signal_candles(filename, candle_dict)
+    store_backtest_signal_candles(filename, candle_dict, '2022_01_01_15_05_13')
     assert dump_mock.call_count == 1
     assert isinstance(dump_mock.call_args_list[0][0][0], Path)
     # result will be testdatadir / testresult-<timestamp>_signals.pkl
@@ -238,7 +238,7 @@ def test_write_read_backtest_candles(tmpdir):
     candle_dict = {'DefStrat': {'UNITTEST/BTC': pd.DataFrame()}}

     # test directory exporting
-    stored_file = store_backtest_signal_candles(Path(tmpdir), candle_dict)
+    stored_file = store_backtest_signal_candles(Path(tmpdir), candle_dict, '2022_01_01_15_05_13')
     scp = open(stored_file, "rb")
     pickled_signal_candles = joblib.load(scp)
     scp.close()
@@ -252,7 +252,7 @@ def test_write_read_backtest_candles(tmpdir):

     # test file exporting
     filename = Path(tmpdir / 'testresult')
-    stored_file = store_backtest_signal_candles(filename, candle_dict)
+    stored_file = store_backtest_signal_candles(filename, candle_dict, '2022_01_01_15_05_13')
     scp = open(stored_file, "rb")
     pickled_signal_candles = joblib.load(scp)
     scp.close()