Merge branch 'develop' into pr/nicolaspapp/6715
commit 53a2f55cf0

.github/PULL_REQUEST_TEMPLATE.md (vendored): 8 changed lines
.github/PULL_REQUEST_TEMPLATE.md:

@@ -1,9 +1,9 @@
-Thank you for sending your pull request. But first, have you included
+<!-- Thank you for sending your pull request. But first, have you included
 unit tests, and is your code PEP8 conformant? [More details](https://github.com/freqtrade/freqtrade/blob/develop/CONTRIBUTING.md)
-
+-->
 ## Summary

-Explain in one sentence the goal of this PR
+<!-- Explain in one sentence the goal of this PR -->

 Solve the issue: #___

@@ -14,4 +14,4 @@ Solve the issue: #___

 ## What's new?

-*Explain in details what this PR solve or improve. You can include visuals.*
+<!-- Explain in details what this PR solve or improve. You can include visuals. -->
@@ -22,6 +22,6 @@ def setup_utils_configuration(args: Dict[str, Any], method: RunMode) -> Dict[str

     # Ensure these modes are using Dry-run
     config['dry_run'] = True
-    validate_config_consistency(config)
+    validate_config_consistency(config, preliminary=True)

     return config
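For orientation (not part of this diff), the utils entry points call the function above roughly like this; the args dict is a hypothetical minimal stand-in for what the argument parser produces, and running it needs a freqtrade installation plus a valid config file:

    # Hypothetical usage sketch of setup_utils_configuration(), not taken from this commit.
    from freqtrade.configuration.config_setup import setup_utils_configuration
    from freqtrade.enums import RunMode

    args = {'config': ['config.json']}   # assumed minimal arguments dict
    config = setup_utils_configuration(args, RunMode.BACKTEST)
    assert config['dry_run'] is True     # forced above, independent of the config file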
@@ -39,7 +39,7 @@ def _extend_validator(validator_class):
 FreqtradeValidator = _extend_validator(Draft4Validator)


-def validate_config_schema(conf: Dict[str, Any]) -> Dict[str, Any]:
+def validate_config_schema(conf: Dict[str, Any], preliminary: bool = False) -> Dict[str, Any]:
     """
     Validate the configuration follow the Config Schema
     :param conf: Config in JSON format
@@ -49,7 +49,10 @@ def validate_config_schema(conf: Dict[str, Any]) -> Dict[str, Any]:
     if conf.get('runmode', RunMode.OTHER) in (RunMode.DRY_RUN, RunMode.LIVE):
         conf_schema['required'] = constants.SCHEMA_TRADE_REQUIRED
     elif conf.get('runmode', RunMode.OTHER) in (RunMode.BACKTEST, RunMode.HYPEROPT):
-        conf_schema['required'] = constants.SCHEMA_BACKTEST_REQUIRED
+        if preliminary:
+            conf_schema['required'] = constants.SCHEMA_BACKTEST_REQUIRED
+        else:
+            conf_schema['required'] = constants.SCHEMA_BACKTEST_REQUIRED_FINAL
     else:
         conf_schema['required'] = constants.SCHEMA_MINIMAL_REQUIRED
     try:
@@ -64,7 +67,7 @@ def validate_config_schema(conf: Dict[str, Any]) -> Dict[str, Any]:
         )


-def validate_config_consistency(conf: Dict[str, Any]) -> None:
+def validate_config_consistency(conf: Dict[str, Any], preliminary: bool = False) -> None:
     """
     Validate the configuration consistency.
     Should be ran after loading both configuration and strategy,
@@ -85,7 +88,7 @@ def validate_config_consistency(conf: Dict[str, Any]) -> None:

     # validate configuration before returning
     logger.info('Validating configuration ...')
-    validate_config_schema(conf)
+    validate_config_schema(conf, preliminary=preliminary)


 def _validate_unlimited_amount(conf: Dict[str, Any]) -> None:
@@ -463,6 +463,10 @@ SCHEMA_BACKTEST_REQUIRED = [
     'dataformat_ohlcv',
     'dataformat_trades',
 ]
+SCHEMA_BACKTEST_REQUIRED_FINAL = SCHEMA_BACKTEST_REQUIRED + [
+    'stoploss',
+    'minimal_roi',
+]

 SCHEMA_MINIMAL_REQUIRED = [
     'exchange',
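Taken together, the changes above mean a backtest or hyperopt config is validated twice: a preliminary pass against SCHEMA_BACKTEST_REQUIRED while the bot is still being set up, and a final pass against SCHEMA_BACKTEST_REQUIRED_FINAL once the strategy has had the chance to supply values such as stoploss and minimal_roi. A minimal sketch of that two-stage idea (illustrative only; the real check runs through jsonschema in validate_config_schema(), and the key lists here are abridged to the entries visible in the hunk):

    # Illustrative sketch only - not the real schema check.
    SCHEMA_BACKTEST_REQUIRED = ['dataformat_ohlcv', 'dataformat_trades']
    SCHEMA_BACKTEST_REQUIRED_FINAL = SCHEMA_BACKTEST_REQUIRED + ['stoploss', 'minimal_roi']


    def missing_required(config: dict, preliminary: bool = False) -> list:
        """Return required keys the config does not provide yet."""
        required = SCHEMA_BACKTEST_REQUIRED if preliminary else SCHEMA_BACKTEST_REQUIRED_FINAL
        return [key for key in required if key not in config]


    config = {'dataformat_ohlcv': 'json', 'dataformat_trades': 'jsongz'}
    print(missing_required(config, preliminary=True))   # [] - enough for the preliminary pass
    print(missing_required(config, preliminary=False))  # ['stoploss', 'minimal_roi'] - still needed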
@@ -5,7 +5,7 @@ import logging
 from copy import copy
 from datetime import datetime, timezone
 from pathlib import Path
-from typing import Any, Dict, List, Optional, Tuple, Union
+from typing import Any, Dict, List, Optional, Union

 import numpy as np
 import pandas as pd
@@ -400,187 +400,3 @@ def extract_trades_of_period(dataframe: pd.DataFrame, trades: pd.DataFrame,
     trades = trades.loc[(trades['open_date'] >= trades_start) &
                         (trades['close_date'] <= trades_stop)]
     return trades
-
-
-def calculate_market_change(data: Dict[str, pd.DataFrame], column: str = "close") -> float:
-    """
-    Calculate market change based on "column".
-    Calculation is done by taking the first non-null and the last non-null element of each column
-    and calculating the pctchange as "(last - first) / first".
-    Then the results per pair are combined as mean.
-
-    :param data: Dict of Dataframes, dict key should be pair.
-    :param column: Column in the original dataframes to use
-    :return:
-    """
-    tmp_means = []
-    for pair, df in data.items():
-        start = df[column].dropna().iloc[0]
-        end = df[column].dropna().iloc[-1]
-        tmp_means.append((end - start) / start)
-
-    return float(np.mean(tmp_means))
-
-
-def combine_dataframes_with_mean(data: Dict[str, pd.DataFrame],
-                                 column: str = "close") -> pd.DataFrame:
-    """
-    Combine multiple dataframes "column"
-    :param data: Dict of Dataframes, dict key should be pair.
-    :param column: Column in the original dataframes to use
-    :return: DataFrame with the column renamed to the dict key, and a column
-        named mean, containing the mean of all pairs.
-    :raise: ValueError if no data is provided.
-    """
-    df_comb = pd.concat([data[pair].set_index('date').rename(
-        {column: pair}, axis=1)[pair] for pair in data], axis=1)
-
-    df_comb['mean'] = df_comb.mean(axis=1)
-
-    return df_comb
-
-
-def create_cum_profit(df: pd.DataFrame, trades: pd.DataFrame, col_name: str,
-                      timeframe: str) -> pd.DataFrame:
-    """
-    Adds a column `col_name` with the cumulative profit for the given trades array.
-    :param df: DataFrame with date index
-    :param trades: DataFrame containing trades (requires columns close_date and profit_abs)
-    :param col_name: Column name that will be assigned the results
-    :param timeframe: Timeframe used during the operations
-    :return: Returns df with one additional column, col_name, containing the cumulative profit.
-    :raise: ValueError if trade-dataframe was found empty.
-    """
-    if len(trades) == 0:
-        raise ValueError("Trade dataframe empty.")
-    from freqtrade.exchange import timeframe_to_minutes
-    timeframe_minutes = timeframe_to_minutes(timeframe)
-    # Resample to timeframe to make sure trades match candles
-    _trades_sum = trades.resample(f'{timeframe_minutes}min', on='close_date'
-                                  )[['profit_abs']].sum()
-    df.loc[:, col_name] = _trades_sum['profit_abs'].cumsum()
-    # Set first value to 0
-    df.loc[df.iloc[0].name, col_name] = 0
-    # FFill to get continuous
-    df[col_name] = df[col_name].ffill()
-    return df
-
-
-def _calc_drawdown_series(profit_results: pd.DataFrame, *, date_col: str, value_col: str,
-                          starting_balance: Optional[float] = 0.0) -> pd.DataFrame:
-    max_drawdown_df = pd.DataFrame()
-    max_drawdown_df['cumulative'] = profit_results[value_col].cumsum()
-    max_drawdown_df['high_value'] = max_drawdown_df['cumulative'].cummax()
-    max_drawdown_df['drawdown'] = max_drawdown_df['cumulative'] - max_drawdown_df['high_value']
-    max_drawdown_df['date'] = profit_results.loc[:, date_col]
-    if starting_balance:
-        cumulative_balance = starting_balance + max_drawdown_df['cumulative']
-        max_balance = starting_balance + max_drawdown_df['high_value']
-        max_drawdown_df['drawdown_relative'] = ((max_balance - cumulative_balance) / max_balance)
-    else:
-        # NOTE: This is not completely accurate,
-        # but might good enough if starting_balance is not available
-        max_drawdown_df['drawdown_relative'] = (
-            (max_drawdown_df['high_value'] - max_drawdown_df['cumulative'])
-            / max_drawdown_df['high_value'])
-    return max_drawdown_df
-
-
-def calculate_underwater(trades: pd.DataFrame, *, date_col: str = 'close_date',
-                         value_col: str = 'profit_ratio', starting_balance: Optional[float] = 0.0
-                         ):
-    """
-    Calculate max drawdown and the corresponding close dates
-    :param trades: DataFrame containing trades (requires columns close_date and profit_ratio)
-    :param date_col: Column in DataFrame to use for dates (defaults to 'close_date')
-    :param value_col: Column in DataFrame to use for values (defaults to 'profit_ratio')
-    :return: Tuple (float, highdate, lowdate, highvalue, lowvalue) with absolute max drawdown,
-             high and low time and high and low value.
-    :raise: ValueError if trade-dataframe was found empty.
-    """
-    if len(trades) == 0:
-        raise ValueError("Trade dataframe empty.")
-    profit_results = trades.sort_values(date_col).reset_index(drop=True)
-    max_drawdown_df = _calc_drawdown_series(
-        profit_results,
-        date_col=date_col,
-        value_col=value_col,
-        starting_balance=starting_balance)
-
-    return max_drawdown_df
-
-
-def calculate_max_drawdown(trades: pd.DataFrame, *, date_col: str = 'close_date',
-                           value_col: str = 'profit_abs', starting_balance: float = 0,
-                           relative: bool = False
-                           ) -> Tuple[float, pd.Timestamp, pd.Timestamp, float, float, float]:
-    """
-    Calculate max drawdown and the corresponding close dates
-    :param trades: DataFrame containing trades (requires columns close_date and profit_ratio)
-    :param date_col: Column in DataFrame to use for dates (defaults to 'close_date')
-    :param value_col: Column in DataFrame to use for values (defaults to 'profit_abs')
-    :param starting_balance: Portfolio starting balance - properly calculate relative drawdown.
-    :return: Tuple (float, highdate, lowdate, highvalue, lowvalue, relative_drawdown)
-             with absolute max drawdown, high and low time and high and low value,
-             and the relative account drawdown
-    :raise: ValueError if trade-dataframe was found empty.
-    """
-    if len(trades) == 0:
-        raise ValueError("Trade dataframe empty.")
-    profit_results = trades.sort_values(date_col).reset_index(drop=True)
-    max_drawdown_df = _calc_drawdown_series(
-        profit_results,
-        date_col=date_col,
-        value_col=value_col,
-        starting_balance=starting_balance
-    )
-
-    idxmin = max_drawdown_df['drawdown_relative'].idxmax() if relative \
-        else max_drawdown_df['drawdown'].idxmin()
-    if idxmin == 0:
-        raise ValueError("No losing trade, therefore no drawdown.")
-    high_date = profit_results.loc[max_drawdown_df.iloc[:idxmin]['high_value'].idxmax(), date_col]
-    low_date = profit_results.loc[idxmin, date_col]
-    high_val = max_drawdown_df.loc[max_drawdown_df.iloc[:idxmin]
-                                   ['high_value'].idxmax(), 'cumulative']
-    low_val = max_drawdown_df.loc[idxmin, 'cumulative']
-    max_drawdown_rel = max_drawdown_df.loc[idxmin, 'drawdown_relative']
-
-    return (
-        abs(min(max_drawdown_df['drawdown'])),
-        high_date,
-        low_date,
-        high_val,
-        low_val,
-        max_drawdown_rel
-    )
-
-
-def calculate_csum(trades: pd.DataFrame, starting_balance: float = 0) -> Tuple[float, float]:
-    """
-    Calculate min/max cumsum of trades, to show if the wallet/stake amount ratio is sane
-    :param trades: DataFrame containing trades (requires columns close_date and profit_percent)
-    :param starting_balance: Add starting balance to results, to show the wallets high / low points
-    :return: Tuple (float, float) with cumsum of profit_abs
-    :raise: ValueError if trade-dataframe was found empty.
-    """
-    if len(trades) == 0:
-        raise ValueError("Trade dataframe empty.")
-
-    csum_df = pd.DataFrame()
-    csum_df['sum'] = trades['profit_abs'].cumsum()
-    csum_min = csum_df['sum'].min() + starting_balance
-    csum_max = csum_df['sum'].max() + starting_balance
-
-    return csum_min, csum_max
-
-
-def calculate_cagr(days_passed: int, starting_balance: float, final_balance: float) -> float:
-    """
-    Calculate CAGR
-    :param days_passed: Days passed between start and ending balance
-    :param starting_balance: Starting balance
-    :param final_balance: Final balance to calculate CAGR against
-    :return: CAGR
-    """
-    return (final_balance / starting_balance) ** (1 / (days_passed / 365)) - 1
freqtrade/data/metrics.py (new file, 192 lines)
@@ -0,0 +1,192 @@
+import logging
+from typing import Dict, Optional, Tuple
+
+import numpy as np
+import pandas as pd
+
+
+logger = logging.getLogger(__name__)
+
+
+def calculate_market_change(data: Dict[str, pd.DataFrame], column: str = "close") -> float:
+    """
+    Calculate market change based on "column".
+    Calculation is done by taking the first non-null and the last non-null element of each column
+    and calculating the pctchange as "(last - first) / first".
+    Then the results per pair are combined as mean.
+
+    :param data: Dict of Dataframes, dict key should be pair.
+    :param column: Column in the original dataframes to use
+    :return:
+    """
+    tmp_means = []
+    for pair, df in data.items():
+        start = df[column].dropna().iloc[0]
+        end = df[column].dropna().iloc[-1]
+        tmp_means.append((end - start) / start)
+
+    return float(np.mean(tmp_means))
+
+
+def combine_dataframes_with_mean(data: Dict[str, pd.DataFrame],
+                                 column: str = "close") -> pd.DataFrame:
+    """
+    Combine multiple dataframes "column"
+    :param data: Dict of Dataframes, dict key should be pair.
+    :param column: Column in the original dataframes to use
+    :return: DataFrame with the column renamed to the dict key, and a column
+        named mean, containing the mean of all pairs.
+    :raise: ValueError if no data is provided.
+    """
+    df_comb = pd.concat([data[pair].set_index('date').rename(
+        {column: pair}, axis=1)[pair] for pair in data], axis=1)
+
+    df_comb['mean'] = df_comb.mean(axis=1)
+
+    return df_comb
+
+
+def create_cum_profit(df: pd.DataFrame, trades: pd.DataFrame, col_name: str,
+                      timeframe: str) -> pd.DataFrame:
+    """
+    Adds a column `col_name` with the cumulative profit for the given trades array.
+    :param df: DataFrame with date index
+    :param trades: DataFrame containing trades (requires columns close_date and profit_abs)
+    :param col_name: Column name that will be assigned the results
+    :param timeframe: Timeframe used during the operations
+    :return: Returns df with one additional column, col_name, containing the cumulative profit.
+    :raise: ValueError if trade-dataframe was found empty.
+    """
+    if len(trades) == 0:
+        raise ValueError("Trade dataframe empty.")
+    from freqtrade.exchange import timeframe_to_minutes
+    timeframe_minutes = timeframe_to_minutes(timeframe)
+    # Resample to timeframe to make sure trades match candles
+    _trades_sum = trades.resample(f'{timeframe_minutes}min', on='close_date'
+                                  )[['profit_abs']].sum()
+    df.loc[:, col_name] = _trades_sum['profit_abs'].cumsum()
+    # Set first value to 0
+    df.loc[df.iloc[0].name, col_name] = 0
+    # FFill to get continuous
+    df[col_name] = df[col_name].ffill()
+    return df
+
+
+def _calc_drawdown_series(profit_results: pd.DataFrame, *, date_col: str, value_col: str,
+                          starting_balance: Optional[float] = 0.0) -> pd.DataFrame:
+    max_drawdown_df = pd.DataFrame()
+    max_drawdown_df['cumulative'] = profit_results[value_col].cumsum()
+    max_drawdown_df['high_value'] = max_drawdown_df['cumulative'].cummax()
+    max_drawdown_df['drawdown'] = max_drawdown_df['cumulative'] - max_drawdown_df['high_value']
+    max_drawdown_df['date'] = profit_results.loc[:, date_col]
+    if starting_balance:
+        cumulative_balance = starting_balance + max_drawdown_df['cumulative']
+        max_balance = starting_balance + max_drawdown_df['high_value']
+        max_drawdown_df['drawdown_relative'] = ((max_balance - cumulative_balance) / max_balance)
+    else:
+        # NOTE: This is not completely accurate,
+        # but might good enough if starting_balance is not available
+        max_drawdown_df['drawdown_relative'] = (
+            (max_drawdown_df['high_value'] - max_drawdown_df['cumulative'])
+            / max_drawdown_df['high_value'])
+    return max_drawdown_df
+
+
+def calculate_underwater(trades: pd.DataFrame, *, date_col: str = 'close_date',
+                         value_col: str = 'profit_ratio', starting_balance: Optional[float] = 0.0
+                         ):
+    """
+    Calculate max drawdown and the corresponding close dates
+    :param trades: DataFrame containing trades (requires columns close_date and profit_ratio)
+    :param date_col: Column in DataFrame to use for dates (defaults to 'close_date')
+    :param value_col: Column in DataFrame to use for values (defaults to 'profit_ratio')
+    :return: Tuple (float, highdate, lowdate, highvalue, lowvalue) with absolute max drawdown,
+             high and low time and high and low value.
+    :raise: ValueError if trade-dataframe was found empty.
+    """
+    if len(trades) == 0:
+        raise ValueError("Trade dataframe empty.")
+    profit_results = trades.sort_values(date_col).reset_index(drop=True)
+    max_drawdown_df = _calc_drawdown_series(
+        profit_results,
+        date_col=date_col,
+        value_col=value_col,
+        starting_balance=starting_balance)
+
+    return max_drawdown_df
+
+
+def calculate_max_drawdown(trades: pd.DataFrame, *, date_col: str = 'close_date',
+                           value_col: str = 'profit_abs', starting_balance: float = 0,
+                           relative: bool = False
+                           ) -> Tuple[float, pd.Timestamp, pd.Timestamp, float, float, float]:
+    """
+    Calculate max drawdown and the corresponding close dates
+    :param trades: DataFrame containing trades (requires columns close_date and profit_ratio)
+    :param date_col: Column in DataFrame to use for dates (defaults to 'close_date')
+    :param value_col: Column in DataFrame to use for values (defaults to 'profit_abs')
+    :param starting_balance: Portfolio starting balance - properly calculate relative drawdown.
+    :return: Tuple (float, highdate, lowdate, highvalue, lowvalue, relative_drawdown)
+             with absolute max drawdown, high and low time and high and low value,
+             and the relative account drawdown
+    :raise: ValueError if trade-dataframe was found empty.
+    """
+    if len(trades) == 0:
+        raise ValueError("Trade dataframe empty.")
+    profit_results = trades.sort_values(date_col).reset_index(drop=True)
+    max_drawdown_df = _calc_drawdown_series(
+        profit_results,
+        date_col=date_col,
+        value_col=value_col,
+        starting_balance=starting_balance
+    )
+
+    idxmin = max_drawdown_df['drawdown_relative'].idxmax() if relative \
+        else max_drawdown_df['drawdown'].idxmin()
+    if idxmin == 0:
+        raise ValueError("No losing trade, therefore no drawdown.")
+    high_date = profit_results.loc[max_drawdown_df.iloc[:idxmin]['high_value'].idxmax(), date_col]
+    low_date = profit_results.loc[idxmin, date_col]
+    high_val = max_drawdown_df.loc[max_drawdown_df.iloc[:idxmin]
+                                   ['high_value'].idxmax(), 'cumulative']
+    low_val = max_drawdown_df.loc[idxmin, 'cumulative']
+    max_drawdown_rel = max_drawdown_df.loc[idxmin, 'drawdown_relative']
+
+    return (
+        abs(min(max_drawdown_df['drawdown'])),
+        high_date,
+        low_date,
+        high_val,
+        low_val,
+        max_drawdown_rel
+    )
+
+
+def calculate_csum(trades: pd.DataFrame, starting_balance: float = 0) -> Tuple[float, float]:
+    """
+    Calculate min/max cumsum of trades, to show if the wallet/stake amount ratio is sane
+    :param trades: DataFrame containing trades (requires columns close_date and profit_percent)
+    :param starting_balance: Add starting balance to results, to show the wallets high / low points
+    :return: Tuple (float, float) with cumsum of profit_abs
+    :raise: ValueError if trade-dataframe was found empty.
+    """
+    if len(trades) == 0:
+        raise ValueError("Trade dataframe empty.")
+
+    csum_df = pd.DataFrame()
+    csum_df['sum'] = trades['profit_abs'].cumsum()
+    csum_min = csum_df['sum'].min() + starting_balance
+    csum_max = csum_df['sum'].max() + starting_balance
+
+    return csum_min, csum_max
+
+
+def calculate_cagr(days_passed: int, starting_balance: float, final_balance: float) -> float:
+    """
+    Calculate CAGR
+    :param days_passed: Days passed between start and ending balance
+    :param starting_balance: Starting balance
+    :param final_balance: Final balance to calculate CAGR against
+    :return: CAGR
+    """
+    return (final_balance / starting_balance) ** (1 / (days_passed / 365)) - 1
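For a quick sanity check (not part of the commit), the relocated helpers can be exercised on a made-up trades frame; calculate_max_drawdown() needs at least one losing trade, otherwise it raises the ValueError shown above:

    # Hypothetical usage sketch for the relocated metrics helpers.
    import pandas as pd

    from freqtrade.data.metrics import calculate_cagr, calculate_csum, calculate_max_drawdown

    trades = pd.DataFrame({
        'close_date': pd.date_range('2022-01-01', periods=4, freq='1D'),
        'profit_abs': [10.0, -4.0, -6.0, 8.0],          # made-up profits, includes losses
        'profit_ratio': [0.01, -0.004, -0.006, 0.008],
    })

    # Absolute drawdown plus the high/low dates and values around it.
    drawdown_abs, high_date, low_date, high_val, low_val, drawdown_rel = calculate_max_drawdown(
        trades, value_col='profit_abs', starting_balance=1000)
    print(drawdown_abs)                                  # 10.0 for this data

    print(calculate_csum(trades, starting_balance=1000))  # wallet low / high points
    print(calculate_cagr(365, 1000, 1100))                # ~0.1, i.e. 10% over one year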
@@ -1038,7 +1038,7 @@ class Backtesting:
                               timerange: TimeRange):
         self.progress.init_step(BacktestState.ANALYZE, 0)

-        logger.info("Running backtesting for Strategy %s", strat.get_strategy_name())
+        logger.info(f"Running backtesting for Strategy {strat.get_strategy_name()}")
         backtest_start_time = datetime.now(timezone.utc)
         self._set_strategy(strat)
@@ -10,7 +10,7 @@ from typing import Any, Dict

 from pandas import DataFrame

-from freqtrade.data.btanalysis import calculate_max_drawdown
+from freqtrade.data.metrics import calculate_max_drawdown
 from freqtrade.optimize.hyperopt import IHyperOptLoss

@@ -8,7 +8,7 @@ from datetime import datetime

 from pandas import DataFrame

-from freqtrade.data.btanalysis import calculate_max_drawdown
+from freqtrade.data.metrics import calculate_max_drawdown
 from freqtrade.optimize.hyperopt import IHyperOptLoss

@@ -8,7 +8,7 @@ from typing import Dict

 from pandas import DataFrame

-from freqtrade.data.btanalysis import calculate_underwater
+from freqtrade.data.metrics import calculate_underwater
 from freqtrade.optimize.hyperopt import IHyperOptLoss

@@ -9,7 +9,7 @@ individual needs.
 """
 from pandas import DataFrame

-from freqtrade.data.btanalysis import calculate_max_drawdown
+from freqtrade.data.metrics import calculate_max_drawdown
 from freqtrade.optimize.hyperopt import IHyperOptLoss

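The loss classes above only switch their import to the new module; a user-defined loss written against the new layout would look roughly like the sketch below. The class name and weighting are made up, and the hyperopt_loss_function signature is abbreviated to the arguments actually used:

    # Hypothetical custom hyperopt loss - illustrates only the new import location.
    from pandas import DataFrame

    from freqtrade.data.metrics import calculate_max_drawdown
    from freqtrade.optimize.hyperopt import IHyperOptLoss


    class DrawdownAwareLoss(IHyperOptLoss):
        @staticmethod
        def hyperopt_loss_function(results: DataFrame, trade_count: int,
                                   *args, **kwargs) -> float:
            total_profit = results['profit_abs'].sum()
            try:
                # First tuple element is the absolute max drawdown.
                max_dd = calculate_max_drawdown(results, value_col='profit_abs')[0]
            except ValueError:
                max_dd = 0.0  # no losing trade, nothing to penalise
            # Smaller return values are treated as better by hyperopt.
            return -total_profit + 2 * max_dd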
@@ -9,8 +9,8 @@ from pandas import DataFrame, to_datetime
 from tabulate import tabulate

 from freqtrade.constants import DATETIME_PRINT_FORMAT, LAST_BT_RESULT_FN, UNLIMITED_STAKE_AMOUNT
-from freqtrade.data.btanalysis import (calculate_cagr, calculate_csum, calculate_market_change,
-                                       calculate_max_drawdown)
+from freqtrade.data.metrics import (calculate_cagr, calculate_csum, calculate_market_change,
+                                    calculate_max_drawdown)
 from freqtrade.misc import decimals_per_coin, file_dump_joblib, file_dump_json, round_coin_value
 from freqtrade.optimize.backtest_caching import get_backtest_metadata_filename

@@ -5,12 +5,13 @@ from typing import Any, Dict, List, Optional
 import pandas as pd

 from freqtrade.configuration import TimeRange
-from freqtrade.data.btanalysis import (analyze_trade_parallelism, calculate_max_drawdown,
-                                       calculate_underwater, combine_dataframes_with_mean,
-                                       create_cum_profit, extract_trades_of_period, load_trades)
+from freqtrade.data.btanalysis import (analyze_trade_parallelism, extract_trades_of_period,
+                                       load_trades)
 from freqtrade.data.converter import trim_dataframe
 from freqtrade.data.dataprovider import DataProvider
 from freqtrade.data.history import get_timerange, load_data
+from freqtrade.data.metrics import (calculate_max_drawdown, calculate_underwater,
+                                    combine_dataframes_with_mean, create_cum_profit)
 from freqtrade.enums import CandleType
 from freqtrade.exceptions import OperationalException
 from freqtrade.exchange import timeframe_to_prev_date, timeframe_to_seconds
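For context (again not part of the diff), plotting-style code consumes the relocated helpers roughly as follows; the candle and trade data below are made up, and a freqtrade installation is assumed:

    # Hypothetical sketch of the helpers now imported from freqtrade.data.metrics.
    import pandas as pd

    from freqtrade.data.metrics import combine_dataframes_with_mean, create_cum_profit

    dates = pd.date_range('2022-01-01', periods=6, freq='5min')
    # Made-up candles for two pairs; only 'date' and 'close' matter here.
    data = {
        'BTC/USDT': pd.DataFrame({'date': dates, 'close': [100, 101, 102, 101, 103, 104]}),
        'ETH/USDT': pd.DataFrame({'date': dates, 'close': [10, 10.2, 10.1, 10.3, 10.4, 10.5]}),
    }
    df_comb = combine_dataframes_with_mean(data)   # columns: BTC/USDT, ETH/USDT, mean

    # Made-up closed trades; create_cum_profit() needs close_date and profit_abs.
    trades = pd.DataFrame({
        'close_date': [dates[2], dates[4]],
        'profit_abs': [5.0, -2.0],
    })
    df_comb = create_cum_profit(df_comb, trades, col_name='cum_profit', timeframe='5m')
    print(df_comb[['mean', 'cum_profit']])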
@@ -5,7 +5,7 @@ from typing import Any, Dict

 import pandas as pd

-from freqtrade.data.btanalysis import calculate_max_drawdown
+from freqtrade.data.metrics import calculate_max_drawdown
 from freqtrade.persistence import Trade
 from freqtrade.plugins.protections import IProtection, ProtectionReturn

@@ -84,6 +84,7 @@ async def api_start_backtest(bt_settings: BacktestRequest, background_tasks: Bac
        lastconfig['enable_protections'] = btconfig.get('enable_protections')
        lastconfig['dry_run_wallet'] = btconfig.get('dry_run_wallet')

        ApiServer._bt.strategylist = [strat]
        ApiServer._bt.results = {}
        ApiServer._bt.load_prior_backtest()

@@ -8,14 +8,14 @@ from pandas import DataFrame, DateOffset, Timestamp, to_datetime

 from freqtrade.configuration import TimeRange
 from freqtrade.constants import LAST_BT_RESULT_FN
-from freqtrade.data.btanalysis import (BT_DATA_COLUMNS, analyze_trade_parallelism, calculate_cagr,
-                                       calculate_csum, calculate_market_change,
-                                       calculate_max_drawdown, calculate_underwater,
-                                       combine_dataframes_with_mean, create_cum_profit,
+from freqtrade.data.btanalysis import (BT_DATA_COLUMNS, analyze_trade_parallelism,
                                        extract_trades_of_period, get_latest_backtest_filename,
                                        get_latest_hyperopt_file, load_backtest_data,
                                        load_backtest_metadata, load_trades, load_trades_from_db)
 from freqtrade.data.history import load_data, load_pair_history
+from freqtrade.data.metrics import (calculate_cagr, calculate_csum, calculate_market_change,
+                                    calculate_max_drawdown, calculate_underwater,
+                                    combine_dataframes_with_mean, create_cum_profit)
 from freqtrade.exceptions import OperationalException
 from tests.conftest import CURRENT_TEST_STRATEGY, create_mock_trades
 from tests.conftest_trades import MOCK_TRADE_COUNT
@@ -1483,7 +1483,7 @@ def test_api_backtesting(botclient, mocker, fee, caplog, tmpdir):
     assert not result['running']
     assert result['status_msg'] == 'Backtest reset'
     ftbot.config['export'] = 'trades'
-    ftbot.config['backtest_cache'] = 'none'
+    ftbot.config['backtest_cache'] = 'day'
     ftbot.config['user_data_dir'] = Path(tmpdir)
     ftbot.config['exportfilename'] = Path(tmpdir) / "backtest_results"
     ftbot.config['exportfilename'].mkdir()
@@ -1556,19 +1556,19 @@ def test_api_backtesting(botclient, mocker, fee, caplog, tmpdir):

     ApiServer._bgtask_running = False

-    mocker.patch('freqtrade.optimize.backtesting.Backtesting.backtest_one_strategy',
-                 side_effect=DependencyException())
-    rc = client_post(client, f"{BASE_URI}/backtest", data=json.dumps(data))
-    assert log_has("Backtesting caused an error: ", caplog)
-
-    ftbot.config['backtest_cache'] = 'day'
-
     # Rerun backtest (should get previous result)
     rc = client_post(client, f"{BASE_URI}/backtest", data=json.dumps(data))
     assert_response(rc)
     result = rc.json()
     assert log_has_re('Reusing result of previous backtest.*', caplog)

+    data['stake_amount'] = 101
+
+    mocker.patch('freqtrade.optimize.backtesting.Backtesting.backtest_one_strategy',
+                 side_effect=DependencyException())
+    rc = client_post(client, f"{BASE_URI}/backtest", data=json.dumps(data))
+    assert log_has("Backtesting caused an error: ", caplog)
+
     # Delete backtesting to avoid leakage since the backtest-object may stick around.
     rc = client_delete(client, f"{BASE_URI}/backtest")
     assert_response(rc)
@@ -10,7 +10,8 @@ from plotly.subplots import make_subplots
 from freqtrade.commands import start_plot_dataframe, start_plot_profit
 from freqtrade.configuration import TimeRange
 from freqtrade.data import history
-from freqtrade.data.btanalysis import create_cum_profit, load_backtest_data
+from freqtrade.data.btanalysis import load_backtest_data
+from freqtrade.data.metrics import create_cum_profit
 from freqtrade.exceptions import OperationalException
 from freqtrade.plot.plotting import (add_areas, add_indicators, add_profit, create_plotconfig,
                                      generate_candlestick_graph, generate_plot_filename,