Merge remote-tracking branch 'origin/strategy_utils' into strategy_utils

hippocritical 2022-12-29 22:32:02 +01:00
commit a6356c2821
25 changed files with 517 additions and 488 deletions


@@ -147,15 +147,20 @@ jobs:
- name: Installation - macOS
if: runner.os == 'macOS'
run: |
# homebrew fails to update python 3.9.1 to 3.9.1.1 due to unlinking failure
brew update
# homebrew fails to update python due to unlinking failures
# https://github.com/actions/runner-images/issues/6817
rm /usr/local/bin/2to3 || true
# homebrew fails to update python from 3.9 to 3.10 due to another unlinking failure
rm /usr/local/bin/2to3-3.11 || true
rm /usr/local/bin/idle3 || true
rm /usr/local/bin/idle3.11 || true
rm /usr/local/bin/pydoc3 || true
rm /usr/local/bin/pydoc3.11 || true
rm /usr/local/bin/python3 || true
rm /usr/local/bin/python3.11 || true
rm /usr/local/bin/python3-config || true
# Ignore brew update failures - https://github.com/actions/runner-images/issues/6817
brew update || true
rm /usr/local/bin/python3.11-config || true
brew install hdf5 c-blosc
python -m pip install --upgrade pip wheel
export LD_LIBRARY_PATH=${HOME}/dependencies/lib:$LD_LIBRARY_PATH


@@ -15,7 +15,7 @@ repos:
additional_dependencies:
- types-cachetools==5.2.1
- types-filelock==3.2.7
- types-requests==2.28.11.5
- types-requests==2.28.11.7
- types-tabulate==0.9.0.0
- types-python-dateutil==2.8.19.5
# stages: [push]


@@ -15,7 +15,7 @@ Mandatory parameters are marked as **Required** and have to be set in one of the
| `identifier` | **Required.** <br> A unique ID for the current model. If models are saved to disk, the `identifier` allows for reloading specific pre-trained models/data. <br> **Datatype:** String.
| `live_retrain_hours` | Frequency of retraining during dry/live runs. <br> **Datatype:** Float > 0. <br> Default: `0` (models retrain as often as possible).
| `expiration_hours` | Avoid making predictions if a model is more than `expiration_hours` old. <br> **Datatype:** Positive integer. <br> Default: `0` (models never expire).
| `purge_old_models` | Delete obsolete models. <br> **Datatype:** Boolean. <br> Default: `False` (all historic models remain on disk).
| `purge_old_models` | Delete all unused models during live runs (not relevant to backtesting). If set to false (not default), dry/live runs will accumulate all unused models on disk. <br> **Datatype:** Boolean. <br> Default: `True`.
| `save_backtest_models` | Save models to disk when running backtesting. Backtesting operates most efficiently by saving the prediction data and reusing them directly for subsequent runs (when you wish to tune entry/exit parameters). Saving backtesting models to disk also allows you to use the same model files for starting a dry/live instance with the same model `identifier`. <br> **Datatype:** Boolean. <br> Default: `False` (no models are saved).
| `fit_live_predictions_candles` | Number of historical candles to use for computing target (label) statistics from prediction data, instead of from the training dataset (more information can be found [here](freqai-configuration.md#creating-a-dynamic-target-threshold)). <br> **Datatype:** Positive integer.
| `follow_mode` | Use a `follower` that will look for models associated with a specific `identifier` and load those for inferencing. A `follower` will **not** train new models. <br> **Datatype:** Boolean. <br> Default: `False`.
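
Taken together, a minimal `freqai` block exercising the parameters above could look as follows. This is an illustrative sketch only (the values are examples, and the config normally lives in the JSON configuration file; it is rendered here as an equivalent Python dict):

config = {
    "freqai": {
        "identifier": "example-model-v1",     # Required: names the model on disk
        "live_retrain_hours": 0.5,            # retrain every 30 minutes in dry/live
        "expiration_hours": 1,                # skip predictions from models older than 1 hour
        "purge_old_models": True,             # default True: delete unused models during live runs
        "save_backtest_models": True,         # keep backtest models for reuse in dry/live
        "fit_live_predictions_candles": 600,
    }
}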


@@ -20,8 +20,8 @@ from freqtrade.persistence import LocalTrade, Trade, init_db
logger = logging.getLogger(__name__)
# Newest format
BT_DATA_COLUMNS = ['pair', 'stake_amount', 'amount', 'open_date', 'close_date',
'open_rate', 'close_rate',
BT_DATA_COLUMNS = ['pair', 'stake_amount', 'max_stake_amount', 'amount',
'open_date', 'close_date', 'open_rate', 'close_rate',
'fee_open', 'fee_close', 'trade_duration',
'profit_ratio', 'profit_abs', 'exit_reason',
'initial_stop_loss_abs', 'initial_stop_loss_ratio', 'stop_loss_abs',
@@ -241,6 +241,33 @@ def find_existing_backtest_stats(dirname: Union[Path, str], run_ids: Dict[str, s
return results
def _load_backtest_data_df_compatibility(df: pd.DataFrame) -> pd.DataFrame:
"""
Compatibility support for older backtest data.
"""
df['open_date'] = pd.to_datetime(df['open_date'],
utc=True,
infer_datetime_format=True
)
df['close_date'] = pd.to_datetime(df['close_date'],
utc=True,
infer_datetime_format=True
)
# Compatibility support for pre-short columns
if 'is_short' not in df.columns:
df['is_short'] = False
if 'leverage' not in df.columns:
df['leverage'] = 1.0
if 'enter_tag' not in df.columns:
df['enter_tag'] = df['buy_tag']
df = df.drop(['buy_tag'], axis=1)
if 'max_stake_amount' not in df.columns:
df['max_stake_amount'] = df['stake_amount']
if 'orders' not in df.columns:
df['orders'] = None
return df
def load_backtest_data(filename: Union[Path, str], strategy: Optional[str] = None) -> pd.DataFrame:
"""
Load backtest data file.
@@ -269,24 +296,7 @@ def load_backtest_data(filename: Union[Path, str], strategy: Optional[str] = Non
data = data['strategy'][strategy]['trades']
df = pd.DataFrame(data)
if not df.empty:
df['open_date'] = pd.to_datetime(df['open_date'],
utc=True,
infer_datetime_format=True
)
df['close_date'] = pd.to_datetime(df['close_date'],
utc=True,
infer_datetime_format=True
)
# Compatibility support for pre-short columns
if 'is_short' not in df.columns:
df['is_short'] = 0
if 'leverage' not in df.columns:
df['leverage'] = 1.0
if 'enter_tag' not in df.columns:
df['enter_tag'] = df['buy_tag']
df = df.drop(['buy_tag'], axis=1)
if 'orders' not in df.columns:
df['orders'] = None
df = _load_backtest_data_df_compatibility(df)
else:
# old format - only with lists.

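The hunk above replaces the inline column-defaulting with a call to the new `_load_backtest_data_df_compatibility` helper. A minimal sketch of what the shim guarantees for an old result row (the sample data is made up, and importing a private helper is for illustration only):

import pandas as pd

from freqtrade.data.btanalysis import _load_backtest_data_df_compatibility

# Hypothetical pre-short row: no is_short/leverage/enter_tag/max_stake_amount/orders columns.
df = pd.DataFrame([{
    'pair': 'ETH/BTC', 'stake_amount': 0.001, 'buy_tag': 'signal_a',
    'open_date': '2018-01-29 18:40:00', 'close_date': '2018-01-29 22:00:00',
}])
df = _load_backtest_data_df_compatibility(df)

assert not df.loc[0, 'is_short']                # defaults to False for old data
assert df.loc[0, 'leverage'] == 1.0
assert df.loc[0, 'enter_tag'] == 'signal_a'     # migrated from the old 'buy_tag' column
assert df.loc[0, 'max_stake_amount'] == 0.001   # falls back to stake_amount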

@@ -31,7 +31,7 @@ class Binance(Exchange):
"ccxt_futures_name": "future"
}
_ft_has_futures: Dict = {
"stoploss_order_types": {"limit": "limit", "market": "market"},
"stoploss_order_types": {"limit": "stop", "market": "stop_market"},
"tickers_have_price": False,
}
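
With this override, a stoploss configured as "limit" is sent to ccxt as a Binance futures "stop" order, and "market" as "stop_market"; spot keeps "stop_loss_limit", as the test change further down reflects. A made-up lookup illustrating the mapping (the helper name is hypothetical, not freqtrade's internal API):

# The dict contents mirror the diff above and the related test change below.
STOPLOSS_ORDER_TYPES = {
    "futures": {"limit": "stop", "market": "stop_market"},
    "spot": {"limit": "stop_loss_limit"},
}

def resolve_stoploss_type(user_order_type: str, market: str) -> str:
    # Map the user-facing stoploss order type to the ccxt order type.
    return STOPLOSS_ORDER_TYPES[market][user_order_type]

assert resolve_stoploss_type("limit", "futures") == "stop"
assert resolve_stoploss_type("limit", "spot") == "stop_loss_limit"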


@@ -109,11 +109,10 @@ def migrate_trades_and_orders_table(
else:
is_short = get_column_def(cols, 'is_short', '0')
# Margin Properties
# Futures Properties
interest_rate = get_column_def(cols, 'interest_rate', '0.0')
# Futures properties
funding_fees = get_column_def(cols, 'funding_fees', '0.0')
max_stake_amount = get_column_def(cols, 'max_stake_amount', 'stake_amount')
# If ticker-interval existed use that, else null.
if has_column(cols, 'ticker_interval'):
@@ -162,7 +161,8 @@ def migrate_trades_and_orders_table(
timeframe, open_trade_value, close_profit_abs,
trading_mode, leverage, liquidation_price, is_short,
interest_rate, funding_fees, realized_profit,
amount_precision, price_precision, precision_mode, contract_size
amount_precision, price_precision, precision_mode, contract_size,
max_stake_amount
)
select id, lower(exchange), pair, {base_currency} base_currency,
{stake_currency} stake_currency,
@@ -190,7 +190,8 @@ def migrate_trades_and_orders_table(
{is_short} is_short, {interest_rate} interest_rate,
{funding_fees} funding_fees, {realized_profit} realized_profit,
{amount_precision} amount_precision, {price_precision} price_precision,
{precision_mode} precision_mode, {contract_size} contract_size
{precision_mode} precision_mode, {contract_size} contract_size,
{max_stake_amount} max_stake_amount
from {trade_back_name}
"""))
@@ -310,8 +311,8 @@ def check_migrate(engine, decl_base, previous_tables) -> None:
# if ('orders' not in previous_tables
# or not has_column(cols_orders, 'funding_fee')):
migrating = False
# if not has_column(cols_trades, 'contract_size'):
if not has_column(cols_orders, 'funding_fee'):
# if not has_column(cols_orders, 'funding_fee'):
if not has_column(cols_trades, 'max_stake_amount'):
migrating = True
logger.info(f"Running database migration for trades - "
f"backup: {table_back_name}, {order_table_bak_name}")


@@ -293,6 +293,7 @@ class LocalTrade():
close_profit: Optional[float] = None
close_profit_abs: Optional[float] = None
stake_amount: float = 0.0
max_stake_amount: float = 0.0
amount: float = 0.0
amount_requested: Optional[float] = None
open_date: datetime
@@ -469,8 +470,8 @@ class LocalTrade():
'amount': round(self.amount, 8),
'amount_requested': round(self.amount_requested, 8) if self.amount_requested else None,
'stake_amount': round(self.stake_amount, 8),
'max_stake_amount': round(self.max_stake_amount, 8) if self.max_stake_amount else None,
'strategy': self.strategy,
'buy_tag': self.enter_tag,
'enter_tag': self.enter_tag,
'timeframe': self.timeframe,
@@ -507,7 +508,6 @@ class LocalTrade():
'profit_pct': round(self.close_profit * 100, 2) if self.close_profit else None,
'profit_abs': self.close_profit_abs,
'sell_reason': self.exit_reason, # Deprecated
'exit_reason': self.exit_reason,
'exit_order_status': self.exit_order_status,
'stop_loss_abs': self.stop_loss,
@@ -876,6 +876,7 @@ class LocalTrade():
ZERO = FtPrecise(0.0)
current_amount = FtPrecise(0.0)
current_stake = FtPrecise(0.0)
max_stake_amount = FtPrecise(0.0)
total_stake = 0.0 # Total stake after all buy orders (does not subtract!)
avg_price = FtPrecise(0.0)
close_profit = 0.0
@@ -917,7 +918,9 @@ class LocalTrade():
exit_rate, amount=exit_amount, open_rate=avg_price)
else:
total_stake = total_stake + self._calc_open_trade_value(tmp_amount, price)
max_stake_amount += (tmp_amount * price)
self.funding_fees = funding_fees
self.max_stake_amount = float(max_stake_amount)
if close_profit:
self.close_profit = close_profit
@@ -1169,6 +1172,7 @@ class Trade(_DECL_BASE, LocalTrade):
close_profit = Column(Float)
close_profit_abs = Column(Float)
stake_amount = Column(Float, nullable=False)
max_stake_amount = Column(Float)
amount = Column(Float)
amount_requested = Column(Float)
open_date = Column(DateTime, nullable=False, default=datetime.utcnow)
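
The recalculation above adds `tmp_amount * price` for every filled entry order, so for a position-adjustment trade `max_stake_amount` records the total stake ever committed, while exit fills leave it untouched. A toy recomputation of that accumulation (plain floats instead of FtPrecise, made-up fills):

from typing import List, Tuple

def recalc_max_stake(fills: List[Tuple[str, float, float]]) -> float:
    # Each fill is (side, amount, price); only entry fills add to the maximum,
    # mirroring the accumulation in the diff above.
    max_stake_amount = 0.0
    for side, amount, price in fills:
        if side == "entry":
            max_stake_amount += amount * price
    return max_stake_amount

# Two DCA entries followed by a partial exit: the exit does not reduce the value.
fills = [("entry", 5000.0, 0.25), ("entry", 1000.0, 0.25), ("exit", 2000.0, 0.3)]
assert recalc_max_stake(fills) == 1500.0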


@@ -217,8 +217,8 @@ class TradeSchema(BaseModel):
amount: float
amount_requested: float
stake_amount: float
max_stake_amount: Optional[float]
strategy: str
buy_tag: Optional[str] # Deprecated
enter_tag: Optional[str]
timeframe: int
fee_open: Optional[float]
@@ -243,7 +243,6 @@ class TradeSchema(BaseModel):
profit_pct: Optional[float]
profit_abs: Optional[float]
profit_fiat: Optional[float]
sell_reason: Optional[str] # Deprecated
exit_reason: Optional[str]
exit_order_status: Optional[str]
stop_loss_abs: Optional[float]


@@ -10,24 +10,24 @@ coveralls==3.3.1
flake8==6.0.0
flake8-tidy-imports==4.8.0
mypy==0.991
pre-commit==2.20.0
pre-commit==2.21.0
pytest==7.2.0
pytest-asyncio==0.20.3
pytest-cov==4.0.0
pytest-mock==3.10.0
pytest-random-order==1.1.0
isort==5.11.3
isort==5.11.4
# For datetime mocking
time-machine==2.8.2
# fastapi testing
httpx==0.23.1
# Convert jupyter notebooks to markdown documents
nbconvert==7.2.6
nbconvert==7.2.7
# mypy types
types-cachetools==5.2.1
types-filelock==3.2.7
types-requests==2.28.11.5
types-requests==2.28.11.7
types-tabulate==0.9.0.0
types-python-dateutil==2.8.19.5


@@ -1,8 +1,8 @@
numpy==1.23.5
numpy==1.24.1
pandas==1.5.2
pandas-ta==0.3.14b
ccxt==2.4.27
ccxt==2.4.60
# Pin cryptography for now due to rust build errors with piwheels
cryptography==38.0.1; platform_machine == 'armv7l'
cryptography==38.0.4; platform_machine != 'armv7l'


@@ -1529,7 +1529,7 @@ def test_backtesting_show(mocker, testdatadir, capsys):
args = [
"backtesting-show",
"--export-filename",
f"{testdatadir / 'backtest_results/backtest-result_new.json'}",
f"{testdatadir / 'backtest_results/backtest-result.json'}",
"--show-pair-list"
]
pargs = get_args(args)


@@ -30,10 +30,10 @@ def test_get_latest_backtest_filename(testdatadir, mocker):
testdir_bt = testdatadir / "backtest_results"
res = get_latest_backtest_filename(testdir_bt)
assert res == 'backtest-result_new.json'
assert res == 'backtest-result.json'
res = get_latest_backtest_filename(str(testdir_bt))
assert res == 'backtest-result_new.json'
assert res == 'backtest-result.json'
mocker.patch("freqtrade.data.btanalysis.json_load", return_value={})
@@ -81,7 +81,7 @@ def test_load_backtest_data_old_format(testdatadir, mocker):
def test_load_backtest_data_new_format(testdatadir):
filename = testdatadir / "backtest_results/backtest-result_new.json"
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename)
assert isinstance(bt_data, DataFrame)
assert set(bt_data.columns) == set(BT_DATA_COLUMNS)
@@ -182,7 +182,7 @@ def test_extract_trades_of_period(testdatadir):
def test_analyze_trade_parallelism(testdatadir):
filename = testdatadir / "backtest_results/backtest-result_new.json"
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename)
res = analyze_trade_parallelism(bt_data, "5m")
@@ -256,7 +256,7 @@ def test_combine_dataframes_with_mean_no_data(testdatadir):
def test_create_cum_profit(testdatadir):
filename = testdatadir / "backtest_results/backtest-result_new.json"
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename)
timerange = TimeRange.parse_timerange("20180110-20180112")
@@ -268,11 +268,11 @@ def test_create_cum_profit(testdatadir):
"cum_profits", timeframe="5m")
assert "cum_profits" in cum_profits.columns
assert cum_profits.iloc[0]['cum_profits'] == 0
assert pytest.approx(cum_profits.iloc[-1]['cum_profits']) == 8.723007518796964e-06
assert pytest.approx(cum_profits.iloc[-1]['cum_profits']) == 9.0225563e-05
def test_create_cum_profit1(testdatadir):
filename = testdatadir / "backtest_results/backtest-result_new.json"
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename)
# Move close-time to "off" the candle, to make sure the logic still works
bt_data['close_date'] = bt_data.loc[:, 'close_date'] + DateOffset(seconds=20)
@@ -286,7 +286,7 @@ def test_create_cum_profit1(testdatadir):
"cum_profits", timeframe="5m")
assert "cum_profits" in cum_profits.columns
assert cum_profits.iloc[0]['cum_profits'] == 0
assert pytest.approx(cum_profits.iloc[-1]['cum_profits']) == 8.723007518796964e-06
assert pytest.approx(cum_profits.iloc[-1]['cum_profits']) == 9.0225563e-05
with pytest.raises(ValueError, match='Trade dataframe empty.'):
create_cum_profit(df.set_index('date'), bt_data[bt_data["pair"] == 'NOTAPAIR'],
@@ -294,18 +294,18 @@ def test_create_cum_profit1(testdatadir):
def test_calculate_max_drawdown(testdatadir):
filename = testdatadir / "backtest_results/backtest-result_new.json"
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename)
_, hdate, lowdate, hval, lval, drawdown = calculate_max_drawdown(
bt_data, value_col="profit_abs")
assert isinstance(drawdown, float)
assert pytest.approx(drawdown) == 0.12071099
assert pytest.approx(drawdown) == 0.29753914
assert isinstance(hdate, Timestamp)
assert isinstance(lowdate, Timestamp)
assert isinstance(hval, float)
assert isinstance(lval, float)
assert hdate == Timestamp('2018-01-25 01:30:00', tz='UTC')
assert lowdate == Timestamp('2018-01-25 03:50:00', tz='UTC')
assert hdate == Timestamp('2018-01-16 19:30:00', tz='UTC')
assert lowdate == Timestamp('2018-01-16 22:25:00', tz='UTC')
underwater = calculate_underwater(bt_data)
assert isinstance(underwater, DataFrame)
@@ -318,14 +318,15 @@ def test_calculate_max_drawdown(testdatadir):
def test_calculate_csum(testdatadir):
filename = testdatadir / "backtest_results/backtest-result_new.json"
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename)
csum_min, csum_max = calculate_csum(bt_data)
assert isinstance(csum_min, float)
assert isinstance(csum_max, float)
assert csum_min < 0.01
assert csum_max > 0.02
assert csum_min < csum_max
assert csum_min < 0.0001
assert csum_max > 0.0002
csum_min1, csum_max1 = calculate_csum(bt_data, 5)
assert csum_min1 == csum_min + 5


@@ -23,7 +23,7 @@ from tests.exchange.test_exchange import ccxt_exceptionhandlers
def test_stoploss_order_binance(default_conf, mocker, limitratio, expected, side, trademode):
api_mock = MagicMock()
order_id = 'test_prod_buy_{}'.format(randint(0, 10 ** 6))
order_type = 'stop_loss_limit' if trademode == TradingMode.SPOT else 'limit'
order_type = 'stop_loss_limit' if trademode == TradingMode.SPOT else 'stop'
api_mock.create_order = MagicMock(return_value={
'id': order_id,


@@ -710,6 +710,7 @@ def test_backtest_one(default_conf, fee, mocker, testdatadir) -> None:
expected = pd.DataFrame(
{'pair': [pair, pair],
'stake_amount': [0.001, 0.001],
'max_stake_amount': [0.001, 0.001],
'amount': [0.00957442, 0.0097064],
'open_date': pd.to_datetime([Arrow(2018, 1, 29, 18, 40, 0).datetime,
Arrow(2018, 1, 30, 3, 30, 0).datetime], utc=True


@@ -50,6 +50,7 @@ def test_backtest_position_adjustment(default_conf, fee, mocker, testdatadir) ->
expected = pd.DataFrame(
{'pair': [pair, pair],
'stake_amount': [500.0, 100.0],
'max_stake_amount': [500.0, 100],
'amount': [4806.87657523, 970.63960782],
'open_date': pd.to_datetime([Arrow(2018, 1, 29, 18, 40, 0).datetime,
Arrow(2018, 1, 30, 3, 30, 0).datetime], utc=True


@@ -308,7 +308,7 @@ def test_generate_pair_metrics():
def test_generate_daily_stats(testdatadir):
filename = testdatadir / "backtest_results/backtest-result_new.json"
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename)
res = generate_daily_stats(bt_data)
assert isinstance(res, dict)
@@ -328,7 +328,7 @@ def test_generate_daily_stats(testdatadir):
def test_generate_trading_stats(testdatadir):
filename = testdatadir / "backtest_results/backtest-result_new.json"
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename)
res = generate_trading_stats(bt_data)
assert isinstance(res, dict)
@@ -444,7 +444,7 @@ def test_generate_edge_table():
def test_generate_periodic_breakdown_stats(testdatadir):
filename = testdatadir / "backtest_results/backtest-result_new.json"
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename).to_dict(orient='records')
res = generate_periodic_breakdown_stats(bt_data, 'day')
@@ -472,7 +472,7 @@ def test__get_resample_from_period():
def test_show_sorted_pairlist(testdatadir, default_conf, capsys):
filename = testdatadir / "backtest_results/backtest-result_new.json"
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_stats(filename)
default_conf['backtest_show_pair_list'] = True


@@ -0,0 +1,412 @@
# pragma pylint: disable=missing-docstring, C0103
import logging
from pathlib import Path
from unittest.mock import MagicMock
import pytest
from sqlalchemy import create_engine, text
from freqtrade.constants import DEFAULT_DB_PROD_URL
from freqtrade.enums import TradingMode
from freqtrade.exceptions import OperationalException
from freqtrade.persistence import Trade, init_db
from freqtrade.persistence.migrations import get_last_sequence_ids, set_sequence_ids
from freqtrade.persistence.models import PairLock
from tests.conftest import log_has
spot, margin, futures = TradingMode.SPOT, TradingMode.MARGIN, TradingMode.FUTURES
def test_init_create_session(default_conf):
# Check if init creates a session
init_db(default_conf['db_url'])
assert hasattr(Trade, '_session')
assert 'scoped_session' in type(Trade._session).__name__
def test_init_custom_db_url(default_conf, tmpdir):
# Update path to a value other than default, but still in-memory
filename = f"{tmpdir}/freqtrade2_test.sqlite"
assert not Path(filename).is_file()
default_conf.update({'db_url': f'sqlite:///{filename}'})
init_db(default_conf['db_url'])
assert Path(filename).is_file()
r = Trade._session.execute(text("PRAGMA journal_mode"))
assert r.first() == ('wal',)
def test_init_invalid_db_url():
# Update path to a value other than default, but still in-memory
with pytest.raises(OperationalException, match=r'.*no valid database URL*'):
init_db('unknown:///some.url')
with pytest.raises(OperationalException, match=r'Bad db-url.*For in-memory database, pl.*'):
init_db('sqlite:///')
def test_init_prod_db(default_conf, mocker):
default_conf.update({'dry_run': False})
default_conf.update({'db_url': DEFAULT_DB_PROD_URL})
create_engine_mock = mocker.patch('freqtrade.persistence.models.create_engine', MagicMock())
init_db(default_conf['db_url'])
assert create_engine_mock.call_count == 1
assert create_engine_mock.mock_calls[0][1][0] == 'sqlite:///tradesv3.sqlite'
def test_init_dryrun_db(default_conf, tmpdir):
filename = f"{tmpdir}/freqtrade2_prod.sqlite"
assert not Path(filename).is_file()
default_conf.update({
'dry_run': True,
'db_url': f'sqlite:///{filename}'
})
init_db(default_conf['db_url'])
assert Path(filename).is_file()
def test_migrate_new(mocker, default_conf, fee, caplog):
"""
Test Database migration (starting with new pairformat)
"""
caplog.set_level(logging.DEBUG)
amount = 103.223
# Always create all columns apart from the last!
create_table_old = """CREATE TABLE IF NOT EXISTS "trades" (
id INTEGER NOT NULL,
exchange VARCHAR NOT NULL,
pair VARCHAR NOT NULL,
is_open BOOLEAN NOT NULL,
fee FLOAT NOT NULL,
open_rate FLOAT,
close_rate FLOAT,
close_profit FLOAT,
stake_amount FLOAT NOT NULL,
amount FLOAT,
open_date DATETIME NOT NULL,
close_date DATETIME,
open_order_id VARCHAR,
stop_loss FLOAT,
initial_stop_loss FLOAT,
max_rate FLOAT,
sell_reason VARCHAR,
strategy VARCHAR,
ticker_interval INTEGER,
stoploss_order_id VARCHAR,
PRIMARY KEY (id),
CHECK (is_open IN (0, 1))
);"""
create_table_order = """CREATE TABLE orders (
id INTEGER NOT NULL,
ft_trade_id INTEGER,
ft_order_side VARCHAR(25) NOT NULL,
ft_pair VARCHAR(25) NOT NULL,
ft_is_open BOOLEAN NOT NULL,
order_id VARCHAR(255) NOT NULL,
status VARCHAR(255),
symbol VARCHAR(25),
order_type VARCHAR(50),
side VARCHAR(25),
price FLOAT,
amount FLOAT,
filled FLOAT,
remaining FLOAT,
cost FLOAT,
order_date DATETIME,
order_filled_date DATETIME,
order_update_date DATETIME,
PRIMARY KEY (id)
);"""
insert_table_old = """INSERT INTO trades (exchange, pair, is_open, fee,
open_rate, stake_amount, amount, open_date,
stop_loss, initial_stop_loss, max_rate, ticker_interval,
open_order_id, stoploss_order_id)
VALUES ('binance', 'ETC/BTC', 1, {fee},
0.00258580, {stake}, {amount},
'2019-11-28 12:44:24.000000',
0.0, 0.0, 0.0, '5m',
'buy_order', 'dry_stop_order_id222')
""".format(fee=fee.return_value,
stake=default_conf.get("stake_amount"),
amount=amount
)
insert_orders = f"""
insert into orders (
ft_trade_id,
ft_order_side,
ft_pair,
ft_is_open,
order_id,
status,
symbol,
order_type,
side,
price,
amount,
filled,
remaining,
cost)
values (
1,
'buy',
'ETC/BTC',
0,
'dry_buy_order',
'closed',
'ETC/BTC',
'limit',
'buy',
0.00258580,
{amount},
{amount},
0,
{amount * 0.00258580}
),
(
1,
'buy',
'ETC/BTC',
1,
'dry_buy_order22',
'canceled',
'ETC/BTC',
'limit',
'buy',
0.00258580,
{amount},
{amount},
0,
{amount * 0.00258580}
),
(
1,
'stoploss',
'ETC/BTC',
1,
'dry_stop_order_id11X',
'canceled',
'ETC/BTC',
'limit',
'sell',
0.00258580,
{amount},
{amount},
0,
{amount * 0.00258580}
),
(
1,
'stoploss',
'ETC/BTC',
1,
'dry_stop_order_id222',
'open',
'ETC/BTC',
'limit',
'sell',
0.00258580,
{amount},
{amount},
0,
{amount * 0.00258580}
)
"""
engine = create_engine('sqlite://')
mocker.patch('freqtrade.persistence.models.create_engine', lambda *args, **kwargs: engine)
# Create table using the old format
with engine.begin() as connection:
connection.execute(text(create_table_old))
connection.execute(text(create_table_order))
connection.execute(text("create index ix_trades_is_open on trades(is_open)"))
connection.execute(text("create index ix_trades_pair on trades(pair)"))
connection.execute(text(insert_table_old))
connection.execute(text(insert_orders))
# fake previous backup
connection.execute(text("create table trades_bak as select * from trades"))
connection.execute(text("create table trades_bak1 as select * from trades"))
# Run init to test migration
init_db(default_conf['db_url'])
assert len(Trade.query.filter(Trade.id == 1).all()) == 1
trade = Trade.query.filter(Trade.id == 1).first()
assert trade.fee_open == fee.return_value
assert trade.fee_close == fee.return_value
assert trade.open_rate_requested is None
assert trade.close_rate_requested is None
assert trade.is_open == 1
assert trade.amount == amount
assert trade.amount_requested == amount
assert trade.stake_amount == default_conf.get("stake_amount")
assert trade.pair == "ETC/BTC"
assert trade.exchange == "binance"
assert trade.max_rate == 0.0
assert trade.min_rate is None
assert trade.stop_loss == 0.0
assert trade.initial_stop_loss == 0.0
assert trade.exit_reason is None
assert trade.strategy is None
assert trade.timeframe == '5m'
assert trade.stoploss_order_id == 'dry_stop_order_id222'
assert trade.stoploss_last_update is None
assert log_has("trying trades_bak1", caplog)
assert log_has("trying trades_bak2", caplog)
assert log_has("Running database migration for trades - backup: trades_bak2, orders_bak0",
caplog)
assert log_has("Database migration finished.", caplog)
assert pytest.approx(trade.open_trade_value) == trade._calc_open_trade_value(
trade.amount, trade.open_rate)
assert trade.close_profit_abs is None
assert trade.stake_amount == trade.max_stake_amount
orders = trade.orders
assert len(orders) == 4
assert orders[0].order_id == 'dry_buy_order'
assert orders[0].ft_order_side == 'buy'
assert orders[-1].order_id == 'dry_stop_order_id222'
assert orders[-1].ft_order_side == 'stoploss'
assert orders[-1].ft_is_open is True
assert orders[1].order_id == 'dry_buy_order22'
assert orders[1].ft_order_side == 'buy'
assert orders[1].ft_is_open is False
assert orders[2].order_id == 'dry_stop_order_id11X'
assert orders[2].ft_order_side == 'stoploss'
assert orders[2].ft_is_open is False
def test_migrate_too_old(mocker, default_conf, fee, caplog):
"""
Test Database migration (starting with new pairformat)
"""
caplog.set_level(logging.DEBUG)
amount = 103.223
create_table_old = """CREATE TABLE IF NOT EXISTS "trades" (
id INTEGER NOT NULL,
exchange VARCHAR NOT NULL,
pair VARCHAR NOT NULL,
is_open BOOLEAN NOT NULL,
fee_open FLOAT NOT NULL,
fee_close FLOAT NOT NULL,
open_rate FLOAT,
close_rate FLOAT,
close_profit FLOAT,
stake_amount FLOAT NOT NULL,
amount FLOAT,
open_date DATETIME NOT NULL,
close_date DATETIME,
open_order_id VARCHAR,
PRIMARY KEY (id),
CHECK (is_open IN (0, 1))
);"""
insert_table_old = """INSERT INTO trades (exchange, pair, is_open, fee_open, fee_close,
open_rate, stake_amount, amount, open_date)
VALUES ('binance', 'ETC/BTC', 1, {fee}, {fee},
0.00258580, {stake}, {amount},
'2019-11-28 12:44:24.000000')
""".format(fee=fee.return_value,
stake=default_conf.get("stake_amount"),
amount=amount
)
engine = create_engine('sqlite://')
mocker.patch('freqtrade.persistence.models.create_engine', lambda *args, **kwargs: engine)
# Create table using the old format
with engine.begin() as connection:
connection.execute(text(create_table_old))
connection.execute(text(insert_table_old))
# Run init to test migration
with pytest.raises(OperationalException, match=r'Your database seems to be very old'):
init_db(default_conf['db_url'])
def test_migrate_get_last_sequence_ids():
engine = MagicMock()
engine.begin = MagicMock()
engine.name = 'postgresql'
get_last_sequence_ids(engine, 'trades_bak', 'orders_bak')
assert engine.begin.call_count == 2
engine.reset_mock()
engine.begin.reset_mock()
engine.name = 'somethingelse'
get_last_sequence_ids(engine, 'trades_bak', 'orders_bak')
assert engine.begin.call_count == 0
def test_migrate_set_sequence_ids():
engine = MagicMock()
engine.begin = MagicMock()
engine.name = 'postgresql'
set_sequence_ids(engine, 22, 55, 5)
assert engine.begin.call_count == 1
engine.reset_mock()
engine.begin.reset_mock()
engine.name = 'somethingelse'
set_sequence_ids(engine, 22, 55, 6)
assert engine.begin.call_count == 0
def test_migrate_pairlocks(mocker, default_conf, fee, caplog):
"""
Test Database migration (starting with new pairformat)
"""
caplog.set_level(logging.DEBUG)
# Always create all columns apart from the last!
create_table_old = """CREATE TABLE pairlocks (
id INTEGER NOT NULL,
pair VARCHAR(25) NOT NULL,
reason VARCHAR(255),
lock_time DATETIME NOT NULL,
lock_end_time DATETIME NOT NULL,
active BOOLEAN NOT NULL,
PRIMARY KEY (id)
)
"""
create_index1 = "CREATE INDEX ix_pairlocks_pair ON pairlocks (pair)"
create_index2 = "CREATE INDEX ix_pairlocks_lock_end_time ON pairlocks (lock_end_time)"
create_index3 = "CREATE INDEX ix_pairlocks_active ON pairlocks (active)"
insert_table_old = """INSERT INTO pairlocks (
id, pair, reason, lock_time, lock_end_time, active)
VALUES (1, 'ETH/BTC', 'Auto lock', '2021-07-12 18:41:03', '2021-07-11 18:45:00', 1)
"""
insert_table_old2 = """INSERT INTO pairlocks (
id, pair, reason, lock_time, lock_end_time, active)
VALUES (2, '*', 'Lock all', '2021-07-12 18:41:03', '2021-07-12 19:00:00', 1)
"""
engine = create_engine('sqlite://')
mocker.patch('freqtrade.persistence.models.create_engine', lambda *args, **kwargs: engine)
# Create table using the old format
with engine.begin() as connection:
connection.execute(text(create_table_old))
connection.execute(text(insert_table_old))
connection.execute(text(insert_table_old2))
connection.execute(text(create_index1))
connection.execute(text(create_index2))
connection.execute(text(create_index3))
init_db(default_conf['db_url'])
assert len(PairLock.query.all()) == 2
assert len(PairLock.query.filter(PairLock.pair == '*').all()) == 1
pairlocks = PairLock.query.filter(PairLock.pair == 'ETH/BTC').all()
assert len(pairlocks) == 1
assert pairlocks[0].pair == 'ETH/BTC'
assert pairlocks[0].side == '*'


@@ -1,78 +1,20 @@
# pragma pylint: disable=missing-docstring, C0103
import logging
from datetime import datetime, timedelta, timezone
from pathlib import Path
from types import FunctionType
from unittest.mock import MagicMock
import arrow
import pytest
from sqlalchemy import create_engine, text
from freqtrade.constants import DATETIME_PRINT_FORMAT, DEFAULT_DB_PROD_URL
from freqtrade.constants import DATETIME_PRINT_FORMAT
from freqtrade.enums import TradingMode
from freqtrade.exceptions import DependencyException, OperationalException
from freqtrade.exceptions import DependencyException
from freqtrade.persistence import LocalTrade, Order, Trade, init_db
from freqtrade.persistence.migrations import get_last_sequence_ids, set_sequence_ids
from freqtrade.persistence.models import PairLock
from tests.conftest import create_mock_trades, create_mock_trades_with_leverage, log_has, log_has_re
spot, margin, futures = TradingMode.SPOT, TradingMode.MARGIN, TradingMode.FUTURES
def test_init_create_session(default_conf):
# Check if init creates a session
init_db(default_conf['db_url'])
assert hasattr(Trade, '_session')
assert 'scoped_session' in type(Trade._session).__name__
def test_init_custom_db_url(default_conf, tmpdir):
# Update path to a value other than default, but still in-memory
filename = f"{tmpdir}/freqtrade2_test.sqlite"
assert not Path(filename).is_file()
default_conf.update({'db_url': f'sqlite:///{filename}'})
init_db(default_conf['db_url'])
assert Path(filename).is_file()
r = Trade._session.execute(text("PRAGMA journal_mode"))
assert r.first() == ('wal',)
def test_init_invalid_db_url():
# Update path to a value other than default, but still in-memory
with pytest.raises(OperationalException, match=r'.*no valid database URL*'):
init_db('unknown:///some.url')
with pytest.raises(OperationalException, match=r'Bad db-url.*For in-memory database, pl.*'):
init_db('sqlite:///')
def test_init_prod_db(default_conf, mocker):
default_conf.update({'dry_run': False})
default_conf.update({'db_url': DEFAULT_DB_PROD_URL})
create_engine_mock = mocker.patch('freqtrade.persistence.models.create_engine', MagicMock())
init_db(default_conf['db_url'])
assert create_engine_mock.call_count == 1
assert create_engine_mock.mock_calls[0][1][0] == 'sqlite:///tradesv3.sqlite'
def test_init_dryrun_db(default_conf, tmpdir):
filename = f"{tmpdir}/freqtrade2_prod.sqlite"
assert not Path(filename).is_file()
default_conf.update({
'dry_run': True,
'db_url': f'sqlite:///{filename}'
})
init_db(default_conf['db_url'])
assert Path(filename).is_file()
@pytest.mark.parametrize('is_short', [False, True])
@pytest.mark.usefixtures("init_persistence")
def test_enter_exit_side(fee, is_short):
@@ -316,8 +258,7 @@ def test_interest(fee, exchange, is_short, lev, minutes, rate, interest,
(True, 3.0, 30.0, margin),
])
@pytest.mark.usefixtures("init_persistence")
def test_borrowed(limit_buy_order_usdt, limit_sell_order_usdt, fee,
caplog, is_short, lev, borrowed, trading_mode):
def test_borrowed(fee, is_short, lev, borrowed, trading_mode):
"""
10 minute limit trade on Binance/Kraken at 1x, 3x leverage
fee: 0.25% quote
@@ -1204,347 +1145,6 @@ def test_calc_profit(
trade.open_rate)) == round(profit_ratio, 8)
def test_migrate_new(mocker, default_conf, fee, caplog):
"""
Test Database migration (starting with new pairformat)
"""
caplog.set_level(logging.DEBUG)
amount = 103.223
# Always create all columns apart from the last!
create_table_old = """CREATE TABLE IF NOT EXISTS "trades" (
id INTEGER NOT NULL,
exchange VARCHAR NOT NULL,
pair VARCHAR NOT NULL,
is_open BOOLEAN NOT NULL,
fee FLOAT NOT NULL,
open_rate FLOAT,
close_rate FLOAT,
close_profit FLOAT,
stake_amount FLOAT NOT NULL,
amount FLOAT,
open_date DATETIME NOT NULL,
close_date DATETIME,
open_order_id VARCHAR,
stop_loss FLOAT,
initial_stop_loss FLOAT,
max_rate FLOAT,
sell_reason VARCHAR,
strategy VARCHAR,
ticker_interval INTEGER,
stoploss_order_id VARCHAR,
PRIMARY KEY (id),
CHECK (is_open IN (0, 1))
);"""
create_table_order = """CREATE TABLE orders (
id INTEGER NOT NULL,
ft_trade_id INTEGER,
ft_order_side VARCHAR(25) NOT NULL,
ft_pair VARCHAR(25) NOT NULL,
ft_is_open BOOLEAN NOT NULL,
order_id VARCHAR(255) NOT NULL,
status VARCHAR(255),
symbol VARCHAR(25),
order_type VARCHAR(50),
side VARCHAR(25),
price FLOAT,
amount FLOAT,
filled FLOAT,
remaining FLOAT,
cost FLOAT,
order_date DATETIME,
order_filled_date DATETIME,
order_update_date DATETIME,
PRIMARY KEY (id)
);"""
insert_table_old = """INSERT INTO trades (exchange, pair, is_open, fee,
open_rate, stake_amount, amount, open_date,
stop_loss, initial_stop_loss, max_rate, ticker_interval,
open_order_id, stoploss_order_id)
VALUES ('binance', 'ETC/BTC', 1, {fee},
0.00258580, {stake}, {amount},
'2019-11-28 12:44:24.000000',
0.0, 0.0, 0.0, '5m',
'buy_order', 'dry_stop_order_id222')
""".format(fee=fee.return_value,
stake=default_conf.get("stake_amount"),
amount=amount
)
insert_orders = f"""
insert into orders (
ft_trade_id,
ft_order_side,
ft_pair,
ft_is_open,
order_id,
status,
symbol,
order_type,
side,
price,
amount,
filled,
remaining,
cost)
values (
1,
'buy',
'ETC/BTC',
0,
'dry_buy_order',
'closed',
'ETC/BTC',
'limit',
'buy',
0.00258580,
{amount},
{amount},
0,
{amount * 0.00258580}
),
(
1,
'buy',
'ETC/BTC',
1,
'dry_buy_order22',
'canceled',
'ETC/BTC',
'limit',
'buy',
0.00258580,
{amount},
{amount},
0,
{amount * 0.00258580}
),
(
1,
'stoploss',
'ETC/BTC',
1,
'dry_stop_order_id11X',
'canceled',
'ETC/BTC',
'limit',
'sell',
0.00258580,
{amount},
{amount},
0,
{amount * 0.00258580}
),
(
1,
'stoploss',
'ETC/BTC',
1,
'dry_stop_order_id222',
'open',
'ETC/BTC',
'limit',
'sell',
0.00258580,
{amount},
{amount},
0,
{amount * 0.00258580}
)
"""
engine = create_engine('sqlite://')
mocker.patch('freqtrade.persistence.models.create_engine', lambda *args, **kwargs: engine)
# Create table using the old format
with engine.begin() as connection:
connection.execute(text(create_table_old))
connection.execute(text(create_table_order))
connection.execute(text("create index ix_trades_is_open on trades(is_open)"))
connection.execute(text("create index ix_trades_pair on trades(pair)"))
connection.execute(text(insert_table_old))
connection.execute(text(insert_orders))
# fake previous backup
connection.execute(text("create table trades_bak as select * from trades"))
connection.execute(text("create table trades_bak1 as select * from trades"))
# Run init to test migration
init_db(default_conf['db_url'])
assert len(Trade.query.filter(Trade.id == 1).all()) == 1
trade = Trade.query.filter(Trade.id == 1).first()
assert trade.fee_open == fee.return_value
assert trade.fee_close == fee.return_value
assert trade.open_rate_requested is None
assert trade.close_rate_requested is None
assert trade.is_open == 1
assert trade.amount == amount
assert trade.amount_requested == amount
assert trade.stake_amount == default_conf.get("stake_amount")
assert trade.pair == "ETC/BTC"
assert trade.exchange == "binance"
assert trade.max_rate == 0.0
assert trade.min_rate is None
assert trade.stop_loss == 0.0
assert trade.initial_stop_loss == 0.0
assert trade.exit_reason is None
assert trade.strategy is None
assert trade.timeframe == '5m'
assert trade.stoploss_order_id == 'dry_stop_order_id222'
assert trade.stoploss_last_update is None
assert log_has("trying trades_bak1", caplog)
assert log_has("trying trades_bak2", caplog)
assert log_has("Running database migration for trades - backup: trades_bak2, orders_bak0",
caplog)
assert log_has("Database migration finished.", caplog)
assert pytest.approx(trade.open_trade_value) == trade._calc_open_trade_value(
trade.amount, trade.open_rate)
assert trade.close_profit_abs is None
orders = trade.orders
assert len(orders) == 4
assert orders[0].order_id == 'dry_buy_order'
assert orders[0].ft_order_side == 'buy'
assert orders[-1].order_id == 'dry_stop_order_id222'
assert orders[-1].ft_order_side == 'stoploss'
assert orders[-1].ft_is_open is True
assert orders[1].order_id == 'dry_buy_order22'
assert orders[1].ft_order_side == 'buy'
assert orders[1].ft_is_open is False
assert orders[2].order_id == 'dry_stop_order_id11X'
assert orders[2].ft_order_side == 'stoploss'
assert orders[2].ft_is_open is False
def test_migrate_too_old(mocker, default_conf, fee, caplog):
"""
Test Database migration (starting with new pairformat)
"""
caplog.set_level(logging.DEBUG)
amount = 103.223
create_table_old = """CREATE TABLE IF NOT EXISTS "trades" (
id INTEGER NOT NULL,
exchange VARCHAR NOT NULL,
pair VARCHAR NOT NULL,
is_open BOOLEAN NOT NULL,
fee_open FLOAT NOT NULL,
fee_close FLOAT NOT NULL,
open_rate FLOAT,
close_rate FLOAT,
close_profit FLOAT,
stake_amount FLOAT NOT NULL,
amount FLOAT,
open_date DATETIME NOT NULL,
close_date DATETIME,
open_order_id VARCHAR,
PRIMARY KEY (id),
CHECK (is_open IN (0, 1))
);"""
insert_table_old = """INSERT INTO trades (exchange, pair, is_open, fee_open, fee_close,
open_rate, stake_amount, amount, open_date)
VALUES ('binance', 'ETC/BTC', 1, {fee}, {fee},
0.00258580, {stake}, {amount},
'2019-11-28 12:44:24.000000')
""".format(fee=fee.return_value,
stake=default_conf.get("stake_amount"),
amount=amount
)
engine = create_engine('sqlite://')
mocker.patch('freqtrade.persistence.models.create_engine', lambda *args, **kwargs: engine)
# Create table using the old format
with engine.begin() as connection:
connection.execute(text(create_table_old))
connection.execute(text(insert_table_old))
# Run init to test migration
with pytest.raises(OperationalException, match=r'Your database seems to be very old'):
init_db(default_conf['db_url'])
def test_migrate_get_last_sequence_ids():
engine = MagicMock()
engine.begin = MagicMock()
engine.name = 'postgresql'
get_last_sequence_ids(engine, 'trades_bak', 'orders_bak')
assert engine.begin.call_count == 2
engine.reset_mock()
engine.begin.reset_mock()
engine.name = 'somethingelse'
get_last_sequence_ids(engine, 'trades_bak', 'orders_bak')
assert engine.begin.call_count == 0
def test_migrate_set_sequence_ids():
engine = MagicMock()
engine.begin = MagicMock()
engine.name = 'postgresql'
set_sequence_ids(engine, 22, 55, 5)
assert engine.begin.call_count == 1
engine.reset_mock()
engine.begin.reset_mock()
engine.name = 'somethingelse'
set_sequence_ids(engine, 22, 55, 6)
assert engine.begin.call_count == 0
def test_migrate_pairlocks(mocker, default_conf, fee, caplog):
"""
Test Database migration (starting with new pairformat)
"""
caplog.set_level(logging.DEBUG)
# Always create all columns apart from the last!
create_table_old = """CREATE TABLE pairlocks (
id INTEGER NOT NULL,
pair VARCHAR(25) NOT NULL,
reason VARCHAR(255),
lock_time DATETIME NOT NULL,
lock_end_time DATETIME NOT NULL,
active BOOLEAN NOT NULL,
PRIMARY KEY (id)
)
"""
create_index1 = "CREATE INDEX ix_pairlocks_pair ON pairlocks (pair)"
create_index2 = "CREATE INDEX ix_pairlocks_lock_end_time ON pairlocks (lock_end_time)"
create_index3 = "CREATE INDEX ix_pairlocks_active ON pairlocks (active)"
insert_table_old = """INSERT INTO pairlocks (
id, pair, reason, lock_time, lock_end_time, active)
VALUES (1, 'ETH/BTC', 'Auto lock', '2021-07-12 18:41:03', '2021-07-11 18:45:00', 1)
"""
insert_table_old2 = """INSERT INTO pairlocks (
id, pair, reason, lock_time, lock_end_time, active)
VALUES (2, '*', 'Lock all', '2021-07-12 18:41:03', '2021-07-12 19:00:00', 1)
"""
engine = create_engine('sqlite://')
mocker.patch('freqtrade.persistence.models.create_engine', lambda *args, **kwargs: engine)
# Create table using the old format
with engine.begin() as connection:
connection.execute(text(create_table_old))
connection.execute(text(insert_table_old))
connection.execute(text(insert_table_old2))
connection.execute(text(create_index1))
connection.execute(text(create_index2))
connection.execute(text(create_index3))
init_db(default_conf['db_url'])
assert len(PairLock.query.all()) == 2
assert len(PairLock.query.filter(PairLock.pair == '*').all()) == 1
pairlocks = PairLock.query.filter(PairLock.pair == 'ETH/BTC').all()
assert len(pairlocks) == 1
assert pairlocks[0].pair == 'ETH/BTC'
assert pairlocks[0].side == '*'
def test_adjust_stop_loss(fee):
trade = Trade(
pair='ADA/USDT',
@@ -1758,6 +1358,7 @@ def test_to_json(fee):
'amount': 123.0,
'amount_requested': 123.0,
'stake_amount': 0.001,
'max_stake_amount': None,
'trade_duration': None,
'trade_duration_s': None,
'realized_profit': 0.0,
@@ -1767,7 +1368,6 @@ def test_to_json(fee):
'profit_ratio': None,
'profit_pct': None,
'profit_abs': None,
'sell_reason': None,
'exit_reason': None,
'exit_order_status': None,
'stop_loss_abs': None,
@@ -1782,7 +1382,6 @@ def test_to_json(fee):
'min_rate': None,
'max_rate': None,
'strategy': None,
'buy_tag': None,
'enter_tag': None,
'timeframe': None,
'exchange': 'binance',
@@ -1826,6 +1425,7 @@ def test_to_json(fee):
'amount': 100.0,
'amount_requested': 101.0,
'stake_amount': 0.001,
'max_stake_amount': None,
'trade_duration': 60,
'trade_duration_s': 3600,
'stop_loss_abs': None,
@@ -1857,11 +1457,9 @@ def test_to_json(fee):
'open_order_id': None,
'open_rate_requested': None,
'open_trade_value': 12.33075,
'sell_reason': None,
'exit_reason': None,
'exit_order_status': None,
'strategy': None,
'buy_tag': 'buys_signal_001',
'enter_tag': 'buys_signal_001',
'timeframe': None,
'exchange': 'binance',

View File

@@ -46,13 +46,11 @@ def test_rpc_trade_status(default_conf, ticker, fee, mocker) -> None:
'open_rate_requested': ANY,
'open_trade_value': 0.0010025,
'close_rate_requested': ANY,
'sell_reason': ANY,
'exit_reason': ANY,
'exit_order_status': ANY,
'min_rate': ANY,
'max_rate': ANY,
'strategy': ANY,
'buy_tag': ANY,
'enter_tag': ANY,
'timeframe': 5,
'open_order_id': ANY,
@@ -64,6 +62,7 @@ def test_rpc_trade_status(default_conf, ticker, fee, mocker) -> None:
'amount': 91.07468123,
'amount_requested': 91.07468124,
'stake_amount': 0.001,
'max_stake_amount': ANY,
'trade_duration': None,
'trade_duration_s': None,
'close_profit': None,


@@ -985,6 +985,7 @@ def test_api_status(botclient, mocker, ticker, fee, markets, is_short,
'base_currency': 'ETH',
'quote_currency': 'BTC',
'stake_amount': 0.001,
'max_stake_amount': ANY,
'stop_loss_abs': ANY,
'stop_loss_pct': ANY,
'stop_loss_ratio': ANY,
@@ -1014,11 +1015,9 @@ def test_api_status(botclient, mocker, ticker, fee, markets, is_short,
'open_order_id': open_order_id,
'open_rate_requested': ANY,
'open_trade_value': open_trade_value,
'sell_reason': None,
'exit_reason': None,
'exit_order_status': None,
'strategy': CURRENT_TEST_STRATEGY,
'buy_tag': None,
'enter_tag': None,
'timeframe': 5,
'exchange': 'binance',
@@ -1188,6 +1187,7 @@ def test_api_force_entry(botclient, mocker, fee, endpoint):
'base_currency': 'ETH',
'quote_currency': 'BTC',
'stake_amount': 1,
'max_stake_amount': ANY,
'stop_loss_abs': None,
'stop_loss_pct': None,
'stop_loss_ratio': None,
@@ -1218,11 +1218,9 @@ def test_api_force_entry(botclient, mocker, fee, endpoint):
'open_order_id': '123456',
'open_rate_requested': None,
'open_trade_value': 0.24605460,
'sell_reason': None,
'exit_reason': None,
'exit_order_status': None,
'strategy': CURRENT_TEST_STRATEGY,
'buy_tag': None,
'enter_tag': None,
'timeframe': 5,
'exchange': 'binance',
@@ -1709,7 +1707,7 @@ def test_api_backtest_history(botclient, mocker, testdatadir):
mocker.patch('freqtrade.data.btanalysis._get_backtest_files',
return_value=[
testdatadir / 'backtest_results/backtest-result_multistrat.json',
testdatadir / 'backtest_results/backtest-result_new.json'
testdatadir / 'backtest_results/backtest-result.json'
])
rc = client_get(client, f"{BASE_URI}/backtest/history")


@@ -46,7 +46,7 @@ def test_init_plotscript(default_conf, mocker, testdatadir):
default_conf['trade_source'] = "file"
default_conf['timeframe'] = "5m"
default_conf["datadir"] = testdatadir
default_conf['exportfilename'] = testdatadir / "backtest-result_new.json"
default_conf['exportfilename'] = testdatadir / "backtest-result.json"
supported_markets = ["TRX/BTC", "ADA/BTC"]
ret = init_plotscript(default_conf, supported_markets)
assert "ohlcv" in ret
@@ -158,7 +158,7 @@ def test_plot_trades(testdatadir, caplog):
assert fig == fig1
assert log_has("No trades found.", caplog)
pair = "ADA/BTC"
filename = testdatadir / "backtest_results/backtest-result_new.json"
filename = testdatadir / "backtest_results/backtest-result.json"
trades = load_backtest_data(filename)
trades = trades.loc[trades['pair'] == pair]
@@ -299,7 +299,7 @@ def test_generate_plot_file(mocker, caplog):
def test_add_profit(testdatadir):
filename = testdatadir / "backtest_results/backtest-result_new.json"
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename)
timerange = TimeRange.parse_timerange("20180110-20180112")
@@ -319,7 +319,7 @@ def test_add_profit(testdatadir):
def test_generate_profit_graph(testdatadir):
filename = testdatadir / "backtest_results/backtest-result_new.json"
filename = testdatadir / "backtest_results/backtest-result.json"
trades = load_backtest_data(filename)
timerange = TimeRange.parse_timerange("20180110-20180112")
pairs = ["TRX/BTC", "XLM/BTC"]
@@ -354,7 +354,7 @@ def test_generate_profit_graph(testdatadir):
profit = find_trace_in_fig_data(figure.data, "Profit")
assert isinstance(profit, go.Scatter)
drawdown = find_trace_in_fig_data(figure.data, "Max drawdown 35.69%")
drawdown = find_trace_in_fig_data(figure.data, "Max drawdown 73.89%")
assert isinstance(drawdown, go.Scatter)
parallel = find_trace_in_fig_data(figure.data, "Parallel trades")
assert isinstance(parallel, go.Scatter)
@@ -395,7 +395,7 @@ def test_load_and_plot_trades(default_conf, mocker, caplog, testdatadir):
default_conf['trade_source'] = 'file'
default_conf["datadir"] = testdatadir
default_conf['exportfilename'] = testdatadir / "backtest-result_new.json"
default_conf['exportfilename'] = testdatadir / "backtest-result.json"
default_conf['indicators1'] = ["sma5", "ema10"]
default_conf['indicators2'] = ["macd"]
default_conf['pairs'] = ["ETH/BTC", "LTC/BTC"]
@@ -466,7 +466,7 @@ def test_plot_profit(default_conf, mocker, testdatadir):
match=r"No trades found, cannot generate Profit-plot.*"):
plot_profit(default_conf)
default_conf['exportfilename'] = testdatadir / "backtest_results/backtest-result_new.json"
default_conf['exportfilename'] = testdatadir / "backtest_results/backtest-result.json"
plot_profit(default_conf)


@@ -1 +1 @@
{"latest_backtest":"backtest-result_new.json"}
{"latest_backtest":"backtest-result.json"}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long