Merge branch 'develop' into feature_keyval_storage
commit 1551510c6f

.github/workflows/ci.yml
@@ -13,6 +13,10 @@ on:
   schedule:
     - cron: '0 5 * * 4'

+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
 jobs:
   build_linux:

@@ -296,18 +300,6 @@ jobs:
         details: Freqtrade doc test failed!
         webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}

-  cleanup-prior-runs:
-    permissions:
-      actions: write  # for rokroskar/workflow-run-cleanup-action to obtain workflow name & cancel it
-      contents: read  # for rokroskar/workflow-run-cleanup-action to obtain branch
-    runs-on: ubuntu-20.04
-    steps:
-    - name: Cleanup previous runs on this branch
-      uses: rokroskar/workflow-run-cleanup-action@v0.3.3
-      if: "!startsWith(github.ref, 'refs/tags/') && github.ref != 'refs/heads/stable' && github.repository == 'freqtrade/freqtrade'"
-      env:
-        GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
-
   # Notify only once - when CI completes (and after deploy) in case it's successfull
   notify-complete:
     needs: [ build_linux, build_macos, build_windows, docs_check, mypy_version_check ]
@@ -15,7 +15,7 @@ repos:
         additional_dependencies:
           - types-cachetools==5.0.1
           - types-filelock==3.2.6
-          - types-requests==2.27.29
+          - types-requests==2.27.30
           - types-tabulate==0.8.9
           - types-python-dateutil==2.8.17
         # stages: [push]
@@ -1,4 +1,4 @@
-FROM python:3.10.4-slim-bullseye as base
+FROM python:3.10.5-slim-bullseye as base

 # Setup env
 ENV LANG C.UTF-8
@@ -1,5 +1,5 @@
 mkdocs==1.3.0
-mkdocs-material==8.2.16
+mkdocs-material==8.3.2
 mdx_truly_sane_lists==1.2
 pymdown-extensions==9.4
 jinja2==3.1.2
@@ -32,4 +32,8 @@ Please ensure that you're also updating dependencies - otherwise things might break
 ``` bash
 git pull
 pip install -U -r requirements.txt
+pip install -e .
+
+# Ensure freqUI is at the latest version
+freqtrade install-ui
 ```
@@ -26,7 +26,7 @@ BT_DATA_COLUMNS = ['pair', 'stake_amount', 'amount', 'open_date', 'close_date',
                    'profit_ratio', 'profit_abs', 'exit_reason',
                    'initial_stop_loss_abs', 'initial_stop_loss_ratio', 'stop_loss_abs',
                    'stop_loss_ratio', 'min_rate', 'max_rate', 'is_open', 'enter_tag',
-                   'is_short'
+                   'is_short', 'open_timestamp', 'close_timestamp', 'orders'
                    ]


@@ -283,6 +283,8 @@ def load_backtest_data(filename: Union[Path, str], strategy: Optional[str] = None
             if 'enter_tag' not in df.columns:
                 df['enter_tag'] = df['buy_tag']
                 df = df.drop(['buy_tag'], axis=1)
+            if 'orders' not in df.columns:
+                df.loc[:, 'orders'] = None

     else:
         # old format - only with lists.
@@ -337,7 +339,7 @@ def trade_list_to_dataframe(trades: List[LocalTrade]) -> pd.DataFrame:
     :param trades: List of trade objects
     :return: Dataframe with BT_DATA_COLUMNS
     """
-    df = pd.DataFrame.from_records([t.to_json() for t in trades], columns=BT_DATA_COLUMNS)
+    df = pd.DataFrame.from_records([t.to_json(True) for t in trades], columns=BT_DATA_COLUMNS)
    if len(df) > 0:
         df.loc[:, 'close_date'] = pd.to_datetime(df['close_date'], utc=True)
         df.loc[:, 'open_date'] = pd.to_datetime(df['open_date'], utc=True)
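With 'orders', 'open_timestamp' and 'close_timestamp' now part of BT_DATA_COLUMNS, backtest result DataFrames carry the minified per-order details for every trade. A minimal consumption sketch, assuming a result file exists at the illustrative path below (path and printed fields are examples, not prescribed by the diff):

```python
from freqtrade.data.btanalysis import load_backtest_data

# Illustrative path - point this at a real backtest result file.
trades = load_backtest_data("user_data/backtest_results/backtest-result.json")
for _, trade in trades.iterrows():
    # 'orders' holds the minified Order.to_json() dicts; older result files get None.
    for order in (trade["orders"] or []):
        print(order["ft_order_side"], order["amount"], order["safe_price"])
```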
@@ -4,7 +4,7 @@ Freqtrade is the main module of this bot. It contains the class Freqtrade()
 import copy
 import logging
 import traceback
-from datetime import datetime, time, timezone
+from datetime import datetime, time, timedelta, timezone
 from math import isclose
 from threading import Lock
 from typing import Any, Dict, List, Optional, Tuple
@@ -227,7 +227,7 @@ class FreqtradeBot(LoggingMixin):
         Notify the user when the bot is stopped (not reloaded)
         and there are still open trades active.
         """
-        open_trades = Trade.get_trades([Trade.is_open.is_(True)]).all()
+        open_trades = Trade.get_open_trades()

         if len(open_trades) != 0 and self.state != State.RELOAD_CONFIG:
             msg = {
@@ -302,6 +302,15 @@ class FreqtradeBot(LoggingMixin):
                 self.update_trade_state(order.trade, order.order_id, fo,
                                         stoploss_order=(order.ft_order_side == 'stoploss'))

+            except InvalidOrderException as e:
+                logger.warning(f"Error updating Order {order.order_id} due to {e}.")
+                if order.order_date_utc - timedelta(days=5) < datetime.now(timezone.utc):
+                    logger.warning(
+                        "Order is older than 5 days. Assuming order was fully cancelled.")
+                    fo = order.to_ccxt_object()
+                    fo['status'] = 'canceled'
+                    self.handle_timedout_order(fo, order.trade)
+
             except ExchangeError as e:

                 logger.warning(f"Error updating Order {order.order_id} due to {e}")
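The new InvalidOrderException branch assumes that an open order which can no longer be fetched from the exchange was cancelled, and feeds a synthetic 'canceled' payload back into the regular timeout handling. A standalone sketch of that fallback, with stand-in values (the date and the dict below are illustrative, not freqtrade internals):

```python
from datetime import datetime, timedelta, timezone

# Stand-in value; in the bot this comes from the stored Order row.
order_date_utc = datetime(2022, 5, 20, tzinfo=timezone.utc)

if order_date_utc - timedelta(days=5) < datetime.now(timezone.utc):
    # Synthesize a ccxt-style payload so the normal timeout handling can run on it.
    fake_order = {"status": "canceled"}
    print("Assuming order was fully cancelled:", fake_order)
```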
@@ -969,6 +969,7 @@ class Backtesting:
                 return False
             else:
                 del trade.orders[trade.orders.index(order)]
+                trade.open_order_id = None
                 self.canceled_entry_orders += 1

             # place new order if result was not None
@@ -1097,6 +1098,7 @@ class Backtesting:
             # 5. Process exit orders.
             order = trade.select_order(trade.exit_side, is_open=True)
             if order and self._get_order_filled(order.price, row):
+                order.close_bt_order(current_time, trade)
                 trade.open_order_id = None
                 trade.close_date = current_time
                 trade.close(order.price, show_msg=False)
@@ -4,7 +4,6 @@ from datetime import datetime, timedelta, timezone
 from pathlib import Path
 from typing import Any, Dict, List, Union

-from numpy import int64
 from pandas import DataFrame, to_datetime
 from tabulate import tabulate

@@ -417,9 +416,6 @@ def generate_strategy_stats(pairlist: List[str],
                     key=lambda x: x['profit_sum']) if len(pair_results) > 1 else None
     worst_pair = min([pair for pair in pair_results if pair['key'] != 'TOTAL'],
                      key=lambda x: x['profit_sum']) if len(pair_results) > 1 else None
-    if not results.empty:
-        results['open_timestamp'] = results['open_date'].view(int64) // 1e6
-        results['close_timestamp'] = results['close_date'].view(int64) // 1e6

     backtest_days = (max_date - min_date).days or 1
     strat_stats = {
@@ -247,6 +247,35 @@ def set_sqlite_to_wal(engine):
         connection.execute(text("PRAGMA journal_mode=wal"))


+def fix_old_dry_orders(engine):
+    with engine.begin() as connection:
+        connection.execute(
+            text(
+                """
+                update orders
+                set ft_is_open = 0
+                where ft_is_open = 1 and (ft_trade_id, order_id) not in (
+                    select id, stoploss_order_id from trades where stoploss_order_id is not null
+                ) and ft_order_side = 'stoploss'
+                and order_id like 'dry_%'
+                """
+            )
+        )
+        connection.execute(
+            text(
+                """
+                update orders
+                set ft_is_open = 0
+                where ft_is_open = 1
+                and (ft_trade_id, order_id) not in (
+                    select id, open_order_id from trades where open_order_id is not null
+                ) and ft_order_side != 'stoploss'
+                and order_id like 'dry_%'
+                """
+            )
+        )
+
+
 def check_migrate(engine, decl_base, previous_tables) -> None:
     """
     Checks if migration is necessary and migrates if necessary
@@ -288,3 +317,4 @@ def check_migrate(engine, decl_base, previous_tables) -> None:
                 "start with a fresh database.")

     set_sqlite_to_wal(engine)
+    fix_old_dry_orders(engine)
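The two UPDATE statements in fix_old_dry_orders rely on SQL row-value comparison, `(ft_trade_id, order_id) NOT IN (SELECT ...)`, to close dry-run orders that no trade still references. A toy, self-contained illustration of that pattern on an in-memory SQLite database (the schema and rows are simplified stand-ins, not freqtrade's real tables):

```python
import sqlite3

con = sqlite3.connect(":memory:")
con.executescript("""
    create table trades (id integer, stoploss_order_id text);
    create table orders (ft_trade_id integer, order_id text,
                         ft_order_side text, ft_is_open integer);
    insert into trades values (1, 'dry_stop_1');
    insert into orders values (1, 'dry_stop_1', 'stoploss', 1);  -- still referenced
    insert into orders values (2, 'dry_stop_2', 'stoploss', 1);  -- orphaned
""")
con.execute("""
    update orders set ft_is_open = 0
    where ft_is_open = 1 and (ft_trade_id, order_id) not in (
        select id, stoploss_order_id from trades where stoploss_order_id is not null
    ) and ft_order_side = 'stoploss' and order_id like 'dry_%'
""")
# dry_stop_2 is now closed (ft_is_open = 0); dry_stop_1 stays open.
print(con.execute("select order_id, ft_is_open from orders").fetchall())
```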
@@ -139,17 +139,23 @@ class Order(_DECL_BASE):
             'info': {},
         }

-    def to_json(self, entry_side: str) -> Dict[str, Any]:
-        return {
+    def to_json(self, entry_side: str, minified: bool = False) -> Dict[str, Any]:
+        resp = {
+            'amount': self.amount,
+            'safe_price': self.safe_price,
+            'ft_order_side': self.ft_order_side,
+            'order_filled_timestamp': int(self.order_filled_date.replace(
+                tzinfo=timezone.utc).timestamp() * 1000) if self.order_filled_date else None,
+            'ft_is_entry': self.ft_order_side == entry_side,
+        }
+        if not minified:
+            resp.update({
             'pair': self.ft_pair,
             'order_id': self.order_id,
             'status': self.status,
-            'amount': self.amount,
             'average': round(self.average, 8) if self.average else 0,
-            'safe_price': self.safe_price,
             'cost': self.cost if self.cost else 0,
             'filled': self.filled,
-            'ft_order_side': self.ft_order_side,
             'is_open': self.ft_is_open,
             'order_date': self.order_date.strftime(DATETIME_PRINT_FORMAT)
             if self.order_date else None,
@@ -157,17 +163,16 @@ class Order(_DECL_BASE):
             tzinfo=timezone.utc).timestamp() * 1000) if self.order_date else None,
             'order_filled_date': self.order_filled_date.strftime(DATETIME_PRINT_FORMAT)
             if self.order_filled_date else None,
-            'order_filled_timestamp': int(self.order_filled_date.replace(
-                tzinfo=timezone.utc).timestamp() * 1000) if self.order_filled_date else None,
             'order_type': self.order_type,
             'price': self.price,
-            'ft_is_entry': self.ft_order_side == entry_side,
             'remaining': self.remaining,
-        }
+            })
+        return resp

     def close_bt_order(self, close_date: datetime, trade: 'LocalTrade'):
         self.order_filled_date = close_date
         self.filled = self.amount
+        self.remaining = 0
         self.status = 'closed'
         self.ft_is_open = False
         if (self.ft_order_side == trade.entry_side
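Order.to_json() now builds the small payload first and only adds the verbose fields when minified output is not requested. A standalone sketch of that pattern (DemoOrder and its fields are made-up stand-ins, not the freqtrade model):

```python
from typing import Any, Dict


class DemoOrder:
    def __init__(self) -> None:
        self.amount = 0.01
        self.safe_price = 100.0
        self.ft_order_side = "buy"
        self.status = "closed"

    def to_json(self, entry_side: str, minified: bool = False) -> Dict[str, Any]:
        # Base dict holds only the fields needed by backtest results.
        resp = {
            "amount": self.amount,
            "safe_price": self.safe_price,
            "ft_order_side": self.ft_order_side,
            "ft_is_entry": self.ft_order_side == entry_side,
        }
        if not minified:
            # Full payload extends the same dict with the verbose fields.
            resp.update({"status": self.status, "pair": "BTC/USDT"})
        return resp


o = DemoOrder()
assert set(o.to_json("buy", minified=True)) <= set(o.to_json("buy"))
print(o.to_json("buy", minified=True))
```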
@@ -396,9 +401,9 @@ class LocalTrade():
             f'open_rate={self.open_rate:.8f}, open_since={open_since})'
         )

-    def to_json(self) -> Dict[str, Any]:
-        filled_orders = self.select_filled_orders()
-        orders = [order.to_json(self.entry_side) for order in filled_orders]
+    def to_json(self, minified: bool = False) -> Dict[str, Any]:
+        filled_orders = self.select_filled_or_open_orders()
+        orders = [order.to_json(self.entry_side, minified) for order in filled_orders]

         return {
             'trade_id': self.id,
@@ -900,6 +905,21 @@ class LocalTrade():
                 (o.filled or 0) > 0 and
                 o.status in NON_OPEN_EXCHANGE_STATES]

+    def select_filled_or_open_orders(self) -> List['Order']:
+        """
+        Finds filled or open orders
+        :param order_side: Side of the order (either 'buy', 'sell', or None)
+        :return: array of Order objects
+        """
+        return [o for o in self.orders if
+                (
+                    o.ft_is_open is False
+                    and (o.filled or 0) > 0
+                    and o.status in NON_OPEN_EXCHANGE_STATES
+                )
+                or (o.ft_is_open is True and o.status is not None)
+                ]
+
     def set_kval(self, key: str, value: Any) -> None:
         KeyValues.set_kval(key=key, value=value, trade_id=self.id)

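select_filled_or_open_orders() widens the old filled-only selection so that still-open orders (for example an unfilled entry) are included in the trade's JSON as well. A toy illustration of the selection predicate (the O class and the status tuple below are simplified stand-ins for freqtrade's Order model and constants):

```python
class O:
    def __init__(self, ft_is_open, filled, status):
        self.ft_is_open, self.filled, self.status = ft_is_open, filled, status


NON_OPEN_EXCHANGE_STATES = ('cancelled', 'canceled', 'closed', 'expired')
orders = [O(False, 1.0, 'closed'),    # closed and filled -> kept
          O(False, 0.0, 'canceled'),  # closed but never filled -> dropped
          O(True, 0.0, 'open')]       # still open -> now also kept

kept = [o for o in orders
        if (o.ft_is_open is False and (o.filled or 0) > 0
            and o.status in NON_OPEN_EXCHANGE_STATES)
        or (o.ft_is_open is True and o.status is not None)]
print(len(kept))  # 2: the filled order and the open one
```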
@@ -166,7 +166,7 @@ class ShowConfig(BaseModel):
     trailing_stop_positive: Optional[float]
     trailing_stop_positive_offset: Optional[float]
     trailing_only_offset_is_reached: Optional[bool]
-    unfilledtimeout: UnfilledTimeout
+    unfilledtimeout: Optional[UnfilledTimeout]  # Empty in webserver mode
     order_types: Optional[OrderTypes]
     use_custom_stoploss: Optional[bool]
     timeframe: Optional[str]
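Making unfilledtimeout optional lets the /show_config schema validate in webserver mode, where no trading configuration is loaded. A minimal pydantic sketch of the behaviour this relies on (assuming pydantic v1 semantics, where an Optional field without a default is treated as not required; the DemoConfig class is illustrative only):

```python
from typing import Optional

from pydantic import BaseModel


class DemoConfig(BaseModel):
    # Stand-in for the ShowConfig field: Optional means the section may be absent.
    unfilledtimeout: Optional[dict]


print(DemoConfig().unfilledtimeout)  # None when the section is not provided
```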
@@ -396,7 +396,7 @@ class Telegram(RPCHandler):
         first_avg = filled_orders[0]["safe_price"]

         for x, order in enumerate(filled_orders):
-            if not order['ft_is_entry']:
+            if not order['ft_is_entry'] or order['is_open'] is True:
                 continue
             cur_entry_datetime = arrow.get(order["order_filled_date"])
             cur_entry_amount = order["amount"]
@@ -24,6 +24,6 @@ nbconvert==6.5.0
 # mypy types
 types-cachetools==5.0.1
 types-filelock==3.2.6
-types-requests==2.27.29
+types-requests==2.27.30
 types-tabulate==0.8.9
 types-python-dateutil==2.8.17
@@ -5,5 +5,5 @@
 scipy==1.8.1
 scikit-learn==1.1.1
 scikit-optimize==0.9.0
-filelock==3.7.0
+filelock==3.7.1
 progressbar2==4.0.0
@@ -2,17 +2,17 @@ numpy==1.22.4
 pandas==1.4.2
 pandas-ta==0.3.14b

-ccxt==1.84.39
+ccxt==1.85.57
 # Pin cryptography for now due to rust build errors with piwheels
 cryptography==37.0.2
 aiohttp==3.8.1
-SQLAlchemy==1.4.36
+SQLAlchemy==1.4.37
 python-telegram-bot==13.12
 arrow==1.2.2
 cachetools==4.2.2
 requests==2.27.1
 urllib3==1.26.9
-jsonschema==4.5.1
+jsonschema==4.6.0
 TA-Lib==0.4.24
 technical==1.3.0
 tabulate==0.8.9
@@ -28,7 +28,7 @@ py_find_1st==1.1.5
 # Load ticker files 30% faster
 python-rapidjson==1.6
 # Properly format api responses
-orjson==3.6.8
+orjson==3.7.1

 # Notify systemd
 sdnotify==0.3.2
@@ -85,7 +85,7 @@ def test_load_backtest_data_new_format(testdatadir):
     filename = testdatadir / "backtest_results/backtest-result_new.json"
     bt_data = load_backtest_data(filename)
     assert isinstance(bt_data, DataFrame)
-    assert set(bt_data.columns) == set(BT_DATA_COLUMNS + ['close_timestamp', 'open_timestamp'])
+    assert set(bt_data.columns) == set(BT_DATA_COLUMNS)
     assert len(bt_data) == 179

     # Test loading from string (must yield same result)
@@ -110,7 +110,7 @@ def test_load_backtest_data_multi(testdatadir):
         bt_data = load_backtest_data(filename, strategy=strategy)
         assert isinstance(bt_data, DataFrame)
         assert set(bt_data.columns) == set(
-            BT_DATA_COLUMNS + ['close_timestamp', 'open_timestamp'])
+            BT_DATA_COLUMNS)
         assert len(bt_data) == 179

         # Test loading from string (must yield same result)
@@ -795,10 +795,27 @@ def test_backtest_one(default_conf, fee, mocker, testdatadir) -> None:
         'is_open': [False, False],
         'enter_tag': [None, None],
         "is_short": [False, False],
+        'open_timestamp': [1517251200000, 1517283000000],
+        'close_timestamp': [1517265300000, 1517285400000],
+        'orders': [
+            [
+                {'amount': 0.00957442, 'safe_price': 0.104445, 'ft_order_side': 'buy',
+                 'order_filled_timestamp': 1517251200000, 'ft_is_entry': True},
+                {'amount': 0.00957442, 'safe_price': 0.10496853383458644, 'ft_order_side': 'sell',
+                 'order_filled_timestamp': 1517265300000, 'ft_is_entry': False}
+            ], [
+                {'amount': 0.0097064, 'safe_price': 0.10302485, 'ft_order_side': 'buy',
+                 'order_filled_timestamp': 1517283000000, 'ft_is_entry': True},
+                {'amount': 0.0097064, 'safe_price': 0.10354126528822055, 'ft_order_side': 'sell',
+                 'order_filled_timestamp': 1517285400000, 'ft_is_entry': False}
+            ]
+        ]
     })
     pd.testing.assert_frame_equal(results, expected)
+    assert 'orders' in results.columns
     data_pair = processed[pair]
     for _, t in results.iterrows():
+        assert len(t['orders']) == 2
         ln = data_pair.loc[data_pair["date"] == t["open_date"]]
         # Check open trade rate alignes to open rate
         assert ln is not None
@@ -70,9 +70,14 @@ def test_backtest_position_adjustment(default_conf, fee, mocker, testdatadir) -> None:
         'is_open': [False, False],
         'enter_tag': [None, None],
         'is_short': [False, False],
+        'open_timestamp': [1517251200000, 1517283000000],
+        'close_timestamp': [1517265300000, 1517285400000],
     })
-    pd.testing.assert_frame_equal(results, expected)
+    pd.testing.assert_frame_equal(results.drop(columns=['orders']), expected)
     data_pair = processed[pair]
+    assert len(results.iloc[0]['orders']) == 6
+    assert len(results.iloc[1]['orders']) == 2
+
     for _, t in results.iterrows():
         ln = data_pair.loc[data_pair["date"] == t["open_date"]]
         # Check open trade rate alignes to open rate
@@ -4802,10 +4802,19 @@ def test_startup_update_open_orders(mocker, default_conf_usdt, fee, caplog, is_short):
     assert len(Order.get_open_orders()) == 2

     caplog.clear()
-    mocker.patch('freqtrade.exchange.Exchange.fetch_order', side_effect=InvalidOrderException)
+    mocker.patch('freqtrade.exchange.Exchange.fetch_order', side_effect=ExchangeError)
     freqtrade.startup_update_open_orders()
     assert log_has_re(r"Error updating Order .*", caplog)

+    mocker.patch('freqtrade.exchange.Exchange.fetch_order', side_effect=InvalidOrderException)
+    hto_mock = mocker.patch('freqtrade.freqtradebot.FreqtradeBot.handle_timedout_order')
+    # Orders which are no longer found after X days should be assumed as canceled.
+    freqtrade.startup_update_open_orders()
+    assert log_has_re(r"Order is older than \d days.*", caplog)
+    assert hto_mock.call_count == 2
+    assert hto_mock.call_args_list[0][0][0]['status'] == 'canceled'
+    assert hto_mock.call_args_list[1][0][0]['status'] == 'canceled'
+

 @pytest.mark.usefixtures("init_persistence")
 @pytest.mark.parametrize("is_short", [False, True])