Merge f755df2568
into e7902bffa0
This commit is contained in:
commit
49ace00106
@ -11,7 +11,7 @@ If you're just getting started, please be familiar with the methods described in
|
||||
!!! Tip
|
||||
You can get a strategy template containing all below methods by running `freqtrade new-strategy --strategy MyAwesomeStrategy --template advanced`
|
||||
|
||||
## Storing information
|
||||
## Storing information (Non-Persistent)
|
||||
|
||||
Storing information can be accomplished by creating a new dictionary within the strategy class.
|
||||
|
||||
@ -40,6 +40,74 @@ class AwesomeStrategy(IStrategy):
|
||||
!!! Note
|
||||
If the data is pair-specific, make sure to use pair as one of the keys in the dictionary.
|
||||
|
||||
## Storing information (Persistent)
|
||||
|
||||
Storing information can also be performed in a persistent manner. Freqtrade allows storing/retrieving user custom information associated with a specific trade.
|
||||
Using a trade object handle information can be stored using `trade_obj.set_custom_data(key='my_key', value=my_value)` and retrieved using `trade_obj.get_custom_data(key='my_key')`.
|
||||
Each data entry is associated with a trade and a user supplied key (of type `string`). This means that this can only be used in callbacks that also provide a trade object handle.
|
||||
For the data to be able to be stored within the database it must be serialized. This is done by converting it to a JSON formatted string.
|
||||
|
||||
```python
|
||||
from freqtrade.persistence import Trade
|
||||
from datetime import timedelta
|
||||
|
||||
class AwesomeStrategy(IStrategy):
|
||||
|
||||
def bot_loop_start(self, **kwargs) -> None:
|
||||
for trade in Trade.get_open_order_trades():
|
||||
fills = trade.select_filled_orders(trade.entry_side)
|
||||
if trade.pair == 'ETH/USDT':
|
||||
trade_entry_type = trade.get_custom_data(key='entry_type').kv_value
|
||||
if trade_entry_type is None:
|
||||
trade_entry_type = 'breakout' if 'entry_1' in trade.enter_tag else 'dip'
|
||||
elif len(fills) > 1:
|
||||
trade_entry_type = 'buy_up'
|
||||
trade.set_custom_data(key='entry_type', value=trade_entry_type)
|
||||
return super().bot_loop_start(**kwargs)
|
||||
|
||||
def adjust_entry_price(self, trade: Trade, order: Optional[Order], pair: str,
|
||||
current_time: datetime, proposed_rate: float, current_order_rate: float,
|
||||
entry_tag: Optional[str], side: str, **kwargs) -> float:
|
||||
# Limit orders to use and follow SMA200 as price target for the first 10 minutes since entry trigger for BTC/USDT pair.
|
||||
if pair == 'BTC/USDT' and entry_tag == 'long_sma200' and side == 'long' and (current_time - timedelta(minutes=10) > trade.open_date_utc) and order.filled == 0.0:
|
||||
dataframe, _ = self.dp.get_analyzed_dataframe(pair=pair, timeframe=self.timeframe)
|
||||
current_candle = dataframe.iloc[-1].squeeze()
|
||||
# store information about entry adjustment
|
||||
existing_count = trade.get_custom_data(key='num_entry_adjustments').kv_value
|
||||
if not existing_count:
|
||||
existing_count = 1
|
||||
else:
|
||||
existing_count += 1
|
||||
trade.set_custom_data(key='num_entry_adjustments', value=existing_count)
|
||||
|
||||
# adjust order price
|
||||
return current_candle['sma_200']
|
||||
|
||||
# default: maintain existing order
|
||||
return current_order_rate
|
||||
|
||||
def custom_exit(self, pair: str, trade: Trade, current_time: datetime, current_rate: float, current_profit: float, **kwargs):
|
||||
|
||||
entry_adjustment_count = trade.get_custom_data(key='num_entry_adjustments').kv_value
|
||||
trade_entry_type = trade.get_custom_data(key='entry_type').kv_value
|
||||
if entry_adjustment_count is None:
|
||||
if current_profit > 0.01 and (current_time - timedelta(minutes=100) > trade.open_date_utc):
|
||||
return True, 'exit_1'
|
||||
else:
|
||||
if entry_adjustment_count > 0 and current_profit > 0.05:
|
||||
return True, 'exit_2'
|
||||
if trade_entry_type == 'breakout' and current_profit > 0.1:
|
||||
return True, 'exit_3'
|
||||
|
||||
return False, None
|
||||
```
|
||||
|
||||
!!! Note
|
||||
It is recommended that simple data types are used `[bool, int, float, str]` to ensure no issues when serializing the data that needs to be stored.
|
||||
|
||||
!!! Warning
|
||||
If supplied data cannot be serialized a warning is logged and the entry for the specified `key` will contain `None` as data.
|
||||
|
||||
## Dataframe access
|
||||
|
||||
You may access dataframe in various strategy functions by querying it from dataprovider.
|
||||
|
@ -30,7 +30,7 @@ from freqtrade.optimize.bt_progress import BTProgress
|
||||
from freqtrade.optimize.optimize_reports import (generate_backtest_stats, show_backtest_results,
|
||||
store_backtest_signal_candles,
|
||||
store_backtest_stats)
|
||||
from freqtrade.persistence import LocalTrade, Order, PairLocks, Trade
|
||||
from freqtrade.persistence import CustomDataWrapper, LocalTrade, Order, PairLocks, Trade
|
||||
from freqtrade.plugins.pairlistmanager import PairListManager
|
||||
from freqtrade.plugins.protectionmanager import ProtectionManager
|
||||
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
|
||||
@ -155,6 +155,7 @@ class Backtesting:
|
||||
LoggingMixin.show_output = True
|
||||
PairLocks.use_db = True
|
||||
Trade.use_db = True
|
||||
CustomDataWrapper.use_db = True
|
||||
|
||||
def init_backtest_detail(self):
|
||||
# Load detail timeframe if specified
|
||||
@ -313,6 +314,8 @@ class Backtesting:
|
||||
Trade.use_db = False
|
||||
PairLocks.reset_locks()
|
||||
Trade.reset_trades()
|
||||
CustomDataWrapper.use_db = False
|
||||
CustomDataWrapper.reset_custom_data()
|
||||
self.rejected_trades = 0
|
||||
self.timedout_entry_orders = 0
|
||||
self.timedout_exit_orders = 0
|
||||
|
@ -1,5 +1,6 @@
|
||||
# flake8: noqa: F401
|
||||
|
||||
from freqtrade.persistence.custom_data_middleware import CustomDataWrapper
|
||||
from freqtrade.persistence.models import init_db
|
||||
from freqtrade.persistence.pairlock_middleware import PairLocks
|
||||
from freqtrade.persistence.trade_model import LocalTrade, Order, Trade
|
||||
|
57
freqtrade/persistence/custom_data.py
Normal file
57
freqtrade/persistence/custom_data.py
Normal file
@ -0,0 +1,57 @@
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import Column, DateTime, ForeignKey, Integer, String, Text, UniqueConstraint
|
||||
from sqlalchemy.orm import Query, relationship
|
||||
|
||||
from freqtrade.constants import DATETIME_PRINT_FORMAT
|
||||
from freqtrade.persistence.base import _DECL_BASE
|
||||
|
||||
|
||||
class CustomData(_DECL_BASE):
    """
    CustomData database model
    Keeps records of metadata as key/value store
    for trades or global persistent values
    One to many relationship with Trades:
      - One trade can have many metadata entries
      - One metadata entry can only be associated with one Trade
    """
    __tablename__ = 'trade_custom_data'
    # Uniqueness should be ensured over pair, order_id
    # it's likely that order_id is unique per Pair on some exchanges.
    __table_args__ = (UniqueConstraint('ft_trade_id', 'cd_key', name="_trade_id_cd_key"),)

    id = Column(Integer, primary_key=True)
    # 0 marks "global" entries that are not tied to a specific trade
    ft_trade_id = Column(Integer, ForeignKey('trades.id'), index=True, default=0)

    trade = relationship("Trade", back_populates="custom_data")

    cd_key = Column(String(255), nullable=False)
    # Name of the stored value's python type (used to decide JSON decoding)
    cd_type = Column(String(25), nullable=False)
    cd_value = Column(Text, nullable=False)
    created_at = Column(DateTime, nullable=False, default=datetime.utcnow)
    updated_at = Column(DateTime, nullable=True)

    def __repr__(self):
        create_time = (self.created_at.strftime(DATETIME_PRINT_FORMAT)
                       if self.created_at is not None else None)
        update_time = (self.updated_at.strftime(DATETIME_PRINT_FORMAT)
                       if self.updated_at is not None else None)
        return (f'CustomData(id={self.id}, key={self.cd_key}, type={self.cd_type}, ' +
                f'value={self.cd_value}, trade_id={self.ft_trade_id}, created={create_time}, ' +
                f'updated={update_time})')

    @staticmethod
    def query_cd(key: Optional[str] = None, trade_id: Optional[int] = None) -> Query:
        """
        Get all CustomData, if trade_id is not specified
        return will be for generic values not tied to a trade
        :param key: metadata key to filter on, matched case-insensitively (optional)
        :param trade_id: id of the Trade
        """
        # BUG FIX: the previous ternary applied to the whole comparison -
        # `(CustomData.ft_trade_id == trade_id) if trade_id is not None else 0` -
        # so a literal 0 (an always-false filter) was appended when trade_id was
        # None. Compare against 0 (the "global" entries) in that case instead.
        filters = [CustomData.ft_trade_id == (trade_id if trade_id is not None else 0)]
        if key is not None:
            filters.append(CustomData.cd_key.ilike(key))

        return CustomData.query.filter(*filters)
|
113
freqtrade/persistence/custom_data_middleware.py
Normal file
113
freqtrade/persistence/custom_data_middleware.py
Normal file
@ -0,0 +1,113 @@
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Any, List, Optional
|
||||
|
||||
from freqtrade.persistence.custom_data import CustomData
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CustomDataWrapper():
    """
    CustomData middleware class
    Abstracts the database layer away so it becomes optional - which will be necessary to support
    backtesting and hyperopt in the future.
    """

    use_db = True
    # In-memory store used when use_db is False (backtesting / hyperopt)
    custom_data: List[CustomData] = []
    # Value types stored as plain strings; everything else is JSON-serialized
    unserialized_types = ['bool', 'float', 'int', 'str']

    @staticmethod
    def reset_custom_data() -> None:
        """
        Resets all key-value pairs. Only active for backtesting mode.
        """
        if not CustomDataWrapper.use_db:
            CustomDataWrapper.custom_data = []

    @staticmethod
    def get_custom_data(key: Optional[str] = None,
                        trade_id: Optional[int] = None) -> List[CustomData]:
        """
        Return custom-data entries for a trade, optionally filtered by key.
        :param key: metadata key, matched case-insensitively (optional)
        :param trade_id: id of the Trade - None / 0 selects "global" entries
        """
        if trade_id is None:
            trade_id = 0

        if CustomDataWrapper.use_db:
            filtered_custom_data = CustomData.query_cd(trade_id=trade_id, key=key).all()
            for index, data_entry in enumerate(filtered_custom_data):
                # Decode values that were JSON-serialized before storage
                if data_entry.cd_type not in CustomDataWrapper.unserialized_types:
                    data_entry.cd_value = json.loads(data_entry.cd_value)
                filtered_custom_data[index] = data_entry
            return filtered_custom_data
        else:
            filtered_custom_data = [
                data_entry for data_entry in CustomDataWrapper.custom_data
                if (data_entry.ft_trade_id == trade_id)
            ]
            if key is not None:
                filtered_custom_data = [
                    data_entry for data_entry in filtered_custom_data
                    if (data_entry.cd_key.casefold() == key.casefold())
                ]
            return filtered_custom_data

    @staticmethod
    def set_custom_data(key: str, value: Any, trade_id: Optional[int] = None) -> None:
        """
        Create or update a custom-data entry.
        :param key: metadata key
        :param value: value to store - non-basic types are JSON-serialized;
                      on serialization failure a warning is logged and (in db
                      mode) nothing is persisted.
        :param trade_id: id of the Trade - defaults to 0 ("global" entry)
        """
        value_type = type(value).__name__
        value_db = None

        if value_type not in CustomDataWrapper.unserialized_types:
            try:
                value_db = json.dumps(value)
            except TypeError as e:
                logger.warning(f"could not serialize {key} value due to {e}")
        else:
            value_db = str(value)

        if trade_id is None:
            trade_id = 0

        custom_data = CustomDataWrapper.get_custom_data(key=key, trade_id=trade_id)
        if custom_data:
            data_entry = custom_data[0]
            data_entry.cd_value = value
            data_entry.updated_at = datetime.utcnow()
        else:
            data_entry = CustomData(
                ft_trade_id=trade_id,
                cd_key=key,
                cd_type=value_type,
                cd_value=value,
                created_at=datetime.utcnow()
            )

        if CustomDataWrapper.use_db and value_db is not None:
            data_entry.cd_value = value_db
            CustomData.query.session.add(data_entry)
            CustomData.query.session.commit()
        elif not CustomDataWrapper.use_db:
            cd_index = -1
            # BUG FIX: the loop previously reused the name `data_entry`,
            # shadowing the entry prepared above. When no match was found in a
            # non-empty list, the last existing element was appended again and
            # the freshly created entry was silently dropped.
            for index, existing_entry in enumerate(CustomDataWrapper.custom_data):
                if existing_entry.ft_trade_id == trade_id and existing_entry.cd_key == key:
                    cd_index = index
                    break

            if cd_index >= 0:
                data_entry.cd_type = value_type
                # BUG FIX: was `data_entry.value = value` - CustomData keeps its
                # payload in `cd_value`; `.value` silently created a stray attribute.
                data_entry.cd_value = value
                data_entry.updated_at = datetime.utcnow()

                CustomDataWrapper.custom_data[cd_index] = data_entry
            else:
                CustomDataWrapper.custom_data.append(data_entry)

    @staticmethod
    def get_all_custom_data() -> List[CustomData]:
        """Return every stored custom-data entry (database or in-memory)."""
        if CustomDataWrapper.use_db:
            return CustomData.query.all()
        else:
            return CustomDataWrapper.custom_data
|
@ -10,6 +10,7 @@ from sqlalchemy.pool import StaticPool
|
||||
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.persistence.base import _DECL_BASE
|
||||
from freqtrade.persistence.custom_data import CustomData
|
||||
from freqtrade.persistence.migrations import check_migrate
|
||||
from freqtrade.persistence.pairlock import PairLock
|
||||
from freqtrade.persistence.trade_model import Order, Trade
|
||||
@ -57,6 +58,8 @@ def init_db(db_url: str) -> None:
|
||||
Trade.query = Trade._session.query_property()
|
||||
Order.query = Trade._session.query_property()
|
||||
PairLock.query = Trade._session.query_property()
|
||||
CustomData._session = scoped_session(sessionmaker(bind=engine, autoflush=True))
|
||||
CustomData.query = CustomData._session.query_property()
|
||||
|
||||
previous_tables = inspect(engine).get_table_names()
|
||||
_DECL_BASE.metadata.create_all(engine)
|
||||
|
@ -17,6 +17,8 @@ from freqtrade.enums import ExitType, TradingMode
|
||||
from freqtrade.exceptions import DependencyException, OperationalException
|
||||
from freqtrade.leverage import interest
|
||||
from freqtrade.persistence.base import _DECL_BASE
|
||||
from freqtrade.persistence.custom_data import CustomData
|
||||
from freqtrade.persistence.custom_data_middleware import CustomDataWrapper
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@ -243,6 +245,7 @@ class LocalTrade():
|
||||
id: int = 0
|
||||
|
||||
orders: List[Order] = []
|
||||
custom_data: List[CustomData] = []
|
||||
|
||||
exchange: str = ''
|
||||
pair: str = ''
|
||||
@ -939,6 +942,12 @@ class LocalTrade():
|
||||
or (o.ft_is_open is True and o.status is not None)
|
||||
]
|
||||
|
||||
def set_custom_data(self, key: str, value: Any) -> None:
|
||||
CustomDataWrapper.set_custom_data(key=key, value=value, trade_id=self.id)
|
||||
|
||||
def get_custom_data(self, key: Optional[str]) -> List[CustomData]:
|
||||
return CustomDataWrapper.get_custom_data(key=key, trade_id=self.id)
|
||||
|
||||
@property
|
||||
def nr_of_successful_entries(self) -> int:
|
||||
"""
|
||||
@ -1069,6 +1078,7 @@ class Trade(_DECL_BASE, LocalTrade):
|
||||
id = Column(Integer, primary_key=True)
|
||||
|
||||
orders = relationship("Order", order_by="Order.id", cascade="all, delete-orphan", lazy="joined")
|
||||
custom_data = relationship("CustomData", order_by="CustomData.id", cascade="all, delete-orphan")
|
||||
|
||||
exchange = Column(String(25), nullable=False)
|
||||
pair = Column(String(25), nullable=False, index=True)
|
||||
@ -1141,6 +1151,10 @@ class Trade(_DECL_BASE, LocalTrade):
|
||||
for order in self.orders:
|
||||
Order.query.session.delete(order)
|
||||
|
||||
for entry in self.custom_data:
|
||||
CustomData.query.session.delete(entry)
|
||||
|
||||
CustomData.query.session.commit()
|
||||
Trade.query.session.delete(self)
|
||||
Trade.commit()
|
||||
|
||||
@ -1426,6 +1440,12 @@ class Trade(_DECL_BASE, LocalTrade):
|
||||
.order_by(desc('profit_sum')).first()
|
||||
return best_pair
|
||||
|
||||
def set_custom_data(self, key: str, value: Any) -> None:
|
||||
super().set_custom_data(key=key, value=value)
|
||||
|
||||
def get_custom_data(self, key: Optional[str]) -> List[CustomData]:
|
||||
return super().get_custom_data(key=key)
|
||||
|
||||
@staticmethod
|
||||
def get_trading_volume(start_date: datetime = datetime.fromtimestamp(0)) -> float:
|
||||
"""
|
||||
|
@ -829,6 +829,26 @@ class RPC:
|
||||
'cancel_order_count': c_count,
|
||||
}
|
||||
|
||||
def _rpc_list_custom_data(self, trade_id: int, key: Optional[str]) -> List[Dict[str, Any]]:
    """
    Serialize the custom-data entries of a trade for RPC consumers.
    :param trade_id: id of the trade to look up
    :param key: optional key to restrict the result to matching entries
    :return: list of plain dicts - empty if the trade does not exist
    """
    # Resolve the trade first - without it there is nothing to list.
    trade = Trade.get_trades(trade_filter=[Trade.id == trade_id]).first()
    if trade is None:
        return []

    # Flatten each CustomData row into a plain dict keyed by column name.
    fields = ('id', 'ft_trade_id', 'cd_key', 'cd_type', 'cd_value',
              'created_at', 'updated_at')
    return [
        {field: getattr(data_entry, field) for field in fields}
        for data_entry in trade.get_custom_data(key=key)
    ]
|
||||
|
||||
def _rpc_performance(self) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Handler for performance.
|
||||
|
@ -191,6 +191,7 @@ class Telegram(RPCHandler):
|
||||
CommandHandler('health', self._health),
|
||||
CommandHandler('help', self._help),
|
||||
CommandHandler('version', self._version),
|
||||
CommandHandler('list_custom_data', self._list_custom_data),
|
||||
]
|
||||
callbacks = [
|
||||
CallbackQueryHandler(self._status_table, pattern='update_status_table'),
|
||||
@ -1534,7 +1535,9 @@ class Telegram(RPCHandler):
|
||||
"*/stats:* `Shows Wins / losses by Sell reason as well as "
|
||||
"Avg. holding durations for buys and sells.`\n"
|
||||
"*/help:* `This help message`\n"
|
||||
"*/version:* `Show version`"
|
||||
"*/version:* `Show version`\n"
|
||||
"*/list_custom_data <trade_id> <key>:* `List custom_data for Trade ID & Key combo.`\n"
|
||||
"`If no Key is supplied it will list all key-value pairs found for that Trade ID.`"
|
||||
)
|
||||
|
||||
self._send_msg(message, parse_mode=ParseMode.MARKDOWN)
|
||||
@ -1614,6 +1617,53 @@ class Telegram(RPCHandler):
|
||||
f"*Current state:* `{val['state']}`"
|
||||
)
|
||||
|
||||
@authorized_only
def _list_custom_data(self, update: Update, context: CallbackContext) -> None:
    """
    Handler for /list_custom_data <id> <key>.
    List custom_data for specified trade (and key if supplied).
    :param update: message update
    :param context: callback context - args carry trade-id and optional key
    :return: None
    """
    try:
        args = context.args
        if not args:
            raise RPCException("Trade-id not set.")
        trade_id = int(args[0])
        key = str(args[1]) if len(args) >= 2 else None

        results = self._rpc._rpc_list_custom_data(trade_id, key)
        if not results:
            # Nothing stored for this trade (or trade unknown) - tell the user.
            message = f"Didn't find any custom-data entries for Trade ID: `{trade_id}`"
            message += f" and Key: `{key}`." if key is not None else ""
            self._send_msg(message)
            return

        messages = ['Found custom-data entr' + ('ies: ' if len(results) > 1 else 'y: ')]
        for result in results:
            lines = [
                f"*Key:* `{result['cd_key']}`",
                f"*ID:* `{result['id']}`",
                f"*Trade ID:* `{result['ft_trade_id']}`",
                f"*Type:* `{result['cd_type']}`",
                f"*Value:* `{result['cd_value']}`",
                f"*Create Date:* `{result['created_at']}`",
                f"*Update Date:* `{result['updated_at']}`"
            ]
            # Drop empty lines before joining into one message per entry.
            messages.append("\n".join(line for line in lines if line))
        for msg in messages:
            if len(msg) > MAX_MESSAGE_LENGTH:
                # Telegram rejects over-long messages - replace with a notice.
                msg = ("Message dropped because length exceeds "
                       f"maximum allowed characters: {MAX_MESSAGE_LENGTH}")
                logger.warning(msg)
            self._send_msg(msg)

    except RPCException as e:
        self._send_msg(str(e))
|
||||
|
||||
def _update_msg(self, query: CallbackQuery, msg: str, callback_path: str = "",
|
||||
reload_able: bool = False, parse_mode: str = ParseMode.MARKDOWN) -> None:
|
||||
if reload_able:
|
||||
|
@ -104,7 +104,7 @@ def test_telegram_init(default_conf, mocker, caplog) -> None:
|
||||
"['count'], ['locks'], ['unlock', 'delete_locks'], "
|
||||
"['reload_config', 'reload_conf'], ['show_config', 'show_conf'], "
|
||||
"['stopbuy'], ['whitelist'], ['blacklist'], ['blacklist_delete', 'bl_delete'], "
|
||||
"['logs'], ['edge'], ['health'], ['help'], ['version']"
|
||||
"['logs'], ['edge'], ['health'], ['help'], ['version'], ['list_custom_data']"
|
||||
"]")
|
||||
|
||||
assert log_has(message_str, caplog)
|
||||
|
Loading…
Reference in New Issue
Block a user