Merge fix/pd-mem-leak
commit ba493eb7a7
@@ -3,11 +3,12 @@ This module contains the argument manager class
 """
 import logging
 import re
-from datetime import datetime
+from datetime import datetime, timezone
 from typing import Optional
 
 import arrow
 
+from freqtrade.constants import DATETIME_PRINT_FORMAT
 from freqtrade.exceptions import OperationalException
 
 
@@ -29,6 +30,52 @@ class TimeRange:
         self.startts: int = startts
         self.stopts: int = stopts
 
+    @property
+    def startdt(self) -> Optional[datetime]:
+        if self.startts:
+            return datetime.fromtimestamp(self.startts, tz=timezone.utc)
+        return None
+
+    @property
+    def stopdt(self) -> Optional[datetime]:
+        if self.stopts:
+            return datetime.fromtimestamp(self.stopts, tz=timezone.utc)
+        return None
+
+    @property
+    def timerange_str(self) -> str:
+        """
+        Returns a string representation of the timerange as used by parse_timerange.
+        Follows the format yyyymmdd-yyyymmdd - leaving out the parts that are not set.
+        """
+        start = ''
+        stop = ''
+        if startdt := self.startdt:
+            start = startdt.strftime('%Y%m%d')
+        if stopdt := self.stopdt:
+            stop = stopdt.strftime('%Y%m%d')
+        return f"{start}-{stop}"
+
+    @property
+    def start_fmt(self) -> str:
+        """
+        Returns a string representation of the start date
+        """
+        val = 'unbounded'
+        if (startdt := self.startdt) is not None:
+            val = startdt.strftime(DATETIME_PRINT_FORMAT)
+        return val
+
+    @property
+    def stop_fmt(self) -> str:
+        """
+        Returns a string representation of the stop date
+        """
+        val = 'unbounded'
+        if (stopdt := self.stopdt) is not None:
+            val = stopdt.strftime(DATETIME_PRINT_FORMAT)
+        return val
+
     def __eq__(self, other):
         """Override the default Equals behavior"""
         return (self.starttype == other.starttype and self.stoptype == other.stoptype
@@ -3,7 +3,6 @@ Functions to convert data from one format to another
 """
 import itertools
 import logging
-from datetime import datetime, timezone
 from operator import itemgetter
 from typing import Dict, List
 
@@ -138,11 +137,9 @@ def trim_dataframe(df: DataFrame, timerange, df_date_col: str = 'date',
         df = df.iloc[startup_candles:, :]
     else:
         if timerange.starttype == 'date':
-            start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
-            df = df.loc[df[df_date_col] >= start, :]
+            df = df.loc[df[df_date_col] >= timerange.startdt, :]
     if timerange.stoptype == 'date':
-        stop = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
-        df = df.loc[df[df_date_col] <= stop, :]
+        df = df.loc[df[df_date_col] <= timerange.stopdt, :]
     return df
 
 
@@ -1,6 +1,6 @@
 import logging
 import operator
-from datetime import datetime, timezone
+from datetime import datetime
 from pathlib import Path
 from typing import Dict, List, Optional, Tuple
 
@@ -160,9 +160,9 @@ def _load_cached_data_for_updating(
     end = None
     if timerange:
         if timerange.starttype == 'date':
-            start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
+            start = timerange.startdt
         if timerange.stoptype == 'date':
-            end = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
+            end = timerange.stopdt
 
     # Intentionally don't pass timerange in - since we need to load the full dataset.
     data = data_handler.ohlcv_load(pair, timeframe=timeframe,
@@ -366,13 +366,11 @@ class IDataHandler(ABC):
         """
 
         if timerange.starttype == 'date':
-            start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
-            if pairdata.iloc[0]['date'] > start:
+            if pairdata.iloc[0]['date'] > timerange.startdt:
                 logger.warning(f"{pair}, {candle_type}, {timeframe}, "
                                f"data starts at {pairdata.iloc[0]['date']:%Y-%m-%d %H:%M:%S}")
         if timerange.stoptype == 'date':
-            stop = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
-            if pairdata.iloc[-1]['date'] < stop:
+            if pairdata.iloc[-1]['date'] < timerange.stopdt:
                 logger.warning(f"{pair}, {candle_type}, {timeframe}, "
                                f"data ends at {pairdata.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}")
 
@@ -392,7 +392,7 @@ class Edge:
         # Returning a list of pairs in order of "expectancy"
         return final
 
-    def _find_trades_for_stoploss_range(self, df, pair, stoploss_range):
+    def _find_trades_for_stoploss_range(self, df, pair: str, stoploss_range) -> list:
         buy_column = df['enter_long'].values
         sell_column = df['exit_long'].values
         date_column = df['date'].values
@@ -407,7 +407,7 @@ class Edge:
         return result
 
     def _detect_next_stop_or_sell_point(self, buy_column, sell_column, date_column,
-                                        ohlc_columns, stoploss, pair):
+                                        ohlc_columns, stoploss, pair: str):
         """
         Iterate through ohlc_columns in order to find the next trade
         Next trade opens from the first buy signal noticed to
@@ -433,9 +433,7 @@ class FreqaiDataKitchen:
             timerange_train.stopts = timerange_train.startts + train_period_days
 
             first = False
-            start = datetime.fromtimestamp(timerange_train.startts, tz=timezone.utc)
-            stop = datetime.fromtimestamp(timerange_train.stopts, tz=timezone.utc)
-            tr_training_list.append(start.strftime("%Y%m%d") + "-" + stop.strftime("%Y%m%d"))
+            tr_training_list.append(timerange_train.timerange_str)
             tr_training_list_timerange.append(copy.deepcopy(timerange_train))
 
             # associated backtest period
@@ -447,9 +445,7 @@ class FreqaiDataKitchen:
             if timerange_backtest.stopts > config_timerange.stopts:
                 timerange_backtest.stopts = config_timerange.stopts
 
-            start = datetime.fromtimestamp(timerange_backtest.startts, tz=timezone.utc)
-            stop = datetime.fromtimestamp(timerange_backtest.stopts, tz=timezone.utc)
-            tr_backtesting_list.append(start.strftime("%Y%m%d") + "-" + stop.strftime("%Y%m%d"))
+            tr_backtesting_list.append(timerange_backtest.timerange_str)
             tr_backtesting_list_timerange.append(copy.deepcopy(timerange_backtest))
 
             # ensure we are predicting on exactly same amount of data as requested by user defined
@@ -491,11 +487,9 @@ class FreqaiDataKitchen:
         it is sliced down to just the present training period.
         """
 
-        start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
-        stop = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
-        df = df.loc[df["date"] >= start, :]
+        df = df.loc[df["date"] >= timerange.startdt, :]
         if not self.live:
-            df = df.loc[df["date"] < stop, :]
+            df = df.loc[df["date"] < timerange.stopdt, :]
 
         return df
 
@@ -1058,9 +1052,7 @@ class FreqaiDataKitchen:
         backtest_timerange.startts = (
             backtest_timerange.startts - backtest_period_days * SECONDS_IN_DAY
         )
-        start = datetime.fromtimestamp(backtest_timerange.startts, tz=timezone.utc)
-        stop = datetime.fromtimestamp(backtest_timerange.stopts, tz=timezone.utc)
-        full_timerange = start.strftime("%Y%m%d") + "-" + stop.strftime("%Y%m%d")
+        full_timerange = backtest_timerange.timerange_str
         config_path = Path(self.config["config_files"][0])
 
         if not self.full_path.is_dir():
@@ -13,7 +13,7 @@ from numpy.typing import NDArray
 from pandas import DataFrame
 
 from freqtrade.configuration import TimeRange
-from freqtrade.constants import DATETIME_PRINT_FORMAT, Config
+from freqtrade.constants import Config
 from freqtrade.enums import RunMode
 from freqtrade.exceptions import OperationalException
 from freqtrade.exchange import timeframe_to_seconds
@@ -788,14 +788,8 @@ class IFreqaiModel(ABC):
         :return: if the data exists or not
         """
         if self.config.get("freqai_backtest_live_models", False) and len(dataframe_backtest) == 0:
-            tr_backtest_startts_str = datetime.fromtimestamp(
-                tr_backtest.startts,
-                tz=timezone.utc).strftime(DATETIME_PRINT_FORMAT)
-            tr_backtest_stopts_str = datetime.fromtimestamp(
-                tr_backtest.stopts,
-                tz=timezone.utc).strftime(DATETIME_PRINT_FORMAT)
-            logger.info(f"No data found for pair {pair} from {tr_backtest_startts_str} "
-                        f" from {tr_backtest_startts_str} to {tr_backtest_stopts_str}. "
+            logger.info(f"No data found for pair {pair} from "
+                        f"from { tr_backtest.start_fmt} to {tr_backtest.stop_fmt}. "
                         "Probably more than one training within the same candle period.")
             return False
         return True
@@ -810,18 +804,11 @@ class IFreqaiModel(ABC):
         :param pair: the current pair
         :param total_trains: total trains (total number of slides for the sliding window)
         """
-        tr_train_startts_str = datetime.fromtimestamp(
-            tr_train.startts,
-            tz=timezone.utc).strftime(DATETIME_PRINT_FORMAT)
-        tr_train_stopts_str = datetime.fromtimestamp(
-            tr_train.stopts,
-            tz=timezone.utc).strftime(DATETIME_PRINT_FORMAT)
-
         if not self.config.get("freqai_backtest_live_models", False):
             logger.info(
                 f"Training {pair}, {self.pair_it}/{self.total_pairs} pairs"
-                f" from {tr_train_startts_str} "
-                f"to {tr_train_stopts_str}, {train_it}/{total_trains} "
+                f" from {tr_train.start_fmt} "
+                f"to {tr_train.stop_fmt}, {train_it}/{total_trains} "
                 "trains"
             )
     # Following methods which are overridden by user made prediction models.
@@ -230,7 +230,4 @@ def get_timerange_backtest_live_models(config: Config) -> str:
     dk = FreqaiDataKitchen(config)
     models_path = dk.get_full_models_path(config)
     timerange, _ = dk.get_timerange_and_assets_end_dates_from_ready_models(models_path)
-    start_date = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
-    end_date = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
-    tr = f"{start_date.strftime('%Y%m%d')}-{end_date.strftime('%Y%m%d')}"
-    return tr
+    return timerange.timerange_str
@@ -354,7 +354,7 @@ class FreqtradeBot(LoggingMixin):
         if self.trading_mode == TradingMode.FUTURES:
             self._schedule.run_pending()
 
-    def update_closed_trades_without_assigned_fees(self):
+    def update_closed_trades_without_assigned_fees(self) -> None:
         """
         Update closed trades without close fees assigned.
         Only acts when Orders are in the database, otherwise the last order-id is unknown.
@@ -379,7 +379,7 @@ class FreqtradeBot(LoggingMixin):
                                             stoploss_order=order.ft_order_side == 'stoploss',
                                             send_msg=False)
 
-        trades: List[Trade] = Trade.get_open_trades_without_assigned_fees()
+        trades = Trade.get_open_trades_without_assigned_fees()
         for trade in trades:
             if trade.is_open and not trade.fee_updated(trade.entry_side):
                 order = trade.select_order(trade.entry_side, False)
@@ -262,7 +262,10 @@ def dataframe_to_json(dataframe: pandas.DataFrame) -> str:
     :param dataframe: A pandas DataFrame
     :returns: A JSON string of the pandas DataFrame
     """
-    return dataframe.to_json(orient='split')
+    # https://github.com/pandas-dev/pandas/issues/24889
+    # https://github.com/pandas-dev/pandas/issues/40443
+    # We need to convert to a dict to avoid mem leak
+    return dataframe.to_dict(orient='tight')
 
 
 def json_to_dataframe(data: str) -> pandas.DataFrame:
@@ -271,7 +274,7 @@ def json_to_dataframe(data: str) -> pandas.DataFrame:
     :param data: A JSON string
     :returns: A pandas DataFrame from the JSON string
     """
-    dataframe = pandas.read_json(data, orient='split')
+    dataframe = pandas.DataFrame.from_dict(data, orient='tight')
     if 'date' in dataframe.columns:
         dataframe['date'] = pandas.to_datetime(dataframe['date'], unit='ms', utc=True)
 
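With the two hunks above, the transported payload becomes a plain dict in pandas' 'tight' orientation instead of a JSON string (the `-> str` annotation on `dataframe_to_json` is now stale), which avoids the `to_json`/`read_json` memory leak referenced in the pandas issues. A rough sketch of the new round trip, using a made-up two-row frame:

```python
import pandas

from freqtrade.misc import dataframe_to_json, json_to_dataframe

# Made-up frame for illustration only.
df = pandas.DataFrame({
    'date': pandas.to_datetime([1668643200000, 1668643500000], unit='ms', utc=True),
    'close': [16500.0, 16520.5],
})

payload = dataframe_to_json(df)        # now a dict built via df.to_dict(orient='tight')
restored = json_to_dataframe(payload)  # rebuilt via DataFrame.from_dict(..., orient='tight')
```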
@@ -166,7 +166,7 @@ class Backtesting:
         PairLocks.use_db = True
         Trade.use_db = True
 
-    def init_backtest_detail(self):
+    def init_backtest_detail(self) -> None:
         # Load detail timeframe if specified
         self.timeframe_detail = str(self.config.get('timeframe_detail', ''))
         if self.timeframe_detail:
@@ -1286,8 +1286,7 @@ class Backtesting:
     def _get_min_cached_backtest_date(self):
         min_backtest_date = None
         backtest_cache_age = self.config.get('backtest_cache', constants.BACKTEST_CACHE_DEFAULT)
-        if self.timerange.stopts == 0 or datetime.fromtimestamp(
-                self.timerange.stopts, tz=timezone.utc) > datetime.now(tz=timezone.utc):
+        if self.timerange.stopts == 0 or self.timerange.stopdt > datetime.now(tz=timezone.utc):
             logger.warning('Backtest result caching disabled due to use of open-ended timerange.')
         elif backtest_cache_age == 'day':
             min_backtest_date = datetime.now(tz=timezone.utc) - timedelta(days=1)
@@ -1,5 +1,5 @@
 import logging
-from typing import List
+from typing import List, Optional
 
 from sqlalchemy import inspect, select, text, tuple_, update
 
@@ -31,9 +31,9 @@ def get_backup_name(tabs: List[str], backup_prefix: str):
     return table_back_name
 
 
-def get_last_sequence_ids(engine, trade_back_name, order_back_name):
-    order_id: int = None
-    trade_id: int = None
+def get_last_sequence_ids(engine, trade_back_name: str, order_back_name: str):
+    order_id: Optional[int] = None
+    trade_id: Optional[int] = None
 
     if engine.name == 'postgresql':
         with engine.begin() as connection:
@@ -76,13 +76,14 @@ class WebSocketChannel:
         Close the WebSocketChannel
         """
 
+        self._closed.set()
         self._relay_task.cancel()
 
         try:
             await self._websocket.close()
         except Exception:
             pass
 
-        self._closed.set()
-
     def is_closed(self) -> bool:
         """
         Closed flag
@@ -4,7 +4,7 @@ from typing import Any, Dict, Union
 
 import orjson
 import rapidjson
-from pandas import DataFrame
+from pandas import DataFrame, Timestamp
 
 from freqtrade.misc import dataframe_to_json, json_to_dataframe
 from freqtrade.rpc.api_server.ws.proxy import WebSocketProxy
@@ -51,6 +51,11 @@ def _json_default(z):
             '__type__': 'dataframe',
             '__value__': dataframe_to_json(z)
         }
+    # Pandas returns a Timestamp object, we need to
+    # convert it to a timestamp int (with ms) for orjson
+    # to handle it
+    if isinstance(z, Timestamp):
+        return z.timestamp() * 1e3
     raise TypeError
 
 
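`_json_default` is the fallback hook for types the serializer cannot encode natively. Roughly how such a hook is wired up (a sketch assuming orjson's `default=` parameter is used; `message` is a placeholder name, not something from this diff):

```python
import orjson

# orjson invokes the default hook for unknown types: DataFrames become the
# {'__type__': 'dataframe', ...} wrapper above, and pandas Timestamps become
# millisecond floats via z.timestamp() * 1e3.
raw = orjson.dumps(message, default=_json_default)
```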
@@ -1062,7 +1062,7 @@ class Telegram(RPCHandler):
             self._rpc._rpc_force_entry(pair, price, order_side=order_side)
         except RPCException as e:
             logger.exception("Forcebuy error!")
-            self._send_msg(str(e))
+            self._send_msg(str(e), ParseMode.HTML)
 
     def _force_enter_inline(self, update: Update, _: CallbackContext) -> None:
         if update.callback_query:
@ -30,6 +30,8 @@ asyncio_mode = "auto"
|
||||
|
||||
[tool.mypy]
|
||||
ignore_missing_imports = true
|
||||
namespace_packages = false
|
||||
implicit_optional = true
|
||||
warn_unused_ignores = true
|
||||
exclude = [
|
||||
'^build_helpers\.py$'
|
||||
|
@@ -8,7 +8,7 @@
 coveralls==3.3.1
 flake8==5.0.4
 flake8-tidy-imports==4.8.0
-mypy==0.982
+mypy==0.990
 pre-commit==2.20.0
 pytest==7.2.0
 pytest-asyncio==0.20.2
@@ -4,7 +4,8 @@ pandas-ta==0.3.14b
 
 ccxt==2.1.75
 # Pin cryptography for now due to rust build errors with piwheels
-cryptography==38.0.3
+cryptography==38.0.1; platform_machine == 'armv7l'
+cryptography==38.0.3; platform_machine != 'armv7l'
 aiohttp==3.8.3
 SQLAlchemy==1.4.44
 python-telegram-bot==13.14
@@ -101,7 +101,7 @@ def json_deserialize(message):
     :param message: The message to deserialize
     """
     def json_to_dataframe(data: str) -> pandas.DataFrame:
-        dataframe = pandas.read_json(data, orient='split')
+        dataframe = pandas.DataFrame.from_dict(data, orient='tight')
         if 'date' in dataframe.columns:
             dataframe['date'] = pandas.to_datetime(dataframe['date'], unit='ms', utc=True)
 
@@ -33,6 +33,7 @@ def test_rpc_trade_status(default_conf, ticker, fee, mocker) -> None:
         'freqtrade.exchange.Exchange',
         fetch_ticker=ticker,
         get_fee=fee,
+        _is_dry_limit_order_filled=MagicMock(side_effect=[False, True]),
     )
 
     freqtradebot = get_patched_freqtradebot(mocker, default_conf)
@@ -44,6 +45,91 @@ def test_rpc_trade_status(default_conf, ticker, fee, mocker) -> None:
         rpc._rpc_trade_status()
 
     freqtradebot.enter_positions()
+
+    # Open order...
+    results = rpc._rpc_trade_status()
+    assert results[0] == {
+        'trade_id': 1,
+        'pair': 'ETH/BTC',
+        'base_currency': 'ETH',
+        'quote_currency': 'BTC',
+        'open_date': ANY,
+        'open_timestamp': ANY,
+        'is_open': ANY,
+        'fee_open': ANY,
+        'fee_open_cost': ANY,
+        'fee_open_currency': ANY,
+        'fee_close': fee.return_value,
+        'fee_close_cost': ANY,
+        'fee_close_currency': ANY,
+        'open_rate_requested': ANY,
+        'open_trade_value': 0.0010025,
+        'close_rate_requested': ANY,
+        'sell_reason': ANY,
+        'exit_reason': ANY,
+        'exit_order_status': ANY,
+        'min_rate': ANY,
+        'max_rate': ANY,
+        'strategy': ANY,
+        'buy_tag': ANY,
+        'enter_tag': ANY,
+        'timeframe': 5,
+        'open_order_id': ANY,
+        'close_date': None,
+        'close_timestamp': None,
+        'open_rate': 1.098e-05,
+        'close_rate': None,
+        'current_rate': 1.099e-05,
+        'amount': 91.07468124,
+        'amount_requested': 91.07468124,
+        'stake_amount': 0.001,
+        'trade_duration': None,
+        'trade_duration_s': None,
+        'close_profit': None,
+        'close_profit_pct': None,
+        'close_profit_abs': None,
+        'current_profit': 0.0,
+        'current_profit_pct': 0.0,
+        'current_profit_abs': 0.0,
+        'profit_ratio': 0.0,
+        'profit_pct': 0.0,
+        'profit_abs': 0.0,
+        'profit_fiat': ANY,
+        'stop_loss_abs': 0.0,
+        'stop_loss_pct': None,
+        'stop_loss_ratio': None,
+        'stoploss_order_id': None,
+        'stoploss_last_update': ANY,
+        'stoploss_last_update_timestamp': ANY,
+        'initial_stop_loss_abs': 0.0,
+        'initial_stop_loss_pct': None,
+        'initial_stop_loss_ratio': None,
+        'stoploss_current_dist': -1.099e-05,
+        'stoploss_current_dist_ratio': -1.0,
+        'stoploss_current_dist_pct': pytest.approx(-100.0),
+        'stoploss_entry_dist': -0.0010025,
+        'stoploss_entry_dist_ratio': -1.0,
+        'open_order': '(limit buy rem=91.07468123)',
+        'realized_profit': 0.0,
+        'exchange': 'binance',
+        'leverage': 1.0,
+        'interest_rate': 0.0,
+        'liquidation_price': None,
+        'is_short': False,
+        'funding_fees': 0.0,
+        'trading_mode': TradingMode.SPOT,
+        'orders': [{
+            'amount': 91.07468123, 'average': 1.098e-05, 'safe_price': 1.098e-05,
+            'cost': 0.0009999999999054, 'filled': 0.0, 'ft_order_side': 'buy',
+            'order_date': ANY, 'order_timestamp': ANY, 'order_filled_date': ANY,
+            'order_filled_timestamp': ANY, 'order_type': 'limit', 'price': 1.098e-05,
+            'is_open': True, 'pair': 'ETH/BTC', 'order_id': ANY,
+            'remaining': 91.07468123, 'status': ANY, 'ft_is_entry': True,
+        }],
+    }
+
+    # Fill open order ...
+    freqtradebot.manage_open_orders()
     trades = Trade.get_open_trades()
     freqtradebot.exit_positions(trades)
 
@@ -67,7 +67,7 @@ def botclient(default_conf, mocker):
 
 def client_post(client, url, data={}):
     return client.post(url,
-                       data=data,
+                       content=data,
                        headers={'Authorization': _basic_auth_str(_TEST_USER, _TEST_PASS),
                                 'Origin': 'http://example.com',
                                 'content-type': 'application/json'
@@ -1,4 +1,6 @@
 # pragma pylint: disable=missing-docstring, C0103
+from datetime import datetime, timezone
+
 import arrow
 import pytest
 
@@ -8,16 +10,28 @@ from freqtrade.exceptions import OperationalException
 
 def test_parse_timerange_incorrect():
 
-    assert TimeRange('date', None, 1274486400, 0) == TimeRange.parse_timerange('20100522-')
-    assert TimeRange(None, 'date', 0, 1274486400) == TimeRange.parse_timerange('-20100522')
+    timerange = TimeRange.parse_timerange('20100522-')
+    assert TimeRange('date', None, 1274486400, 0) == timerange
+    assert timerange.timerange_str == '20100522-'
+    timerange = TimeRange.parse_timerange('-20100522')
+    assert TimeRange(None, 'date', 0, 1274486400) == timerange
+    assert timerange.timerange_str == '-20100522'
     timerange = TimeRange.parse_timerange('20100522-20150730')
     assert timerange == TimeRange('date', 'date', 1274486400, 1438214400)
+    assert timerange.timerange_str == '20100522-20150730'
+    assert timerange.start_fmt == '2010-05-22 00:00:00'
+    assert timerange.stop_fmt == '2015-07-30 00:00:00'
 
     # Added test for unix timestamp - BTC genesis date
     assert TimeRange('date', None, 1231006505, 0) == TimeRange.parse_timerange('1231006505-')
     assert TimeRange(None, 'date', 0, 1233360000) == TimeRange.parse_timerange('-1233360000')
     timerange = TimeRange.parse_timerange('1231006505-1233360000')
     assert TimeRange('date', 'date', 1231006505, 1233360000) == timerange
+    assert isinstance(timerange.startdt, datetime)
+    assert isinstance(timerange.stopdt, datetime)
+    assert timerange.startdt == datetime.fromtimestamp(1231006505, tz=timezone.utc)
+    assert timerange.stopdt == datetime.fromtimestamp(1233360000, tz=timezone.utc)
+    assert timerange.timerange_str == '20090103-20090131'
 
     timerange = TimeRange.parse_timerange('1231006505000-1233360000000')
     assert TimeRange('date', 'date', 1231006505, 1233360000) == timerange
@@ -45,6 +59,7 @@ def test_subtract_start():
     x = TimeRange(None, 'date', 0, 1438214400)
     x.subtract_start(300)
     assert not x.startts
+    assert not x.startdt
 
     x = TimeRange('date', None, 1274486400, 0)
     x.subtract_start(300)