Merge branch 'develop' into db_keep_orders
@@ -15,7 +15,7 @@ ARGS_STRATEGY = ["strategy", "strategy_path"]
 ARGS_TRADE = ["db_url", "sd_notify", "dry_run"]
 
-ARGS_COMMON_OPTIMIZE = ["timeframe", "timerange",
+ARGS_COMMON_OPTIMIZE = ["timeframe", "timerange", "dataformat_ohlcv",
                         "max_open_trades", "stake_amount", "fee"]
 
 ARGS_BACKTEST = ARGS_COMMON_OPTIMIZE + ["position_stacking", "use_max_market_positions",
@@ -35,8 +35,8 @@ def start_download_data(args: Dict[str, Any]) -> None:
             "Downloading data requires a list of pairs. "
             "Please check the documentation on how to configure this.")
 
-    logger.info(f'About to download pairs: {config["pairs"]}, '
-                f'intervals: {config["timeframes"]} to {config["datadir"]}')
+    logger.info(f"About to download pairs: {config['pairs']}, "
+                f"intervals: {config['timeframes']} to {config['datadir']}")
 
     pairs_not_available: List[str] = []
@@ -51,21 +51,21 @@ def start_download_data(args: Dict[str, Any]) -> None:
 
         if config.get('download_trades'):
             pairs_not_available = refresh_backtest_trades_data(
-                exchange, pairs=config["pairs"], datadir=config['datadir'],
-                timerange=timerange, erase=bool(config.get("erase")),
+                exchange, pairs=config['pairs'], datadir=config['datadir'],
+                timerange=timerange, erase=bool(config.get('erase')),
                 data_format=config['dataformat_trades'])
 
             # Convert downloaded trade data to different timeframes
             convert_trades_to_ohlcv(
-                pairs=config["pairs"], timeframes=config["timeframes"],
-                datadir=config['datadir'], timerange=timerange, erase=bool(config.get("erase")),
+                pairs=config['pairs'], timeframes=config['timeframes'],
+                datadir=config['datadir'], timerange=timerange, erase=bool(config.get('erase')),
                 data_format_ohlcv=config['dataformat_ohlcv'],
                 data_format_trades=config['dataformat_trades'],
             )
         else:
             pairs_not_available = refresh_backtest_ohlcv_data(
-                exchange, pairs=config["pairs"], timeframes=config["timeframes"],
-                datadir=config['datadir'], timerange=timerange, erase=bool(config.get("erase")),
+                exchange, pairs=config['pairs'], timeframes=config['timeframes'],
+                datadir=config['datadir'], timerange=timerange, erase=bool(config.get('erase')),
                 data_format=config['dataformat_ohlcv'])
 
     except KeyboardInterrupt:
@@ -75,7 +75,7 @@ def start_new_strategy(args: Dict[str, Any]) -> None:
     if args["strategy"] == "DefaultStrategy":
         raise OperationalException("DefaultStrategy is not allowed as name.")
 
-    new_path = config['user_data_dir'] / USERPATH_STRATEGIES / (args["strategy"] + ".py")
+    new_path = config['user_data_dir'] / USERPATH_STRATEGIES / (args['strategy'] + '.py')
 
     if new_path.exists():
         raise OperationalException(f"`{new_path}` already exists. "
@@ -125,11 +125,11 @@ def start_new_hyperopt(args: Dict[str, Any]) -> None:
 
     config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
 
-    if "hyperopt" in args and args["hyperopt"]:
-        if args["hyperopt"] == "DefaultHyperopt":
+    if 'hyperopt' in args and args['hyperopt']:
+        if args['hyperopt'] == 'DefaultHyperopt':
             raise OperationalException("DefaultHyperopt is not allowed as name.")
 
-        new_path = config['user_data_dir'] / USERPATH_HYPEROPTS / (args["hyperopt"] + ".py")
+        new_path = config['user_data_dir'] / USERPATH_HYPEROPTS / (args['hyperopt'] + '.py')
 
         if new_path.exists():
             raise OperationalException(f"`{new_path}` already exists. "
@@ -54,7 +54,7 @@ class Configuration:
         :param files: List of file paths
         :return: configuration dictionary
         """
-        c = Configuration({"config": files}, RunMode.OTHER)
+        c = Configuration({'config': files}, RunMode.OTHER)
         return c.get_config()
 
     def load_from_files(self, files: List[str]) -> Dict[str, Any]:
@@ -123,10 +123,10 @@ class Configuration:
         the -v/--verbose, --logfile options
         """
         # Log level
-        config.update({'verbosity': self.args.get("verbosity", 0)})
+        config.update({'verbosity': self.args.get('verbosity', 0)})
 
-        if 'logfile' in self.args and self.args["logfile"]:
-            config.update({'logfile': self.args["logfile"]})
+        if 'logfile' in self.args and self.args['logfile']:
+            config.update({'logfile': self.args['logfile']})
 
         setup_logging(config)
@@ -149,22 +149,22 @@ class Configuration:
     def _process_common_options(self, config: Dict[str, Any]) -> None:
 
         # Set strategy if not specified in config and or if it's non default
-        if self.args.get("strategy") or not config.get('strategy'):
-            config.update({'strategy': self.args.get("strategy")})
+        if self.args.get('strategy') or not config.get('strategy'):
+            config.update({'strategy': self.args.get('strategy')})
 
         self._args_to_config(config, argname='strategy_path',
                              logstring='Using additional Strategy lookup path: {}')
 
-        if ('db_url' in self.args and self.args["db_url"] and
-                self.args["db_url"] != constants.DEFAULT_DB_PROD_URL):
-            config.update({'db_url': self.args["db_url"]})
+        if ('db_url' in self.args and self.args['db_url'] and
+                self.args['db_url'] != constants.DEFAULT_DB_PROD_URL):
+            config.update({'db_url': self.args['db_url']})
             logger.info('Parameter --db-url detected ...')
 
         if config.get('forcebuy_enable', False):
             logger.warning('`forcebuy` RPC message enabled.')
 
         # Support for sd_notify
-        if 'sd_notify' in self.args and self.args["sd_notify"]:
+        if 'sd_notify' in self.args and self.args['sd_notify']:
             config['internals'].update({'sd_notify': True})
 
     def _process_datadir_options(self, config: Dict[str, Any]) -> None:
@@ -173,24 +173,24 @@ class Configuration:
         --user-data, --datadir
         """
         # Check exchange parameter here - otherwise `datadir` might be wrong.
-        if "exchange" in self.args and self.args["exchange"]:
-            config['exchange']['name'] = self.args["exchange"]
+        if 'exchange' in self.args and self.args['exchange']:
+            config['exchange']['name'] = self.args['exchange']
             logger.info(f"Using exchange {config['exchange']['name']}")
 
         if 'pair_whitelist' not in config['exchange']:
             config['exchange']['pair_whitelist'] = []
 
-        if 'user_data_dir' in self.args and self.args["user_data_dir"]:
-            config.update({'user_data_dir': self.args["user_data_dir"]})
+        if 'user_data_dir' in self.args and self.args['user_data_dir']:
+            config.update({'user_data_dir': self.args['user_data_dir']})
         elif 'user_data_dir' not in config:
             # Default to cwd/user_data (legacy option ...)
-            config.update({'user_data_dir': str(Path.cwd() / "user_data")})
+            config.update({'user_data_dir': str(Path.cwd() / 'user_data')})
 
         # reset to user_data_dir so this contains the absolute path.
         config['user_data_dir'] = create_userdata_dir(config['user_data_dir'], create_dir=False)
         logger.info('Using user-data directory: %s ...', config['user_data_dir'])
 
-        config.update({'datadir': create_datadir(config, self.args.get("datadir", None))})
+        config.update({'datadir': create_datadir(config, self.args.get('datadir', None))})
         logger.info('Using data directory: %s ...', config.get('datadir'))
 
         if self.args.get('exportfilename'):
@@ -219,8 +219,8 @@ class Configuration:
             config.update({'use_max_market_positions': False})
             logger.info('Parameter --disable-max-market-positions detected ...')
             logger.info('max_open_trades set to unlimited ...')
-        elif 'max_open_trades' in self.args and self.args["max_open_trades"]:
-            config.update({'max_open_trades': self.args["max_open_trades"]})
+        elif 'max_open_trades' in self.args and self.args['max_open_trades']:
+            config.update({'max_open_trades': self.args['max_open_trades']})
             logger.info('Parameter --max-open-trades detected, '
                         'overriding max_open_trades to: %s ...', config.get('max_open_trades'))
         elif config['runmode'] in NON_UTIL_MODES:
@@ -447,12 +447,12 @@ class Configuration:
             config['pairs'].sort()
             return
 
-        if "config" in self.args and self.args["config"]:
+        if 'config' in self.args and self.args['config']:
             logger.info("Using pairlist from configuration.")
             config['pairs'] = config.get('exchange', {}).get('pair_whitelist')
         else:
             # Fall back to /dl_path/pairs.json
-            pairs_file = config['datadir'] / "pairs.json"
+            pairs_file = config['datadir'] / 'pairs.json'
             if pairs_file.exists():
                 with pairs_file.open('r') as f:
                     config['pairs'] = json_load(f)
@@ -24,7 +24,7 @@ ORDERTIF_POSSIBILITIES = ['gtc', 'fok', 'ioc']
 AVAILABLE_PAIRLISTS = ['StaticPairList', 'VolumePairList',
                        'AgeFilter', 'PrecisionFilter', 'PriceFilter',
                        'ShuffleFilter', 'SpreadFilter']
-AVAILABLE_DATAHANDLERS = ['json', 'jsongz']
+AVAILABLE_DATAHANDLERS = ['json', 'jsongz', 'hdf5']
 DRY_RUN_WALLET = 1000
 DATETIME_PRINT_FORMAT = '%Y-%m-%d %H:%M:%S'
 MATH_CLOSE_PREC = 1e-14  # Precision used for float comparisons
@@ -208,7 +208,7 @@ def load_trades_from_db(db_url: str, strategy: Optional[str] = None) -> pd.DataF
 def load_trades(source: str, db_url: str, exportfilename: Path,
                 no_trades: bool = False, strategy: Optional[str] = None) -> pd.DataFrame:
     """
-    Based on configuration option "trade_source":
+    Based on configuration option 'trade_source':
     * loads data from DB (using `db_url`)
     * loads data from backtestfile (using `exportfilename`)
     :param source: "DB" or "file" - specify source to load from
@@ -255,7 +255,8 @@ def convert_ohlcv_format(config: Dict[str, Any], convert_from: str, convert_to:
                                  drop_incomplete=False,
                                  startup_candles=0)
             logger.info(f"Converting {len(data)} candles for {pair}")
-            trg.ohlcv_store(pair=pair, timeframe=timeframe, data=data)
-            if erase and convert_from != convert_to:
-                logger.info(f"Deleting source data for {pair} / {timeframe}")
-                src.ohlcv_purge(pair=pair, timeframe=timeframe)
+            if len(data) > 0:
+                trg.ohlcv_store(pair=pair, timeframe=timeframe, data=data)
+                if erase and convert_from != convert_to:
+                    logger.info(f"Deleting source data for {pair} / {timeframe}")
+                    src.ohlcv_purge(pair=pair, timeframe=timeframe)
@@ -39,6 +39,12 @@ class DataProvider:
         """
         self.__cached_pairs[(pair, timeframe)] = (dataframe, Arrow.utcnow().datetime)
 
+    def add_pairlisthandler(self, pairlists) -> None:
+        """
+        Allow adding pairlisthandler after initialization
+        """
+        self._pairlists = pairlists
+
     def refresh(self,
                 pairlist: ListPairsWithTimeframes,
                 helping_pairs: ListPairsWithTimeframes = None) -> None:
freqtrade/data/history/hdf5datahandler.py (new file, 211 lines)
@@ -0,0 +1,211 @@
+import logging
+import re
+from pathlib import Path
+from typing import List, Optional
+
+import pandas as pd
+
+from freqtrade import misc
+from freqtrade.configuration import TimeRange
+from freqtrade.constants import (DEFAULT_DATAFRAME_COLUMNS,
+                                 DEFAULT_TRADES_COLUMNS,
+                                 ListPairsWithTimeframes)
+
+from .idatahandler import IDataHandler, TradeList
+
+logger = logging.getLogger(__name__)
+
+
+class HDF5DataHandler(IDataHandler):
+
+    _columns = DEFAULT_DATAFRAME_COLUMNS
+
+    @classmethod
+    def ohlcv_get_available_data(cls, datadir: Path) -> ListPairsWithTimeframes:
+        """
+        Returns a list of all pairs with ohlcv data available in this datadir
+        :param datadir: Directory to search for ohlcv files
+        :return: List of Tuples of (pair, timeframe)
+        """
+        _tmp = [re.search(r'^([a-zA-Z_]+)\-(\d+\S+)(?=.h5)', p.name)
+                for p in datadir.glob("*.h5")]
+        return [(match[1].replace('_', '/'), match[2]) for match in _tmp
+                if match and len(match.groups()) > 1]
+
+    @classmethod
+    def ohlcv_get_pairs(cls, datadir: Path, timeframe: str) -> List[str]:
+        """
+        Returns a list of all pairs with ohlcv data available in this datadir
+        for the specified timeframe
+        :param datadir: Directory to search for ohlcv files
+        :param timeframe: Timeframe to search pairs for
+        :return: List of Pairs
+        """
+
+        _tmp = [re.search(r'^(\S+)(?=\-' + timeframe + '.h5)', p.name)
+                for p in datadir.glob(f"*{timeframe}.h5")]
+        # Check if regex found something and only return these results
+        return [match[0].replace('_', '/') for match in _tmp if match]
+
+    def ohlcv_store(self, pair: str, timeframe: str, data: pd.DataFrame) -> None:
+        """
+        Store data in hdf5 file.
+        :param pair: Pair - used to generate filename
+        :timeframe: Timeframe - used to generate filename
+        :data: Dataframe containing OHLCV data
+        :return: None
+        """
+        key = self._pair_ohlcv_key(pair, timeframe)
+        _data = data.copy()
+
+        filename = self._pair_data_filename(self._datadir, pair, timeframe)
+
+        ds = pd.HDFStore(filename, mode='a', complevel=9, complib='blosc')
+        ds.put(key, _data.loc[:, self._columns], format='table', data_columns=['date'])
+
+        ds.close()
+
+    def _ohlcv_load(self, pair: str, timeframe: str,
+                    timerange: Optional[TimeRange] = None) -> pd.DataFrame:
+        """
+        Internal method used to load data for one pair from disk.
+        Implements the loading and conversion to a Pandas dataframe.
+        Timerange trimming and dataframe validation happens outside of this method.
+        :param pair: Pair to load data
+        :param timeframe: Timeframe (e.g. "5m")
+        :param timerange: Limit data to be loaded to this timerange.
+                        Optionally implemented by subclasses to avoid loading
+                        all data where possible.
+        :return: DataFrame with ohlcv data, or empty DataFrame
+        """
+        key = self._pair_ohlcv_key(pair, timeframe)
+        filename = self._pair_data_filename(self._datadir, pair, timeframe)
+
+        if not filename.exists():
+            return pd.DataFrame(columns=self._columns)
+        where = []
+        if timerange:
+            if timerange.starttype == 'date':
+                where.append(f"date >= Timestamp({timerange.startts * 1e9})")
+            if timerange.stoptype == 'date':
+                where.append(f"date < Timestamp({timerange.stopts * 1e9})")
+
+        pairdata = pd.read_hdf(filename, key=key, mode="r", where=where)
+
+        if list(pairdata.columns) != self._columns:
+            raise ValueError("Wrong dataframe format")
+        pairdata = pairdata.astype(dtype={'open': 'float', 'high': 'float',
+                                          'low': 'float', 'close': 'float', 'volume': 'float'})
+        return pairdata
+
+    def ohlcv_purge(self, pair: str, timeframe: str) -> bool:
+        """
+        Remove data for this pair
+        :param pair: Delete data for this pair.
+        :param timeframe: Timeframe (e.g. "5m")
+        :return: True when deleted, false if file did not exist.
+        """
+        filename = self._pair_data_filename(self._datadir, pair, timeframe)
+        if filename.exists():
+            filename.unlink()
+            return True
+        return False
+
+    def ohlcv_append(self, pair: str, timeframe: str, data: pd.DataFrame) -> None:
+        """
+        Append data to existing data structures
+        :param pair: Pair
+        :param timeframe: Timeframe this ohlcv data is for
+        :param data: Data to append.
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    def trades_get_pairs(cls, datadir: Path) -> List[str]:
+        """
+        Returns a list of all pairs for which trade data is available in this
+        :param datadir: Directory to search for ohlcv files
+        :return: List of Pairs
+        """
+        _tmp = [re.search(r'^(\S+)(?=\-trades.h5)', p.name)
+                for p in datadir.glob("*trades.h5")]
+        # Check if regex found something and only return these results to avoid exceptions.
+        return [match[0].replace('_', '/') for match in _tmp if match]
+
+    def trades_store(self, pair: str, data: TradeList) -> None:
+        """
+        Store trades data (list of Dicts) to file
+        :param pair: Pair - used for filename
+        :param data: List of Lists containing trade data,
+                     column sequence as in DEFAULT_TRADES_COLUMNS
+        """
+        key = self._pair_trades_key(pair)
+
+        ds = pd.HDFStore(self._pair_trades_filename(self._datadir, pair),
+                         mode='a', complevel=9, complib='blosc')
+        ds.put(key, pd.DataFrame(data, columns=DEFAULT_TRADES_COLUMNS),
+               format='table', data_columns=['timestamp'])
+        ds.close()
+
+    def trades_append(self, pair: str, data: TradeList):
+        """
+        Append data to existing files
+        :param pair: Pair - used for filename
+        :param data: List of Lists containing trade data,
+                     column sequence as in DEFAULT_TRADES_COLUMNS
+        """
+        raise NotImplementedError()
+
+    def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> TradeList:
+        """
+        Load a pair from h5 file.
+        :param pair: Load trades for this pair
+        :param timerange: Timerange to load trades for - currently not implemented
+        :return: List of trades
+        """
+        key = self._pair_trades_key(pair)
+        filename = self._pair_trades_filename(self._datadir, pair)
+
+        if not filename.exists():
+            return []
+        where = []
+        if timerange:
+            if timerange.starttype == 'date':
+                where.append(f"timestamp >= {timerange.startts * 1e3}")
+            if timerange.stoptype == 'date':
+                where.append(f"timestamp < {timerange.stopts * 1e3}")
+
+        trades = pd.read_hdf(filename, key=key, mode="r", where=where)
+        return trades.values.tolist()
+
+    def trades_purge(self, pair: str) -> bool:
+        """
+        Remove data for this pair
+        :param pair: Delete data for this pair.
+        :return: True when deleted, false if file did not exist.
+        """
+        filename = self._pair_trades_filename(self._datadir, pair)
+        if filename.exists():
+            filename.unlink()
+            return True
+        return False
+
+    @classmethod
+    def _pair_ohlcv_key(cls, pair: str, timeframe: str) -> str:
+        return f"{pair}/ohlcv/tf_{timeframe}"
+
+    @classmethod
+    def _pair_trades_key(cls, pair: str) -> str:
+        return f"{pair}/trades"
+
+    @classmethod
+    def _pair_data_filename(cls, datadir: Path, pair: str, timeframe: str) -> Path:
+        pair_s = misc.pair_to_filename(pair)
+        filename = datadir.joinpath(f'{pair_s}-{timeframe}.h5')
+        return filename
+
+    @classmethod
+    def _pair_trades_filename(cls, datadir: Path, pair: str) -> Path:
+        pair_s = misc.pair_to_filename(pair)
+        filename = datadir.joinpath(f'{pair_s}-trades.h5')
+        return filename
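Note (editor's sketch, not part of the commit): the handler above keeps one `.h5` file per pair and timeframe, stores the frame under a `<pair>/ohlcv/tf_<timeframe>` key in PyTables `table` format, and makes the `date` column queryable so `where` clauses can do partial loads. A minimal standalone illustration of that layout (pair name and values invented; requires `pandas` and `tables`):

```python
import pandas as pd

# Five invented 5m candles in the column layout the handler expects.
df = pd.DataFrame({
    'date': pd.date_range('2020-08-01', periods=5, freq='5min'),
    'open': 1.0, 'high': 2.0, 'low': 0.5, 'close': 1.5, 'volume': 10.0,
})

# Same storage scheme as ohlcv_store(): table format, 'date' as data column.
store = pd.HDFStore('XRP_ETH-5m.h5', mode='a', complevel=9, complib='blosc')
store.put('XRP/ETH/ohlcv/tf_5m', df, format='table', data_columns=['date'])
store.close()

# Partial load with the same where-syntax _ohlcv_load() builds from a TimeRange.
start_ns = pd.Timestamp('2020-08-01 00:10:00').value  # nanoseconds since epoch
loaded = pd.read_hdf('XRP_ETH-5m.h5', key='XRP/ETH/ohlcv/tf_5m', mode='r',
                     where=[f"date >= Timestamp({start_ns})"])
print(loaded)
```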
@@ -9,7 +9,8 @@ from pandas import DataFrame
 
 from freqtrade.configuration import TimeRange
 from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS
-from freqtrade.data.converter import (ohlcv_to_dataframe,
+from freqtrade.data.converter import (clean_ohlcv_dataframe,
+                                      ohlcv_to_dataframe,
                                       trades_remove_duplicates,
                                       trades_to_ohlcv)
 from freqtrade.data.history.idatahandler import IDataHandler, get_datahandler
@@ -202,7 +203,10 @@ def _download_pair_history(datadir: Path,
         if data.empty:
             data = new_dataframe
         else:
-            data = data.append(new_dataframe)
+            # Run cleaning again to ensure there were no duplicate candles
+            # Especially between existing and new data.
+            data = clean_ohlcv_dataframe(data.append(new_dataframe), timeframe, pair,
+                                         fill_missing=False, drop_incomplete=False)
 
         logger.debug("New Start: %s",
                      f"{data.iloc[0]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')
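Note (editor's sketch, not part of the commit): the re-cleaning added above matters because the last stored candle typically overlaps the first freshly downloaded one. The sketch below shows only the duplicate aspect; `clean_ohlcv_dataframe` additionally sorts and can fill or drop candles:

```python
import pandas as pd

existing = pd.DataFrame({
    'date': pd.to_datetime(['2020-08-01 00:00', '2020-08-01 00:05'], utc=True),
    'close': [1.0, 1.1],
})
new = pd.DataFrame({
    'date': pd.to_datetime(['2020-08-01 00:05', '2020-08-01 00:10'], utc=True),
    'close': [1.2, 1.3],
})

# Appending naively leaves two rows for 00:05; keep the refreshed one.
merged = (pd.concat([existing, new])
          .drop_duplicates(subset='date', keep='last')
          .reset_index(drop=True))
print(merged)
```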
@@ -50,9 +50,7 @@ class IDataHandler(ABC):
     @abstractmethod
     def ohlcv_store(self, pair: str, timeframe: str, data: DataFrame) -> None:
         """
-        Store data in json format "values".
-            format looks as follows:
-            [[<date>,<open>,<high>,<low>,<close>]]
+        Store ohlcv data.
         :param pair: Pair - used to generate filename
         :timeframe: Timeframe - used to generate filename
         :data: Dataframe containing OHLCV data
@@ -239,6 +237,9 @@ def get_datahandlerclass(datatype: str) -> Type[IDataHandler]:
     elif datatype == 'jsongz':
         from .jsondatahandler import JsonGzDataHandler
         return JsonGzDataHandler
+    elif datatype == 'hdf5':
+        from .hdf5datahandler import HDF5DataHandler
+        return HDF5DataHandler
     else:
         raise ValueError(f"No datahandler for datatype {datatype} available.")
 
@@ -20,6 +20,7 @@ BAD_EXCHANGES = {
                "Details in https://github.com/freqtrade/freqtrade/issues/1983",
     "hitbtc": "This API cannot be used with Freqtrade. "
               "Use `hitbtc2` exchange id to access this exchange.",
+    "phemex": "Does not provide history. ",
     **dict.fromkeys([
         'adara',
         'anxpro',
@@ -86,8 +86,8 @@ class Exchange:
 
         # Deep merge ft_has with default ft_has options
         self._ft_has = deep_merge_dicts(self._ft_has, deepcopy(self._ft_has_default))
-        if exchange_config.get("_ft_has_params"):
-            self._ft_has = deep_merge_dicts(exchange_config.get("_ft_has_params"),
+        if exchange_config.get('_ft_has_params'):
+            self._ft_has = deep_merge_dicts(exchange_config.get('_ft_has_params'),
                                             self._ft_has)
             logger.info("Overriding exchange._ft_has with config params, result: %s", self._ft_has)
@@ -541,7 +541,9 @@ class FreqtradeBot:
         """
         logger.debug(f"create_trade for pair {pair}")
 
-        if self.strategy.is_pair_locked(pair):
+        analyzed_df, _ = self.dataprovider.get_analyzed_dataframe(pair, self.strategy.timeframe)
+        if self.strategy.is_pair_locked(
+                pair, analyzed_df.iloc[-1]['date'] if len(analyzed_df) > 0 else None):
             logger.info(f"Pair {pair} is currently locked.")
             return False
@@ -552,7 +554,6 @@ class FreqtradeBot:
             return False
 
         # running get_signal on historical data fetched
-        analyzed_df, _ = self.dataprovider.get_analyzed_dataframe(pair, self.strategy.timeframe)
         (buy, sell) = self.strategy.get_signal(pair, self.strategy.timeframe, analyzed_df)
 
         if buy and not sell:
@@ -955,7 +956,7 @@ class FreqtradeBot:
             stop_price = trade.open_rate * (1 + stoploss)
 
             if self.create_stoploss_order(trade=trade, stop_price=stop_price):
-                trade.stoploss_last_update = datetime.now()
+                trade.stoploss_last_update = datetime.utcnow()
                 return False
 
         # If stoploss order is canceled for some reason we add it
@@ -1,14 +1,18 @@
 import logging
 import sys
 
 from logging import Formatter
-from logging.handlers import RotatingFileHandler, SysLogHandler
-from typing import Any, Dict, List
+from logging.handlers import (BufferingHandler, RotatingFileHandler,
+                              SysLogHandler)
+from typing import Any, Dict
 
 from freqtrade.exceptions import OperationalException
 
 
 logger = logging.getLogger(__name__)
+LOGFORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+
+# Initialize bufferhandler - will be used for /log endpoints
+bufferHandler = BufferingHandler(1000)
+bufferHandler.setFormatter(Formatter(LOGFORMAT))
 
 
 def _set_loggers(verbosity: int = 0, api_verbosity: str = 'info') -> None:
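Note (editor's sketch, not part of the commit): `BufferingHandler` is a stdlib handler that accumulates `LogRecord` objects in its `buffer` list (clearing it once capacity is reached), which is what the `/logs` endpoints later read from:

```python
import logging
from logging.handlers import BufferingHandler

buffer_handler = BufferingHandler(1000)  # accumulates up to 1000 records
logging.basicConfig(level=logging.INFO, handlers=[buffer_handler])

logging.getLogger('demo').info('Starting worker develop')
record = buffer_handler.buffer[-1]
print(record.created, record.name, record.levelname, record.getMessage())
```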
@@ -33,17 +37,31 @@ def _set_loggers(verbosity: int = 0, api_verbosity: str = 'info') -> None:
     )
 
 
+def setup_logging_pre() -> None:
+    """
+    Early setup for logging.
+    Uses INFO loglevel and only the Streamhandler.
+    Early messages (before proper logging setup) will therefore only be sent to additional
+    logging handlers after the real initialization, because we don't know which
+    ones the user desires beforehand.
+    """
+    logging.basicConfig(
+        level=logging.INFO,
+        format=LOGFORMAT,
+        handlers=[logging.StreamHandler(sys.stderr), bufferHandler]
+    )
+
+
 def setup_logging(config: Dict[str, Any]) -> None:
     """
     Process -v/--verbose, --logfile options
     """
     # Log level
     verbosity = config['verbosity']
 
-    # Log to stderr
-    log_handlers: List[logging.Handler] = [logging.StreamHandler(sys.stderr)]
+    logging.root.addHandler(bufferHandler)
 
     logfile = config.get('logfile')
+
     if logfile:
         s = logfile.split(':')
         if s[0] == 'syslog':
@@ -58,28 +76,27 @@ def setup_logging(config: Dict[str, Any]) -> None:
             # to perform reduction of repeating messages if this is set in the
             # syslog config. The messages should be equal for this.
             handler.setFormatter(Formatter('%(name)s - %(levelname)s - %(message)s'))
-            log_handlers.append(handler)
+            logging.root.addHandler(handler)
         elif s[0] == 'journald':
             try:
                 from systemd.journal import JournaldLogHandler
             except ImportError:
                 raise OperationalException("You need the systemd python package be installed in "
                                            "order to use logging to journald.")
-            handler = JournaldLogHandler()
+            handler_jd = JournaldLogHandler()
             # No datetime field for logging into journald, to allow syslog
             # to perform reduction of repeating messages if this is set in the
             # syslog config. The messages should be equal for this.
-            handler.setFormatter(Formatter('%(name)s - %(levelname)s - %(message)s'))
-            log_handlers.append(handler)
+            handler_jd.setFormatter(Formatter('%(name)s - %(levelname)s - %(message)s'))
+            logging.root.addHandler(handler_jd)
         else:
-            log_handlers.append(RotatingFileHandler(logfile,
-                                                    maxBytes=1024 * 1024,  # 1Mb
-                                                    backupCount=10))
+            handler_rf = RotatingFileHandler(logfile,
+                                             maxBytes=1024 * 1024 * 10,  # 10Mb
+                                             backupCount=10)
+            handler_rf.setFormatter(Formatter(LOGFORMAT))
+            logging.root.addHandler(handler_rf)
 
-    logging.basicConfig(
-        level=logging.INFO if verbosity < 1 else logging.DEBUG,
-        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
-        handlers=log_handlers
-    )
+    logging.root.setLevel(logging.INFO if verbosity < 1 else logging.DEBUG)
     _set_loggers(verbosity, config.get('api_server', {}).get('verbosity', 'info'))
 
     logger.info('Verbosity set to %s', verbosity)
@@ -3,18 +3,17 @@
 Main Freqtrade bot script.
 Read the documentation to know what cli arguments you need.
 """
-
-from freqtrade.exceptions import FreqtradeException, OperationalException
+import logging
 import sys
+from typing import Any, List
 
 # check min. python version
 if sys.version_info < (3, 6):
     sys.exit("Freqtrade requires Python version >= 3.6")
 
 # flake8: noqa E402
-import logging
-from typing import Any, List
-
 from freqtrade.commands import Arguments
+from freqtrade.exceptions import FreqtradeException, OperationalException
+from freqtrade.loggers import setup_logging_pre
 
 
 logger = logging.getLogger('freqtrade')
@@ -28,6 +27,7 @@ def main(sysargv: List[str] = None) -> None:
 
     return_code: Any = 1
     try:
+        setup_logging_pre()
         arguments = Arguments(sysargv)
         args = arguments.get_parsed_arg()
 
@@ -96,6 +96,7 @@ class Backtesting:
                     "PrecisionFilter not allowed for backtesting multiple strategies."
                 )
 
+        dataprovider.add_pairlisthandler(self.pairlists)
         self.pairlists.refresh_pairlist()
 
         if len(self.pairlists.whitelist) == 0:
@@ -38,15 +38,15 @@ def init_plotscript(config):
     """
 
     if "pairs" in config:
-        pairs = config["pairs"]
+        pairs = config['pairs']
     else:
-        pairs = config["exchange"]["pair_whitelist"]
+        pairs = config['exchange']['pair_whitelist']
 
     # Set timerange to use
-    timerange = TimeRange.parse_timerange(config.get("timerange"))
+    timerange = TimeRange.parse_timerange(config.get('timerange'))
 
     data = load_data(
-        datadir=config.get("datadir"),
+        datadir=config.get('datadir'),
         pairs=pairs,
         timeframe=config.get('timeframe', '5m'),
         timerange=timerange,
@@ -67,7 +67,7 @@ def init_plotscript(config):
         db_url=config.get('db_url'),
         exportfilename=filename,
         no_trades=no_trades,
-        strategy=config.get("strategy"),
+        strategy=config.get('strategy'),
     )
     trades = trim_dataframe(trades, timerange, 'open_date')
@@ -491,13 +491,13 @@ def load_and_plot_trades(config: Dict[str, Any]):
             pair=pair,
             data=df_analyzed,
             trades=trades_pair,
-            indicators1=config.get("indicators1", []),
-            indicators2=config.get("indicators2", []),
+            indicators1=config.get('indicators1', []),
+            indicators2=config.get('indicators2', []),
             plot_config=strategy.plot_config if hasattr(strategy, 'plot_config') else {}
         )
 
         store_plot_file(fig, filename=generate_plot_filename(pair, config['timeframe']),
-                        directory=config['user_data_dir'] / "plot")
+                        directory=config['user_data_dir'] / 'plot')
 
     logger.info('End of plotting process. %s plots generated', pair_counter)
@@ -514,7 +514,7 @@ def plot_profit(config: Dict[str, Any]) -> None:
     # Filter trades to relevant pairs
     # Remove open pairs - we don't know the profit yet so can't calculate profit for these.
     # Also, If only one open pair is left, then the profit-generation would fail.
-    trades = trades[(trades['pair'].isin(plot_elements["pairs"]))
+    trades = trades[(trades['pair'].isin(plot_elements['pairs']))
                     & (~trades['close_date'].isnull())
                     ]
     if len(trades) == 0:
@@ -523,7 +523,7 @@ def plot_profit(config: Dict[str, Any]) -> None:
 
     # Create an average close price of all the pairs that were involved.
     # this could be useful to gauge the overall market trend
-    fig = generate_profit_graph(plot_elements["pairs"], plot_elements["ohlcv"],
+    fig = generate_profit_graph(plot_elements['pairs'], plot_elements['ohlcv'],
                                 trades, config.get('timeframe', '5m'))
     store_plot_file(fig, filename='freqtrade-profit-plot.html',
-                    directory=config['user_data_dir'] / "plot", auto_open=True)
+                    directory=config['user_data_dir'] / 'plot', auto_open=True)
@@ -187,6 +187,7 @@ class ApiServer(RPC):
         self.app.add_url_rule(f'{BASE_URI}/count', 'count', view_func=self._count, methods=['GET'])
         self.app.add_url_rule(f'{BASE_URI}/daily', 'daily', view_func=self._daily, methods=['GET'])
         self.app.add_url_rule(f'{BASE_URI}/edge', 'edge', view_func=self._edge, methods=['GET'])
+        self.app.add_url_rule(f'{BASE_URI}/logs', 'log', view_func=self._get_logs, methods=['GET'])
         self.app.add_url_rule(f'{BASE_URI}/profit', 'profit',
                               view_func=self._profit, methods=['GET'])
         self.app.add_url_rule(f'{BASE_URI}/performance', 'performance',
@@ -349,6 +350,18 @@ class ApiServer(RPC):
 
         return self.rest_dump(stats)
 
+    @require_login
+    @rpc_catch_errors
+    def _get_logs(self):
+        """
+        Returns latest logs
+        get:
+        param:
+            limit: Only get a certain number of records
+        """
+        limit = int(request.args.get('limit', 0)) or None
+        return self.rest_dump(self._rpc_get_logs(limit))
+
     @require_login
     @rpc_catch_errors
     def _edge(self):
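Note (editor's sketch, not part of the commit): with the API server enabled, the new route can be exercised like any other endpoint; host, port and credentials below are illustrative and come from the `api_server` section of the user's configuration:

```python
import requests

# BASE_URI is '/api/v1'; 'limit' maps to request.args.get('limit') above.
resp = requests.get('http://127.0.0.1:8080/api/v1/logs',
                    params={'limit': 20},
                    auth=('freqtrader', 'SuperSecretPassword'))
print(resp.json()['log_count'])
```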
@@ -11,9 +11,9 @@ from typing import Any, Dict, List, Optional, Tuple, Union
 import arrow
 from numpy import NAN, mean
 
-from freqtrade.exceptions import (ExchangeError,
-                                  PricingError)
+from freqtrade.exceptions import ExchangeError, PricingError
 from freqtrade.exchange import timeframe_to_minutes, timeframe_to_msecs
+from freqtrade.loggers import bufferHandler
 from freqtrade.misc import shorten_date
 from freqtrade.persistence import Trade
 from freqtrade.rpc.fiat_convert import CryptoToFiatConverter
@@ -158,6 +158,7 @@ class RPC:
                 current_profit_abs=current_profit_abs,
                 stoploss_current_dist=stoploss_current_dist,
                 stoploss_current_dist_ratio=round(stoploss_current_dist_ratio, 8),
+                stoploss_current_dist_pct=round(stoploss_current_dist_ratio * 100, 2),
                 stoploss_entry_dist=stoploss_entry_dist,
                 stoploss_entry_dist_ratio=round(stoploss_entry_dist_ratio, 8),
                 open_order='({} {} rem={:.8f})'.format(
@@ -631,6 +632,24 @@ class RPC:
         }
         return res
 
+    def _rpc_get_logs(self, limit: Optional[int]) -> Dict[str, Any]:
+        """Returns the last X logs"""
+        if limit:
+            buffer = bufferHandler.buffer[-limit:]
+        else:
+            buffer = bufferHandler.buffer
+        records = [[datetime.fromtimestamp(r.created).strftime("%Y-%m-%d %H:%M:%S"),
+                   r.created * 1000, r.name, r.levelname,
+                   r.message + ('\n' + r.exc_text if r.exc_text else '')]
+                   for r in buffer]
+
+        # Log format:
+        # [logtime-formatted, logepoch, logger-name, loglevel, message \n + exception]
+        # e.g. ["2020-08-27 11:35:01", 1598520901097.9397,
+        #       "freqtrade.worker", "INFO", "Starting worker develop"]
+
+        return {'log_count': len(records), 'logs': records}
+
     def _rpc_edge(self) -> List[Dict[str, Any]]:
         """ Returns information related to Edge """
         if not self._freqtrade.edge:
@@ -12,6 +12,7 @@ from tabulate import tabulate
 from telegram import ParseMode, ReplyKeyboardMarkup, Update
 from telegram.error import NetworkError, TelegramError
 from telegram.ext import CallbackContext, CommandHandler, Updater
+from telegram.utils.helpers import escape_markdown
 
 from freqtrade.__init__ import __version__
 from freqtrade.rpc import RPC, RPCException, RPCMessageType
@@ -103,6 +104,7 @@ class Telegram(RPC):
             CommandHandler('stopbuy', self._stopbuy),
             CommandHandler('whitelist', self._whitelist),
             CommandHandler('blacklist', self._blacklist),
+            CommandHandler('logs', self._logs),
             CommandHandler('edge', self._edge),
             CommandHandler('help', self._help),
             CommandHandler('version', self._version),
@@ -239,17 +241,18 @@ class Telegram(RPC):
             ("*Close Profit:* `{close_profit_pct}`"
              if r['close_profit_pct'] is not None else ""),
             "*Current Profit:* `{current_profit_pct:.2f}%`",
-
-            # Adding initial stoploss only if it is different from stoploss
-            "*Initial Stoploss:* `{initial_stop_loss:.8f}` " +
-            ("`({initial_stop_loss_pct:.2f}%)`") if (
-                r['stop_loss'] != r['initial_stop_loss']
-                and r['initial_stop_loss_pct'] is not None) else "",
-
-            # Adding stoploss and stoploss percentage only if it is not None
-            "*Stoploss:* `{stop_loss:.8f}` " +
-            ("`({stop_loss_pct:.2f}%)`" if r['stop_loss_pct'] else ""),
         ]
+        if (r['stop_loss'] != r['initial_stop_loss']
+                and r['initial_stop_loss_pct'] is not None):
+            # Adding initial stoploss only if it is different from stoploss
+            lines.append("*Initial Stoploss:* `{initial_stop_loss:.8f}` "
+                         "`({initial_stop_loss_pct:.2f}%)`")
+
+        # Adding stoploss and stoploss percentage only if it is not None
+        lines.append("*Stoploss:* `{stop_loss:.8f}` " +
+                     ("`({stop_loss_pct:.2f}%)`" if r['stop_loss_pct'] else ""))
+        lines.append("*Stoploss distance:* `{stoploss_current_dist:.8f}` "
+                     "`({stoploss_current_dist_pct:.2f}%)`")
         if r['open_order']:
             if r['sell_order_status']:
                 lines.append("*Open Order:* `{open_order}` - `{sell_order_status}`")
@@ -637,6 +640,38 @@ class Telegram(RPC):
         except RPCException as e:
             self._send_msg(str(e))
 
+    @authorized_only
+    def _logs(self, update: Update, context: CallbackContext) -> None:
+        """
+        Handler for /logs
+        Shows the latest logs
+        """
+        try:
+            try:
+                limit = int(context.args[0])
+            except (TypeError, ValueError, IndexError):
+                limit = 10
+            logs = self._rpc_get_logs(limit)['logs']
+            msgs = ''
+            msg_template = "*{}* {}: {} \\- `{}`"
+            for logrec in logs:
+                msg = msg_template.format(escape_markdown(logrec[0], version=2),
+                                          escape_markdown(logrec[2], version=2),
+                                          escape_markdown(logrec[3], version=2),
+                                          escape_markdown(logrec[4], version=2))
+                if len(msgs + msg) + 10 >= MAX_TELEGRAM_MESSAGE_LENGTH:
+                    # Send message immediately if it would become too long
+                    self._send_msg(msgs, parse_mode=ParseMode.MARKDOWN_V2)
+                    msgs = msg + '\n'
+                else:
+                    # Append message to messages to send
+                    msgs += msg + '\n'
+
+            if msgs:
+                self._send_msg(msgs, parse_mode=ParseMode.MARKDOWN_V2)
+        except RPCException as e:
+            self._send_msg(str(e))
+
     @authorized_only
     def _edge(self, update: Update, context: CallbackContext) -> None:
         """
@@ -682,6 +717,7 @@ class Telegram(RPC):
                    "*/stopbuy:* `Stops buying, but handles open trades gracefully` \n"
                    "*/reload_config:* `Reload configuration file` \n"
                    "*/show_config:* `Show running configuration` \n"
+                   "*/logs [limit]:* `Show latest logs - defaults to 10` \n"
                    "*/whitelist:* `Show current whitelist` \n"
                    "*/blacklist [pair]:* `Show current blacklist, or adds one or more pairs "
                    "to the blacklist.` \n"
@@ -14,8 +14,9 @@ from pandas import DataFrame
 
 from freqtrade.constants import ListPairsWithTimeframes
 from freqtrade.data.dataprovider import DataProvider
-from freqtrade.exceptions import StrategyError, OperationalException
+from freqtrade.exceptions import OperationalException, StrategyError
 from freqtrade.exchange import timeframe_to_minutes
+from freqtrade.exchange.exchange import timeframe_to_next_date
 from freqtrade.persistence import Trade
 from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper
 from freqtrade.wallets import Wallets
@@ -297,13 +298,25 @@ class IStrategy(ABC):
         if pair in self._pair_locked_until:
             del self._pair_locked_until[pair]
 
-    def is_pair_locked(self, pair: str) -> bool:
+    def is_pair_locked(self, pair: str, candle_date: datetime = None) -> bool:
         """
         Checks if a pair is currently locked
+        The 2nd, optional parameter ensures that locks are applied until the new candle arrives,
+        and not stop at 14:00:00 - while the next candle arrives at 14:00:02 leaving a gap
+        of 2 seconds for a buy to happen on an old signal.
+        :param: pair: "Pair to check"
+        :param candle_date: Date of the last candle. Optional, defaults to current date
+        :returns: locking state of the pair in question.
         """
         if pair not in self._pair_locked_until:
             return False
-        return self._pair_locked_until[pair] >= datetime.now(timezone.utc)
+        if not candle_date:
+            return self._pair_locked_until[pair] >= datetime.now(timezone.utc)
+        else:
+            # Locking should happen until a new candle arrives
+            lock_time = timeframe_to_next_date(self.timeframe, candle_date)
+            # lock_time = candle_date + timedelta(minutes=timeframe_to_minutes(self.timeframe))
+            return self._pair_locked_until[pair] > lock_time
 
     def analyze_ticker(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
         """
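Note (editor's sketch, not part of the commit): the scenario from the docstring, with a hypothetical `next_candle_date` helper standing in for `timeframe_to_next_date`. A lock that expires shortly after a candle boundary now covers the whole previous candle, even if the next candle arrives a few seconds late:

```python
from datetime import datetime, timedelta, timezone

def next_candle_date(timeframe_minutes: int, candle_date: datetime) -> datetime:
    # Hypothetical stand-in for timeframe_to_next_date(): open of the next candle.
    return candle_date + timedelta(minutes=timeframe_minutes)

# Lock placed at 13:55:23 for five minutes -> expires at 14:00:23.
lock_until = datetime(2020, 8, 27, 14, 0, 23, tzinfo=timezone.utc)
candle = datetime(2020, 8, 27, 13, 55, 0, tzinfo=timezone.utc)   # latest 5m candle
now = datetime(2020, 8, 27, 14, 0, 30, tzinfo=timezone.utc)      # next candle is late

print(lock_until >= now)                         # False: wall-clock check unlocks
print(lock_until > next_candle_date(5, candle))  # True: candle-aware check holds
```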
@@ -434,7 +447,7 @@ class IStrategy(ABC):
         if latest_date < (arrow.utcnow().shift(minutes=-(timeframe_minutes * 2 + offset))):
             logger.warning(
                 'Outdated history for pair %s. Last tick is %s minutes old',
-                pair, (arrow.utcnow() - latest_date).seconds // 60
+                pair, int((arrow.utcnow() - latest_date).total_seconds() // 60)
             )
             return False, False
freqtrade/vendor/qtpylib/indicators.py (vendored, 2 lines)
@@ -222,7 +222,7 @@ def crossed(series1, series2, direction=None):
     if isinstance(series1, np.ndarray):
         series1 = pd.Series(series1)
 
-    if isinstance(series2, (float, int, np.ndarray)):
+    if isinstance(series2, (float, int, np.ndarray, np.integer, np.floating)):
         series2 = pd.Series(index=series1.index, data=series2)
 
     if direction is None or direction == "above":
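Note (editor's sketch, not part of the commit): NumPy integer scalars are not subclasses of Python's `int` in Python 3, so an `np.int64` threshold previously fell through the `isinstance` check above:

```python
import numpy as np

print(isinstance(np.int64(5), int))         # False: np.int64 is not a Python int
print(isinstance(np.int64(5), np.integer))  # True: caught by the widened check
print(isinstance(np.float64(5.0), float))   # True: np.float64 already passed
```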