Merge branch 'develop' into feat/new_args_system

Committed by Matthias on 2019-10-20 19:32:34 +02:00
100 changed files with 2632 additions and 1112 deletions

View File

@@ -14,7 +14,8 @@ ARGS_STRATEGY = ["strategy", "strategy_path"]
ARGS_TRADE = ["db_url", "sd_notify", "dry_run"]
ARGS_COMMON_OPTIMIZE = ["ticker_interval", "timerange", "max_open_trades", "stake_amount"]
ARGS_COMMON_OPTIMIZE = ["ticker_interval", "timerange",
"max_open_trades", "stake_amount", "fee"]
ARGS_BACKTEST = ARGS_COMMON_OPTIMIZE + ["position_stacking", "use_max_market_positions",
"strategy_list", "export", "exportfilename"]
@@ -28,11 +29,14 @@ ARGS_HYPEROPT = ARGS_COMMON_OPTIMIZE + ["hyperopt", "hyperopt_path",
ARGS_EDGE = ARGS_COMMON_OPTIMIZE + ["stoploss_range"]
ARGS_LIST_EXCHANGES = ["print_one_column"]
ARGS_LIST_EXCHANGES = ["print_one_column", "list_exchanges_all"]
ARGS_LIST_TIMEFRAMES = ["exchange", "print_one_column"]
ARGS_CREATE_USERDIR = ["user_data_dir"]
ARGS_DOWNLOAD_DATA = ["pairs", "pairs_file", "days", "exchange", "timeframes", "erase"]
ARGS_DOWNLOAD_DATA = ["pairs", "pairs_file", "days", "download_trades", "exchange",
"timeframes", "erase"]
ARGS_PLOT_DATAFRAME = ["pairs", "indicators1", "indicators2", "plot_limit",
"db_url", "trade_source", "export", "exportfilename",
@@ -41,7 +45,9 @@ ARGS_PLOT_DATAFRAME = ["pairs", "indicators1", "indicators2", "plot_limit",
ARGS_PLOT_PROFIT = ["pairs", "timerange", "export", "exportfilename", "db_url",
"trade_source", "ticker_interval"]
NO_CONF_REQURIED = ["create-userdir", "download-data", "plot-dataframe", "plot-profit"]
NO_CONF_REQURIED = ["download-data", "list-timeframes", "plot-dataframe", "plot-profit"]
NO_CONF_ALLOWED = ["create-userdir", "list-exchanges"]
class Arguments:
@@ -69,8 +75,6 @@ class Arguments:
"""
parsed_arg = self.parser.parse_args(self.args)
# When no config is provided, but a config exists, use that configuration!
# Workaround issue in argparse with action='append' and default value
# (see https://bugs.python.org/issue16399)
# Allow no-config for certain commands (like downloading / plotting)
@@ -107,7 +111,7 @@ class Arguments:
from freqtrade.optimize import start_backtesting, start_hyperopt, start_edge
from freqtrade.utils import (start_create_userdir, start_download_data,
start_list_exchanges, start_trading)
start_list_exchanges, start_list_timeframes, start_trading)
from freqtrade.plot.plot_utils import start_plot_dataframe, start_plot_profit
subparsers = self.parser.add_subparsers(dest='command',
@@ -156,6 +160,14 @@ class Arguments:
list_exchanges_cmd.set_defaults(func=start_list_exchanges)
self._build_args(optionlist=ARGS_LIST_EXCHANGES, parser=list_exchanges_cmd)
# Add list-timeframes subcommand
list_timeframes_cmd = subparsers.add_parser(
'list-timeframes',
help='Print available ticker intervals (timeframes) for the exchange.'
)
list_timeframes_cmd.set_defaults(func=start_list_timeframes)
self._build_args(optionlist=ARGS_LIST_TIMEFRAMES, parser=list_timeframes_cmd)
# Add download-data subcommand
download_data_cmd = subparsers.add_parser('download-data',
help='Download backtesting data.',
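
The subcommand wiring above follows the standard argparse pattern: each subparser registers its handler through set_defaults(func=...) and the caller dispatches on the parsed namespace. A minimal, self-contained sketch of that pattern for the new list-timeframes command; the lambda handler and the hard-coded argv are illustrative stand-ins for start_list_timeframes and a real command line:

import argparse

parser = argparse.ArgumentParser(prog='freqtrade')
subparsers = parser.add_subparsers(dest='command')

# Mirror of the list-timeframes registration above, with a stub handler.
list_timeframes_cmd = subparsers.add_parser(
    'list-timeframes',
    help='Print available ticker intervals (timeframes) for the exchange.'
)
list_timeframes_cmd.add_argument('--exchange', help='Exchange name.')
list_timeframes_cmd.add_argument('-1', '--one-column', action='store_true',
                                 help='Print output in one column.')
list_timeframes_cmd.set_defaults(func=lambda args: print('timeframes for', args.exchange))

args = parser.parse_args(['list-timeframes', '--exchange', 'binance'])
args.func(args)  # dispatch to the registered handler
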

View File

@@ -3,7 +3,7 @@ from typing import Any, Dict
from freqtrade import OperationalException
from freqtrade.exchange import (available_exchanges, get_exchange_bad_reason,
is_exchange_available, is_exchange_bad,
is_exchange_known_ccxt, is_exchange_bad,
is_exchange_officially_supported)
from freqtrade.state import RunMode
@@ -31,15 +31,15 @@ def check_exchange(config: Dict[str, Any], check_for_bad: bool = True) -> bool:
raise OperationalException(
f'This command requires a configured exchange. You should either use '
f'`--exchange <exchange_name>` or specify a configuration file via `--config`.\n'
f'The following exchanges are supported by ccxt: '
f'The following exchanges are available for Freqtrade: '
f'{", ".join(available_exchanges())}'
)
if not is_exchange_available(exchange):
if not is_exchange_known_ccxt(exchange):
raise OperationalException(
f'Exchange "{exchange}" is not supported by ccxt '
f'Exchange "{exchange}" is not known to the ccxt library '
f'and therefore not available for the bot.\n'
f'The following exchanges are supported by ccxt: '
f'The following exchanges are available for Freqtrade: '
f'{", ".join(available_exchanges())}'
)
@@ -51,8 +51,8 @@ def check_exchange(config: Dict[str, Any], check_for_bad: bool = True) -> bool:
logger.info(f'Exchange "{exchange}" is officially supported '
f'by the Freqtrade development team.')
else:
logger.warning(f'Exchange "{exchange}" is supported by ccxt '
f'and therefore available for the bot but not officially supported '
logger.warning(f'Exchange "{exchange}" is known to the the ccxt library, '
f'available for the bot, but not officially supported '
f'by the Freqtrade development team. '
f'It may work flawlessly (please report back) or have serious issues. '
f'Use it at your own discretion.')

View File

@@ -2,7 +2,6 @@
Definition of cli arguments used in arguments.py
"""
import argparse
import os
from freqtrade import __version__, constants
@@ -145,8 +144,12 @@ AVAILABLE_CLI_OPTIONS = {
'Requires `--export` to be set as well. '
'Example: `--export-filename=user_data/backtest_results/backtest_today.json`',
metavar='PATH',
default=os.path.join('user_data', 'backtest_results',
'backtest-result.json'),
),
"fee": Arg(
'--fee',
help='Specify fee ratio. Will be applied twice (on trade entry and exit).',
type=float,
metavar='FLOAT',
),
# Edge
"stoploss_range": Arg(
@@ -244,7 +247,12 @@ AVAILABLE_CLI_OPTIONS = {
# List exchanges
"print_one_column": Arg(
'-1', '--one-column',
help='Print exchanges in one column.',
help='Print output in one column.',
action='store_true',
),
"list_exchanges_all": Arg(
'-a', '--all',
help='Print all exchanges known to the ccxt library.',
action='store_true',
),
# Script options
@@ -265,6 +273,12 @@ AVAILABLE_CLI_OPTIONS = {
type=check_int_positive,
metavar='INT',
),
"download_trades": Arg(
'--dl-trades',
help='Download trades instead of OHLCV data. The bot will resample trades to the '
'desired timeframe as specified as --timeframes/-t.',
action='store_true',
),
"exchange": Arg(
'--exchange',
help=f'Exchange name (default: `{constants.DEFAULT_EXCHANGE}`). '

View File

@@ -9,8 +9,9 @@ from typing import Any, Callable, Dict, List, Optional
from freqtrade import OperationalException, constants
from freqtrade.configuration.check_exchange import check_exchange
from freqtrade.configuration.config_validation import (
validate_config_consistency, validate_config_schema)
from freqtrade.configuration.config_validation import (validate_config_consistency,
validate_config_schema)
from freqtrade.configuration.deprecated_settings import process_temporary_deprecated_settings
from freqtrade.configuration.directory_operations import (create_datadir,
create_userdata_dir)
from freqtrade.configuration.load_config import load_config_file
@@ -75,6 +76,10 @@ class Configuration:
# Normalize config
if 'internals' not in config:
config['internals'] = {}
# TODO: This can be deleted along with removal of deprecated
# experimental settings
if 'ask_strategy' not in config:
config['ask_strategy'] = {}
# validate configuration before returning
logger.info('Validating configuration ...')
@@ -106,6 +111,8 @@ class Configuration:
self._resolve_pairs_list(config)
process_temporary_deprecated_settings(config)
validate_config_consistency(config)
return config
@@ -189,6 +196,13 @@ class Configuration:
config.update({'datadir': create_datadir(config, self.args.get("datadir", None))})
logger.info('Using data directory: %s ...', config.get('datadir'))
if self.args.get('exportfilename'):
self._args_to_config(config, argname='exportfilename',
logstring='Storing backtest results to {} ...')
else:
config['exportfilename'] = (config['user_data_dir']
/ 'backtest_results/backtest-result.json')
def _process_optimize_options(self, config: Dict[str, Any]) -> None:
# This will override the strategy configuration
@@ -214,6 +228,10 @@ class Configuration:
logstring='Parameter --stake_amount detected, '
'overriding stake_amount to: {} ...')
self._args_to_config(config, argname='fee',
logstring='Parameter --fee detected, '
'setting fee to: {} ...')
self._args_to_config(config, argname='timerange',
logstring='Parameter --timerange detected: {} ...')
@@ -228,9 +246,6 @@ class Configuration:
self._args_to_config(config, argname='export',
logstring='Parameter --export detected: {} ...')
self._args_to_config(config, argname='exportfilename',
logstring='Storing backtest results to {} ...')
# Edge section:
if 'stoploss_range' in self.args and self.args["stoploss_range"]:
txt_range = eval(self.args["stoploss_range"])
@@ -305,6 +320,8 @@ class Configuration:
self._args_to_config(config, argname='days',
logstring='Detected --days: {}')
self._args_to_config(config, argname='download_trades',
logstring='Detected --dl-trades: {}')
def _process_runmode(self, config: Dict[str, Any]) -> None:
@@ -327,7 +344,8 @@ class Configuration:
sample: logfun=len (prints the length of the found
configuration instead of the content)
"""
if argname in self.args and self.args[argname]:
if (argname in self.args and self.args[argname] is not None
and self.args[argname] is not False):
config.update({argname: self.args[argname]})
if logfun:
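
The tightened guard above changes which command-line values make it into the configuration: previously any falsy value (0, 0.0, empty string, False, None) was silently dropped, now only None (argument not given) and False (an unset store_true flag) are skipped. A small sketch of the difference, using hypothetical argument values:

def copy_arg_old(config, args, argname):
    # Old behaviour: any falsy value is ignored.
    if argname in args and args[argname]:
        config[argname] = args[argname]

def copy_arg_new(config, args, argname):
    # New behaviour: only None and False are ignored.
    if (argname in args and args[argname] is not None
            and args[argname] is not False):
        config[argname] = args[argname]

args = {'fee': 0.0, 'dry_run': False, 'timerange': None}
old_cfg, new_cfg = {}, {}
for name in args:
    copy_arg_old(old_cfg, args, name)
    copy_arg_new(new_cfg, args, name)

print(old_cfg)  # {}             - a --fee of 0.0 used to be dropped
print(new_cfg)  # {'fee': 0.0}   - 0.0 now counts as a valid override
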

View File

@@ -0,0 +1,59 @@
"""
Functions to handle deprecated settings
"""
import logging
from typing import Any, Dict
from freqtrade import OperationalException
logger = logging.getLogger(__name__)
def check_conflicting_settings(config: Dict[str, Any],
section1: str, name1: str,
section2: str, name2: str):
section1_config = config.get(section1, {})
section2_config = config.get(section2, {})
if name1 in section1_config and name2 in section2_config:
raise OperationalException(
f"Conflicting settings `{section1}.{name1}` and `{section2}.{name2}` "
"(DEPRECATED) detected in the configuration file. "
"This deprecated setting will be removed in the next versions of Freqtrade. "
f"Please delete it from your configuration and use the `{section1}.{name1}` "
"setting instead."
)
def process_deprecated_setting(config: Dict[str, Any],
section1: str, name1: str,
section2: str, name2: str):
section2_config = config.get(section2, {})
if name2 in section2_config:
logger.warning(
"DEPRECATED: "
f"The `{section2}.{name2}` setting is deprecated and "
"will be removed in the next versions of Freqtrade. "
f"Please use the `{section1}.{name1}` setting in your configuration instead."
)
section1_config = config.get(section1, {})
section1_config[name1] = section2_config[name2]
def process_temporary_deprecated_settings(config: Dict[str, Any]) -> None:
check_conflicting_settings(config, 'ask_strategy', 'use_sell_signal',
'experimental', 'use_sell_signal')
check_conflicting_settings(config, 'ask_strategy', 'sell_profit_only',
'experimental', 'sell_profit_only')
check_conflicting_settings(config, 'ask_strategy', 'ignore_roi_if_buy_signal',
'experimental', 'ignore_roi_if_buy_signal')
process_deprecated_setting(config, 'ask_strategy', 'use_sell_signal',
'experimental', 'use_sell_signal')
process_deprecated_setting(config, 'ask_strategy', 'sell_profit_only',
'experimental', 'sell_profit_only')
process_deprecated_setting(config, 'ask_strategy', 'ignore_roi_if_buy_signal',
'experimental', 'ignore_roi_if_buy_signal')
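
As a usage illustration (assuming a freqtrade checkout containing this new module), process_deprecated_setting copies a value found in the deprecated experimental section into its new ask_strategy home, while check_conflicting_settings raises if both locations are set at once:

from freqtrade.configuration.deprecated_settings import process_deprecated_setting

config = {
    'experimental': {'use_sell_signal': True},   # deprecated location
    'ask_strategy': {},                          # new location
}

process_deprecated_setting(config, 'ask_strategy', 'use_sell_signal',
                           'experimental', 'use_sell_signal')

print(config['ask_strategy']['use_sell_signal'])  # True - migrated, a deprecation warning is logged
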

View File

@@ -42,9 +42,10 @@ class TimeRange:
(r'^-(\d{10})$', (None, 'date')),
(r'^(\d{10})-$', ('date', None)),
(r'^(\d{10})-(\d{10})$', ('date', 'date')),
(r'^(-\d+)$', (None, 'line')),
(r'^(\d+)-$', ('line', None)),
(r'^(\d+)-(\d+)$', ('index', 'index'))]
(r'^-(\d{13})$', (None, 'date')),
(r'^(\d{13})-$', ('date', None)),
(r'^(\d{13})-(\d{13})$', ('date', 'date')),
]
for rex, stype in syntax:
# Apply the regular expression to text
match = re.match(rex, text)
@@ -57,6 +58,8 @@ class TimeRange:
starts = rvals[index]
if stype[0] == 'date' and len(starts) == 8:
start = arrow.get(starts, 'YYYYMMDD').timestamp
elif len(starts) == 13:
start = int(starts) // 1000
else:
start = int(starts)
index += 1
@@ -64,6 +67,8 @@ class TimeRange:
stops = rvals[index]
if stype[1] == 'date' and len(stops) == 8:
stop = arrow.get(stops, 'YYYYMMDD').timestamp
elif len(stops) == 13:
stop = int(stops) // 1000
else:
stop = int(stops)
return TimeRange(stype[0], stype[1], start, stop)
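
With the new patterns, 13-digit millisecond timestamps are accepted alongside the existing 10-digit second and YYYYMMDD forms and are divided down to seconds. A brief check, assuming freqtrade with this change is importable:

from freqtrade.configuration import TimeRange

tr_s = TimeRange.parse_timerange('1561478400-1561564800')          # seconds
tr_ms = TimeRange.parse_timerange('1561478400000-1561564800000')   # milliseconds

assert tr_s.startts == tr_ms.startts == 1561478400
assert tr_s.stopts == tr_ms.stopts == 1561564800
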

View File

@@ -112,7 +112,10 @@ CONF_SCHEMA = {
'properties': {
'use_order_book': {'type': 'boolean'},
'order_book_min': {'type': 'number', 'minimum': 1},
'order_book_max': {'type': 'number', 'minimum': 1, 'maximum': 50}
'order_book_max': {'type': 'number', 'minimum': 1, 'maximum': 50},
'use_sell_signal': {'type': 'boolean'},
'sell_profit_only': {'type': 'boolean'},
'ignore_roi_if_buy_signal': {'type': 'boolean'}
}
},
'order_types': {
@@ -142,7 +145,8 @@ CONF_SCHEMA = {
'properties': {
'use_sell_signal': {'type': 'boolean'},
'sell_profit_only': {'type': 'boolean'},
'ignore_roi_if_buy_signal_true': {'type': 'boolean'}
'ignore_roi_if_buy_signal': {'type': 'boolean'},
'block_bad_exchanges': {'type': 'boolean'}
}
},
'pairlist': {
@@ -260,6 +264,6 @@ CONF_SCHEMA = {
'stake_amount',
'dry_run',
'bid_strategy',
'telegram'
'unfilledtimeout',
]
}

View File

@@ -93,7 +93,7 @@ def load_trades_from_db(db_url: str) -> pd.DataFrame:
t.close_date.replace(tzinfo=pytz.UTC) if t.close_date else None,
t.calc_profit(), t.calc_profit_percent(),
t.open_rate, t.close_rate, t.amount,
(t.close_date.timestamp() - t.open_date.timestamp()
(round((t.close_date.timestamp() - t.open_date.timestamp()) / 60, 2)
if t.close_date else None),
t.sell_reason,
t.fee_open, t.fee_close,

View File

@@ -114,3 +114,25 @@ def order_book_to_dataframe(bids: list, asks: list) -> DataFrame:
keys=['b_sum', 'b_size', 'bids', 'asks', 'a_size', 'a_sum'])
# logger.info('order book %s', frame )
return frame
def trades_to_ohlcv(trades: list, timeframe: str) -> list:
"""
Converts trades list to ohlcv list
:param trades: List of trades, as returned by ccxt.fetch_trades.
:param timeframe: Ticker timeframe to resample data to
:return: ohlcv timeframe as list (as returned by ccxt.fetch_ohlcv)
"""
from freqtrade.exchange import timeframe_to_minutes
ticker_minutes = timeframe_to_minutes(timeframe)
df = pd.DataFrame(trades)
df['datetime'] = pd.to_datetime(df['datetime'])
df = df.set_index('datetime')
df_new = df['price'].resample(f'{ticker_minutes}min').ohlc()
df_new['volume'] = df['amount'].resample(f'{ticker_minutes}min').sum()
df_new['date'] = df_new.index.astype("int64") // 10 ** 6
# Drop 0 volume rows
df_new = df_new.dropna()
columns = ["date", "open", "high", "low", "close", "volume"]
return list(zip(*[df_new[x].values.tolist() for x in columns]))
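
A quick illustration of the resampling above, feeding a hand-built trades list with only the fields the function reads ('datetime', 'price', 'amount', in the shape ccxt.fetch_trades returns); the values are made up:

from freqtrade.data.converter import trades_to_ohlcv

trades = [
    {'datetime': '2019-08-14 15:59:10', 'price': 0.019627, 'amount': 0.5},
    {'datetime': '2019-08-14 15:59:45', 'price': 0.019629, 'amount': 1.0},
    {'datetime': '2019-08-14 16:00:30', 'price': 0.019626, 'amount': 0.2},
]

# Two 1m candles: one for 15:59 (volume 1.5) and one for 16:00 (volume 0.2),
# each as (date_ms, open, high, low, close, volume).
for candle in trades_to_ohlcv(trades, '1m'):
    print(candle)
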

View File

@@ -6,7 +6,7 @@ Common Interface for bot and strategy to access data.
"""
import logging
from pathlib import Path
from typing import List, Tuple
from typing import Any, Dict, List, Optional, Tuple
from pandas import DataFrame
@@ -85,6 +85,14 @@ class DataProvider:
logger.warning(f"No data found for ({pair}, {ticker_interval}).")
return data
def market(self, pair: str) -> Optional[Dict[str, Any]]:
"""
Return market data for the pair
:param pair: Pair to get the data for
:return: Market data dict from ccxt or None if market info is not available for the pair
"""
return self._exchange.markets.get(pair)
def ticker(self, pair: str):
"""
Return last ticker data
@@ -92,9 +100,9 @@ class DataProvider:
# TODO: Implement me
pass
def orderbook(self, pair: str, maximum: int):
def orderbook(self, pair: str, maximum: int) -> Dict[str, List]:
"""
return latest orderbook data
fetch latest orderbook data
:param pair: pair to get the data for
:param maximum: Maximum number of orderbook entries to query
:return: dict including bids/asks with a total of `maximum` entries.

View File

@@ -17,7 +17,7 @@ from pandas import DataFrame
from freqtrade import OperationalException, misc
from freqtrade.configuration import TimeRange
from freqtrade.data.converter import parse_ticker_dataframe
from freqtrade.data.converter import parse_ticker_dataframe, trades_to_ohlcv
from freqtrade.exchange import Exchange, timeframe_to_minutes
logger = logging.getLogger(__name__)
@@ -33,20 +33,12 @@ def trim_tickerlist(tickerlist: List[Dict], timerange: TimeRange) -> List[Dict]:
start_index = 0
stop_index = len(tickerlist)
if timerange.starttype == 'line':
stop_index = timerange.startts
if timerange.starttype == 'index':
start_index = timerange.startts
elif timerange.starttype == 'date':
if timerange.starttype == 'date':
while (start_index < len(tickerlist) and
tickerlist[start_index][0] < timerange.startts * 1000):
start_index += 1
if timerange.stoptype == 'line':
start_index = max(len(tickerlist) + timerange.stopts, 0)
if timerange.stoptype == 'index':
stop_index = timerange.stopts
elif timerange.stoptype == 'date':
if timerange.stoptype == 'date':
while (stop_index > 0 and
tickerlist[stop_index-1][0] > timerange.stopts * 1000):
stop_index -= 1
@@ -82,10 +74,42 @@ def store_tickerdata_file(datadir: Path, pair: str,
misc.file_dump_json(filename, data, is_zip=is_zip)
def load_trades_file(datadir: Path, pair: str,
timerange: Optional[TimeRange] = None) -> List[Dict]:
"""
Load a pair from file, either .json.gz or .json
:return: tradelist or empty list if unsuccesful
"""
filename = pair_trades_filename(datadir, pair)
tradesdata = misc.file_load_json(filename)
if not tradesdata:
return []
return tradesdata
def store_trades_file(datadir: Path, pair: str,
data: list, is_zip: bool = True):
"""
Stores tickerdata to file
"""
filename = pair_trades_filename(datadir, pair)
misc.file_dump_json(filename, data, is_zip=is_zip)
def _validate_pairdata(pair, pairdata, timerange: TimeRange):
if timerange.starttype == 'date' and pairdata[0][0] > timerange.startts * 1000:
logger.warning('Missing data at start for pair %s, data starts at %s',
pair, arrow.get(pairdata[0][0] // 1000).strftime('%Y-%m-%d %H:%M:%S'))
if timerange.stoptype == 'date' and pairdata[-1][0] < timerange.stopts * 1000:
logger.warning('Missing data at end for pair %s, data ends at %s',
pair, arrow.get(pairdata[-1][0] // 1000).strftime('%Y-%m-%d %H:%M:%S'))
def load_pair_history(pair: str,
ticker_interval: str,
datadir: Path,
timerange: TimeRange = TimeRange(None, None, 0, 0),
timerange: Optional[TimeRange] = None,
refresh_pairs: bool = False,
exchange: Optional[Exchange] = None,
fill_up_missing: bool = True,
@@ -116,13 +140,8 @@ def load_pair_history(pair: str,
pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange)
if pairdata:
if timerange.starttype == 'date' and pairdata[0][0] > timerange.startts * 1000:
logger.warning('Missing data at start for pair %s, data starts at %s',
pair, arrow.get(pairdata[0][0] // 1000).strftime('%Y-%m-%d %H:%M:%S'))
if timerange.stoptype == 'date' and pairdata[-1][0] < timerange.stopts * 1000:
logger.warning('Missing data at end for pair %s, data ends at %s',
pair,
arrow.get(pairdata[-1][0] // 1000).strftime('%Y-%m-%d %H:%M:%S'))
if timerange:
_validate_pairdata(pair, pairdata, timerange)
return parse_ticker_dataframe(pairdata, ticker_interval, pair=pair,
fill_missing=fill_up_missing,
drop_incomplete=drop_incomplete)
@@ -139,7 +158,7 @@ def load_data(datadir: Path,
pairs: List[str],
refresh_pairs: bool = False,
exchange: Optional[Exchange] = None,
timerange: TimeRange = TimeRange(None, None, 0, 0),
timerange: Optional[TimeRange] = None,
fill_up_missing: bool = True,
) -> Dict[str, DataFrame]:
"""
@@ -169,13 +188,20 @@ def pair_data_filename(datadir: Path, pair: str, ticker_interval: str) -> Path:
return filename
def load_cached_data_for_updating(datadir: Path, pair: str, ticker_interval: str,
timerange: Optional[TimeRange]) -> Tuple[List[Any],
Optional[int]]:
def pair_trades_filename(datadir: Path, pair: str) -> Path:
pair_s = pair.replace("/", "_")
filename = datadir.joinpath(f'{pair_s}-trades.json.gz')
return filename
def _load_cached_data_for_updating(datadir: Path, pair: str, ticker_interval: str,
timerange: Optional[TimeRange]) -> Tuple[List[Any],
Optional[int]]:
"""
Load cached data to download more data.
If timerange is passed in, checks wether data from an before the stored data will be downloaded.
If that's the case than what's available should be completely overwritten.
If timerange is passed in, checks whether data from an before the stored data will be
downloaded.
If that's the case then what's available should be completely overwritten.
Only used by download_pair_history().
"""
@@ -238,7 +264,7 @@ def download_pair_history(datadir: Path,
f'and store in {datadir}.'
)
data, since_ms = load_cached_data_for_updating(datadir, pair, ticker_interval, timerange)
data, since_ms = _load_cached_data_for_updating(datadir, pair, ticker_interval, timerange)
logger.debug("Current Start: %s", misc.format_ms_time(data[1][0]) if data else 'None')
logger.debug("Current End: %s", misc.format_ms_time(data[-1][0]) if data else 'None')
@@ -266,7 +292,7 @@ def download_pair_history(datadir: Path,
def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes: List[str],
dl_path: Path, timerange: TimeRange,
dl_path: Path, timerange: Optional[TimeRange] = None,
erase=False) -> List[str]:
"""
Refresh stored ohlcv data for backtesting and hyperopt operations.
@@ -294,6 +320,92 @@ def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes
return pairs_not_available
def download_trades_history(datadir: Path,
exchange: Exchange,
pair: str,
timerange: Optional[TimeRange] = None) -> bool:
"""
Download trade history from the exchange.
Appends to previously downloaded trades data.
"""
try:
since = timerange.startts * 1000 if timerange and timerange.starttype == 'date' else None
trades = load_trades_file(datadir, pair)
from_id = trades[-1]['id'] if trades else None
logger.debug("Current Start: %s", trades[0]['datetime'] if trades else 'None')
logger.debug("Current End: %s", trades[-1]['datetime'] if trades else 'None')
new_trades = exchange.get_historic_trades(pair=pair,
since=since if since else
int(arrow.utcnow().shift(
days=-30).float_timestamp) * 1000,
# until=xxx,
from_id=from_id,
)
trades.extend(new_trades[1])
store_trades_file(datadir, pair, trades)
logger.debug("New Start: %s", trades[0]['datetime'])
logger.debug("New End: %s", trades[-1]['datetime'])
logger.info(f"New Amount of trades: {len(trades)}")
return True
except Exception as e:
logger.error(
f'Failed to download historic trades for pair: "{pair}". '
f'Error: {e}'
)
return False
def refresh_backtest_trades_data(exchange: Exchange, pairs: List[str], datadir: Path,
timerange: TimeRange, erase=False) -> List[str]:
"""
Refresh stored trades data.
Used by freqtrade download-data
:return: Pairs not available
"""
pairs_not_available = []
for pair in pairs:
if pair not in exchange.markets:
pairs_not_available.append(pair)
logger.info(f"Skipping pair {pair}...")
continue
dl_file = pair_trades_filename(datadir, pair)
if erase and dl_file.exists():
logger.info(
f'Deleting existing data for pair {pair}.')
dl_file.unlink()
logger.info(f'Downloading trades for pair {pair}.')
download_trades_history(datadir=datadir, exchange=exchange,
pair=pair,
timerange=timerange)
return pairs_not_available
def convert_trades_to_ohlcv(pairs: List[str], timeframes: List[str],
datadir: Path, timerange: TimeRange, erase=False) -> None:
"""
Convert stored trades data to ohlcv data
"""
for pair in pairs:
trades = load_trades_file(datadir, pair)
for timeframe in timeframes:
ohlcv_file = pair_data_filename(datadir, pair, timeframe)
if erase and ohlcv_file.exists():
logger.info(f'Deleting existing data for pair {pair}, interval {timeframe}.')
ohlcv_file.unlink()
ohlcv = trades_to_ohlcv(trades, timeframe)
# Store ohlcv
store_tickerdata_file(datadir, pair, timeframe, data=ohlcv)
def get_timeframe(data: Dict[str, DataFrame]) -> Tuple[arrow.Arrow, arrow.Arrow]:
"""
Get the maximum timeframe for the given backtest data
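
The trades files written and read above are plain (optionally gzipped) JSON lists of ccxt trade dicts, stored under a `<PAIR>-trades.json.gz` name next to the candle data. A small round-trip sketch; the data directory and the trade dict are illustrative:

from pathlib import Path
from freqtrade.data.history import (load_trades_file, pair_trades_filename,
                                    store_trades_file)

datadir = Path('user_data/data/binance')   # illustrative location
datadir.mkdir(parents=True, exist_ok=True)

print(pair_trades_filename(datadir, 'ETH/BTC'))
# user_data/data/binance/ETH_BTC-trades.json.gz

store_trades_file(datadir, 'ETH/BTC',
                  [{'id': '1', 'datetime': '2019-08-14T15:59:10Z',
                    'price': 0.019627, 'amount': 0.5}])
print(len(load_trades_file(datadir, 'ETH/BTC')))  # 1
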

View File

@@ -77,8 +77,10 @@ class Edge:
self._timerange: TimeRange = TimeRange.parse_timerange("%s-" % arrow.now().shift(
days=-1 * self._since_number_of_days).format('YYYYMMDD'))
self.fee = self.exchange.get_fee()
if config.get('fee'):
self.fee = config['fee']
else:
self.fee = self.exchange.get_fee()
def calculate(self) -> bool:
pairs = self.config['exchange']['pair_whitelist']

View File

@@ -1,8 +1,9 @@
from freqtrade.exchange.exchange import Exchange # noqa: F401
from freqtrade.exchange.exchange import Exchange, MAP_EXCHANGE_CHILDCLASS # noqa: F401
from freqtrade.exchange.exchange import (get_exchange_bad_reason, # noqa: F401
is_exchange_bad,
is_exchange_available,
is_exchange_known_ccxt,
is_exchange_officially_supported,
ccxt_exchanges,
available_exchanges)
from freqtrade.exchange.exchange import (timeframe_to_seconds, # noqa: F401
timeframe_to_minutes,

View File

@@ -16,6 +16,8 @@ class Binance(Exchange):
_ft_has: Dict = {
"stoploss_on_exchange": True,
"order_time_in_force": ['gtc', 'fok', 'ioc'],
"trades_pagination": "id",
"trades_pagination_arg": "fromId",
}
def get_order_book(self, pair: str, limit: int = 100) -> dict:

View File

@@ -27,11 +27,86 @@ logger = logging.getLogger(__name__)
API_RETRY_COUNT = 4
BAD_EXCHANGES = {
"bitmex": "Various reasons",
"bitmex": "Various reasons.",
"bitstamp": "Does not provide history. "
"Details in https://github.com/freqtrade/freqtrade/issues/1983",
"hitbtc": "This API cannot be used with Freqtrade. "
"Use `hitbtc2` exchange id to access this exchange.",
**dict.fromkeys([
'adara',
'anxpro',
'bigone',
'coinbase',
'coinexchange',
'coinmarketcap',
'lykke',
'xbtce',
], "Does not provide timeframes. ccxt fetchOHLCV: False"),
**dict.fromkeys([
'bcex',
'bit2c',
'bitbay',
'bitflyer',
'bitforex',
'bithumb',
'bitso',
'bitstamp1',
'bl3p',
'braziliex',
'btcbox',
'btcchina',
'btctradeim',
'btctradeua',
'bxinth',
'chilebit',
'coincheck',
'coinegg',
'coinfalcon',
'coinfloor',
'coingi',
'coinmate',
'coinone',
'coinspot',
'coolcoin',
'crypton',
'deribit',
'exmo',
'exx',
'flowbtc',
'foxbit',
'fybse',
# 'hitbtc',
'ice3x',
'independentreserve',
'indodax',
'itbit',
'lakebtc',
'latoken',
'liquid',
'livecoin',
'luno',
'mixcoins',
'negociecoins',
'nova',
'paymium',
'southxchange',
'stronghold',
'surbitcoin',
'therock',
'tidex',
'vaultoro',
'vbtc',
'virwox',
'yobit',
'zaif',
], "Does not provide timeframes. ccxt fetchOHLCV: emulated"),
}
MAP_EXCHANGE_CHILDCLASS = {
'binanceus': 'binance',
'binanceje': 'binance',
}
def retrier_async(f):
async def wrapper(*args, **kwargs):
@@ -72,6 +147,8 @@ def retrier(f):
class Exchange:
_config: Dict = {}
# Parameters to add directly to buy/sell calls (like agreeing to trading agreement)
_params: Dict = {}
# Dict to specify which options each exchange implements
@@ -82,6 +159,9 @@ class Exchange:
"order_time_in_force": ["gtc"],
"ohlcv_candle_limit": 500,
"ohlcv_partial_candle": True,
"trades_pagination": "time", # Possible are "time" or "id"
"trades_pagination_arg": "since",
}
_ft_has: Dict = {}
@@ -125,6 +205,9 @@ class Exchange:
self._ohlcv_candle_limit = self._ft_has['ohlcv_candle_limit']
self._ohlcv_partial_candle = self._ft_has['ohlcv_partial_candle']
self._trades_pagination = self._ft_has['trades_pagination']
self._trades_pagination_arg = self._ft_has['trades_pagination_arg']
# Initialize ccxt objects
self._api = self._init_ccxt(
exchange_config, ccxt_kwargs=exchange_config.get('ccxt_config'))
@@ -133,6 +216,9 @@ class Exchange:
logger.info('Using Exchange "%s"', self.name)
# Check if timeframe is available
self.validate_timeframes(config.get('ticker_interval'))
# Converts the interval provided in minutes in config to seconds
self.markets_refresh_interval: int = exchange_config.get(
"markets_refresh_interval", 60) * 60
@@ -144,10 +230,6 @@ class Exchange:
self.validate_ordertypes(config.get('order_types', {}))
self.validate_order_time_in_force(config.get('order_time_in_force', {}))
if config.get('ticker_interval'):
# Check if timeframe is available
self.validate_timeframes(config['ticker_interval'])
def __del__(self):
"""
Destructor - clean up async stuff
@@ -165,7 +247,7 @@ class Exchange:
# Find matching class for the given exchange name
name = exchange_config['name']
if not is_exchange_available(name, ccxt_module):
if not is_exchange_known_ccxt(name, ccxt_module):
raise OperationalException(f'Exchange {name} is not supported by ccxt')
ex_config = {
@@ -199,6 +281,10 @@ class Exchange:
"""exchange ccxt id"""
return self._api.id
@property
def timeframes(self) -> List[str]:
return list((self._api.timeframes or {}).keys())
@property
def markets(self) -> Dict:
"""exchange ccxt markets"""
@@ -291,7 +377,7 @@ class Exchange:
return pair
raise DependencyException(f"Could not combine {curr_1} and {curr_2} to get a valid pair.")
def validate_timeframes(self, timeframe: List[str]) -> None:
def validate_timeframes(self, timeframe: Optional[str]) -> None:
"""
Checks if ticker interval from config is a supported timeframe on the exchange
"""
@@ -304,10 +390,9 @@ class Exchange:
f"for the exchange \"{self.name}\" and this exchange "
f"is therefore not supported. ccxt fetchOHLCV: {self.exchange_has('fetchOHLCV')}")
timeframes = self._api.timeframes
if timeframe not in timeframes:
if timeframe and (timeframe not in self.timeframes):
raise OperationalException(
f'Invalid ticker {timeframe}, this Exchange supports {timeframes}')
f"Invalid ticker interval '{timeframe}'. This exchange supports: {self.timeframes}")
def validate_ordertypes(self, order_types: Dict) -> None:
"""
@@ -665,6 +750,154 @@ class Exchange:
except ccxt.BaseError as e:
raise OperationalException(f'Could not fetch ticker data. Msg: {e}') from e
@retrier_async
async def _async_fetch_trades(self, pair: str,
since: Optional[int] = None,
params: Optional[dict] = None) -> List[Dict]:
"""
Asyncronously gets trade history using fetch_trades.
Handles exchange errors, does one call to the exchange.
:param pair: Pair to fetch trade data for
:param since: Since as integer timestamp in milliseconds
returns: List of dicts containing trades
"""
try:
# fetch trades asynchronously
if params:
logger.debug("Fetching trades for pair %s, params: %s ", pair, params)
trades = await self._api_async.fetch_trades(pair, params=params, limit=1000)
else:
logger.debug(
"Fetching trades for pair %s, since %s %s...",
pair, since,
'(' + arrow.get(since // 1000).isoformat() + ') ' if since is not None else ''
)
trades = await self._api_async.fetch_trades(pair, since=since, limit=1000)
return trades
except ccxt.NotSupported as e:
raise OperationalException(
f'Exchange {self._api.name} does not support fetching historical trade data.'
f'Message: {e}') from e
except (ccxt.NetworkError, ccxt.ExchangeError) as e:
raise TemporaryError(f'Could not load trade history due to {e.__class__.__name__}. '
f'Message: {e}') from e
except ccxt.BaseError as e:
raise OperationalException(f'Could not fetch trade data. Msg: {e}') from e
async def _async_get_trade_history_id(self, pair: str,
until: int,
since: Optional[int] = None,
from_id: Optional[str] = None) -> Tuple[str, List[Dict]]:
"""
Asyncronously gets trade history using fetch_trades
use this when exchange uses id-based iteration (check `self._trades_pagination`)
:param pair: Pair to fetch trade data for
:param since: Since as integer timestamp in milliseconds
:param until: Until as integer timestamp in milliseconds
:param from_id: Download data starting with ID (if id is known). Ignores "since" if set.
returns tuple: (pair, trades-list)
"""
trades: List[Dict] = []
if not from_id:
# Fetch first elements using timebased method to get an ID to paginate on
# Depending on the Exchange, this can introduce a drift at the start of the interval
# of up to an hour.
# e.g. Binance returns the "last 1000" candles within a 1h time interval
# - so we will miss the first trades.
t = await self._async_fetch_trades(pair, since=since)
from_id = t[-1]['id']
trades.extend(t[:-1])
while True:
t = await self._async_fetch_trades(pair,
params={self._trades_pagination_arg: from_id})
if len(t):
# Skip last id since its the key for the next call
trades.extend(t[:-1])
if from_id == t[-1]['id'] or t[-1]['timestamp'] > until:
logger.debug(f"Stopping because from_id did not change. "
f"Reached {t[-1]['timestamp']} > {until}")
# Reached the end of the defined-download period - add last trade as well.
trades.extend(t[-1:])
break
from_id = t[-1]['id']
else:
break
return (pair, trades)
async def _async_get_trade_history_time(self, pair: str, until: int,
since: Optional[int] = None) -> Tuple[str, List]:
"""
Asyncronously gets trade history using fetch_trades,
when the exchange uses time-based iteration (check `self._trades_pagination`)
:param pair: Pair to fetch trade data for
:param since: Since as integer timestamp in milliseconds
:param until: Until as integer timestamp in milliseconds
returns tuple: (pair, trades-list)
"""
trades: List[Dict] = []
while True:
t = await self._async_fetch_trades(pair, since=since)
if len(t):
since = t[-1]['timestamp']
trades.extend(t)
# Reached the end of the defined-download period
if until and t[-1]['timestamp'] > until:
logger.debug(
f"Stopping because until was reached. {t[-1]['timestamp']} > {until}")
break
else:
break
return (pair, trades)
async def _async_get_trade_history(self, pair: str,
since: Optional[int] = None,
until: Optional[int] = None,
from_id: Optional[str] = None) -> Tuple[str, List[Dict]]:
"""
Async wrapper handling downloading trades using either time or id based methods.
"""
if self._trades_pagination == 'time':
return await self._async_get_trade_history_time(
pair=pair, since=since,
until=until or ccxt.Exchange.milliseconds())
elif self._trades_pagination == 'id':
return await self._async_get_trade_history_id(
pair=pair, since=since,
until=until or ccxt.Exchange.milliseconds(), from_id=from_id
)
else:
raise OperationalException(f"Exchange {self.name} does use neither time, "
f"nor id based pagination")
def get_historic_trades(self, pair: str,
since: Optional[int] = None,
until: Optional[int] = None,
from_id: Optional[str] = None) -> Tuple[str, List]:
"""
Gets candle history using asyncio and returns the list of candles.
Handles all async doing.
Async over one pair, assuming we get `_ohlcv_candle_limit` candles per call.
:param pair: Pair to download
:param ticker_interval: Interval to get
:param since: Timestamp in milliseconds to get history from
:param until: Timestamp in milliseconds. Defaults to current timestamp if not defined.
:param from_id: Download data starting with ID (if id is known)
:returns List of tickers
"""
if not self.exchange_has("fetchTrades"):
raise OperationalException("This exchange does not suport downloading Trades.")
return asyncio.get_event_loop().run_until_complete(
self._async_get_trade_history(pair=pair, since=since,
until=until, from_id=from_id))
@retrier
def cancel_order(self, order_id: str, pair: str) -> None:
if self._config['dry_run']:
@@ -768,18 +1001,29 @@ def get_exchange_bad_reason(exchange_name: str) -> str:
return BAD_EXCHANGES.get(exchange_name, "")
def is_exchange_available(exchange_name: str, ccxt_module=None) -> bool:
return exchange_name in available_exchanges(ccxt_module)
def is_exchange_known_ccxt(exchange_name: str, ccxt_module=None) -> bool:
return exchange_name in ccxt_exchanges(ccxt_module)
def is_exchange_officially_supported(exchange_name: str) -> bool:
return exchange_name in ['bittrex', 'binance']
def available_exchanges(ccxt_module=None) -> List[str]:
def ccxt_exchanges(ccxt_module=None) -> List[str]:
"""
Return the list of all exchanges known to ccxt
"""
return ccxt_module.exchanges if ccxt_module is not None else ccxt.exchanges
def available_exchanges(ccxt_module=None) -> List[str]:
"""
Return exchanges available to the bot, i.e. non-bad exchanges in the ccxt list
"""
exchanges = ccxt_exchanges(ccxt_module)
return [x for x in exchanges if not is_exchange_bad(x)]
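
Taken together, the helpers above distinguish between exchanges ccxt knows about and exchanges freqtrade will actually offer. A quick check, assuming this freqtrade revision is installed:

from freqtrade.exchange import (available_exchanges, ccxt_exchanges,
                                is_exchange_known_ccxt)

print(len(ccxt_exchanges()))             # every exchange id shipped with ccxt
print(len(available_exchanges()))        # minus the BAD_EXCHANGES entries above

print(is_exchange_known_ccxt('bitmex'))  # True - ccxt implements it
print('bitmex' in available_exchanges()) # False - filtered out as a bad exchange
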
def timeframe_to_seconds(ticker_interval: str) -> int:
"""
Translates the timeframe interval value written in the human readable

View File

@@ -14,6 +14,10 @@ logger = logging.getLogger(__name__)
class Kraken(Exchange):
_params: Dict = {"trading_agreement": "agree"}
_ft_has: Dict = {
"trades_pagination": "id",
"trades_pagination_arg": "since",
}
@retrier
def get_balances(self) -> dict:

View File

@@ -1,7 +1,6 @@
"""
Freqtrade is the main module of this bot. It contains the class Freqtrade()
"""
import copy
import logging
import traceback
@@ -12,7 +11,7 @@ from typing import Any, Dict, List, Optional, Tuple
import arrow
from requests.exceptions import RequestException
from freqtrade import (DependencyException, OperationalException, InvalidOrderException,
from freqtrade import (DependencyException, InvalidOrderException,
__version__, constants, persistence)
from freqtrade.data.converter import order_book_to_dataframe
from freqtrade.data.dataprovider import DataProvider
@@ -135,12 +134,11 @@ class FreqtradeBot:
self.strategy.informative_pairs())
# First process current opened trades
for trade in trades:
self.process_maybe_execute_sell(trade)
self.process_maybe_execute_sells(trades)
# Then looking for buy opportunities
if len(trades) < self.config['max_open_trades']:
self.process_maybe_execute_buy()
self.process_maybe_execute_buys()
if 'unfilledtimeout' in self.config:
# Check and handle any timed out open orders
@@ -262,11 +260,10 @@ class FreqtradeBot:
Checks pairs as long as the open trade count is below `max_open_trades`.
:return: True if at least one trade has been created.
"""
interval = self.strategy.ticker_interval
whitelist = copy.deepcopy(self.active_pair_whitelist)
if not whitelist:
logger.warning("Whitelist is empty.")
logger.info("Active pair whitelist is empty.")
return False
# Remove currently opened and latest pairs from whitelist
@@ -276,7 +273,8 @@ class FreqtradeBot:
logger.debug('Ignoring %s in pair whitelist', trade.pair)
if not whitelist:
logger.info("No currency pair in whitelist, but checking to sell open trades.")
logger.info("No currency pair in active pair whitelist, "
"but checking to sell open trades.")
return False
buycount = 0
@@ -285,8 +283,10 @@ class FreqtradeBot:
if self.strategy.is_pair_locked(_pair):
logger.info(f"Pair {_pair} is currently locked.")
continue
(buy, sell) = self.strategy.get_signal(
_pair, interval, self.dataprovider.ohlcv(_pair, self.strategy.ticker_interval))
_pair, self.strategy.ticker_interval,
self.dataprovider.ohlcv(_pair, self.strategy.ticker_interval))
if buy and not sell and len(Trade.get_open_trades()) < self.config['max_open_trades']:
stake_amount = self._get_trade_stake_amount(_pair)
@@ -431,10 +431,9 @@ class FreqtradeBot:
return True
def process_maybe_execute_buy(self) -> None:
def process_maybe_execute_buys(self) -> None:
"""
Tries to execute a buy trade in a safe way
:return: True if executed
Tries to execute buy orders for trades in a safe way
"""
try:
# Create entity and execute trade
@@ -443,40 +442,37 @@ class FreqtradeBot:
except DependencyException as exception:
logger.warning('Unable to create trade: %s', exception)
def process_maybe_execute_sell(self, trade: Trade) -> bool:
def process_maybe_execute_sells(self, trades: List[Any]) -> None:
"""
Tries to execute a sell trade
:return: True if executed
Tries to execute sell orders for trades in a safe way
"""
try:
self.update_trade_state(trade)
result = False
for trade in trades:
try:
self.update_trade_state(trade)
if self.strategy.order_types.get('stoploss_on_exchange') and trade.is_open:
result = self.handle_stoploss_on_exchange(trade)
if result:
self.wallets.update()
return result
if trade.is_open and trade.open_order_id is None:
if (self.strategy.order_types.get('stoploss_on_exchange') and
self.handle_stoploss_on_exchange(trade)):
result = True
continue
# Check if we can sell our current pair
result = self.handle_trade(trade)
if trade.open_order_id is None and self.handle_trade(trade):
result = True
# Updating wallets if any trade occured
if result:
self.wallets.update()
except DependencyException as exception:
logger.warning('Unable to sell trade: %s', exception)
return result
# Updating wallets if any trade occured
if result:
self.wallets.update()
except DependencyException as exception:
logger.warning('Unable to sell trade: %s', exception)
return False
def get_real_amount(self, trade: Trade, order: Dict) -> float:
def get_real_amount(self, trade: Trade, order: Dict, order_amount: float = None) -> float:
"""
Get real amount for the trade
Necessary for exchanges which charge fees in base currency (e.g. binance)
"""
order_amount = order['amount']
if order_amount is None:
order_amount = order['amount']
# Only run for closed orders
if trade.fee_open == 0 or order['status'] == 'open':
return order_amount
@@ -513,7 +509,7 @@ class FreqtradeBot:
if not isclose(amount, order_amount, abs_tol=constants.MATH_CLOSE_PREC):
logger.warning(f"Amount {amount} does not match amount {trade.amount}")
raise OperationalException("Half bought? Amounts don't match")
raise DependencyException("Half bought? Amounts don't match")
real_amount = amount - fee_abs
if fee_abs != 0:
logger.info(f"Applying fee on amount for {trade} "
@@ -541,7 +537,7 @@ class FreqtradeBot:
# Fee was applied, so set to 0
trade.fee_open = 0
except OperationalException as exception:
except DependencyException as exception:
logger.warning("Could not update trade amount: %s", exception)
trade.update(order)
@@ -575,18 +571,20 @@ class FreqtradeBot:
:return: True if trade has been sold, False otherwise
"""
if not trade.is_open:
raise ValueError(f'Attempt to handle closed trade: {trade}')
raise DependencyException(f'Attempt to handle closed trade: {trade}')
logger.debug('Handling %s ...', trade)
(buy, sell) = (False, False)
experimental = self.config.get('experimental', {})
if experimental.get('use_sell_signal') or experimental.get('ignore_roi_if_buy_signal'):
config_ask_strategy = self.config.get('ask_strategy', {})
if (config_ask_strategy.get('use_sell_signal', True) or
config_ask_strategy.get('ignore_roi_if_buy_signal')):
(buy, sell) = self.strategy.get_signal(
trade.pair, self.strategy.ticker_interval,
self.dataprovider.ohlcv(trade.pair, self.strategy.ticker_interval))
config_ask_strategy = self.config.get('ask_strategy', {})
if config_ask_strategy.get('use_order_book', False):
logger.info('Using order book for selling...')
# logger.debug('Order book %s',orderBook)
@@ -708,7 +706,7 @@ class FreqtradeBot:
if trade.stop_loss > float(order['info']['stopPrice']):
# we check if the update is neccesary
update_beat = self.strategy.order_types.get('stoploss_on_exchange_interval', 60)
if (datetime.utcnow() - trade.stoploss_last_update).total_seconds() > update_beat:
if (datetime.utcnow() - trade.stoploss_last_update).total_seconds() >= update_beat:
# cancelling the current stoploss on exchange first
logger.info('Trailing stoploss: cancelling current stoploss on exchange (id:{%s})'
'in order to add another one ...', order['id'])
@@ -750,8 +748,8 @@ class FreqtradeBot:
"""
buy_timeout = self.config['unfilledtimeout']['buy']
sell_timeout = self.config['unfilledtimeout']['sell']
buy_timeoutthreashold = arrow.utcnow().shift(minutes=-buy_timeout).datetime
sell_timeoutthreashold = arrow.utcnow().shift(minutes=-sell_timeout).datetime
buy_timeout_threshold = arrow.utcnow().shift(minutes=-buy_timeout).datetime
sell_timeout_threshold = arrow.utcnow().shift(minutes=-sell_timeout).datetime
for trade in Trade.query.filter(Trade.open_order_id.isnot(None)).all():
try:
@@ -775,21 +773,18 @@ class FreqtradeBot:
self.wallets.update()
continue
# Handle cancelled on exchange
if order['status'] == 'canceled':
if order['side'] == 'buy':
self.handle_buy_order_full_cancel(trade, "canceled on Exchange")
elif order['side'] == 'sell':
self.handle_timedout_limit_sell(trade, order)
self.wallets.update()
# Check if order is still actually open
elif order['status'] == 'open':
if order['side'] == 'buy' and ordertime < buy_timeoutthreashold:
self.handle_timedout_limit_buy(trade, order)
self.wallets.update()
elif order['side'] == 'sell' and ordertime < sell_timeoutthreashold:
self.handle_timedout_limit_sell(trade, order)
self.wallets.update()
if ((order['side'] == 'buy' and order['status'] == 'canceled')
or (order['status'] == 'open'
and order['side'] == 'buy' and ordertime < buy_timeout_threshold)):
self.handle_timedout_limit_buy(trade, order)
self.wallets.update()
elif ((order['side'] == 'sell' and order['status'] == 'canceled')
or (order['status'] == 'open'
and order['side'] == 'sell' and ordertime < sell_timeout_threshold)):
self.handle_timedout_limit_sell(trade, order)
self.wallets.update()
def handle_buy_order_full_cancel(self, trade: Trade, reason: str) -> None:
"""Close trade in database and send message"""
@@ -805,16 +800,33 @@ class FreqtradeBot:
"""Buy timeout - cancel order
:return: True if order was fully cancelled
"""
self.exchange.cancel_order(trade.open_order_id, trade.pair)
if order['remaining'] == order['amount']:
reason = "cancelled due to timeout"
if order['status'] != 'canceled':
corder = self.exchange.cancel_order(trade.open_order_id, trade.pair)
else:
# Order was cancelled already, so we can reuse the existing dict
corder = order
reason = "canceled on Exchange"
if corder['remaining'] == corder['amount']:
# if trade is not partially completed, just delete the trade
self.handle_buy_order_full_cancel(trade, "cancelled due to timeout")
self.handle_buy_order_full_cancel(trade, reason)
return True
# if trade is partially complete, edit the stake details for the trade
# and close the order
trade.amount = order['amount'] - order['remaining']
trade.amount = corder['amount'] - corder['remaining']
trade.stake_amount = trade.amount * trade.open_rate
# verify if fees were taken from amount to avoid problems during selling
try:
new_amount = self.get_real_amount(trade, corder, trade.amount)
if not isclose(order['amount'], new_amount, abs_tol=constants.MATH_CLOSE_PREC):
trade.amount = new_amount
# Fee was applied, so set to 0
trade.fee_open = 0
except DependencyException as e:
logger.warning("Could not update trade amount: %s", e)
trade.open_order_id = None
logger.info('Partial buy order timeout for %s.', trade)
self.rpc.send_msg({

View File

@@ -1,40 +0,0 @@
from math import cos, exp, pi, sqrt
import numpy as np
import talib as ta
from pandas import Series
def went_up(series: Series) -> bool:
return series > series.shift(1)
def went_down(series: Series) -> bool:
return series < series.shift(1)
def ehlers_super_smoother(series: Series, smoothing: float = 6) -> Series:
magic = pi * sqrt(2) / smoothing
a1 = exp(-magic)
coeff2 = 2 * a1 * cos(magic)
coeff3 = -a1 * a1
coeff1 = (1 - coeff2 - coeff3) / 2
filtered = series.copy()
for i in range(2, len(series)):
filtered.iloc[i] = coeff1 * (series.iloc[i] + series.iloc[i-1]) + \
coeff2 * filtered.iloc[i-1] + coeff3 * filtered.iloc[i-2]
return filtered
def fishers_inverse(series: Series, smoothing: float = 0) -> np.ndarray:
""" Does a smoothed fishers inverse transformation.
Can be used with any oscillator that goes from 0 to 100 like RSI or MFI """
v1 = 0.1 * (series - 50)
if smoothing > 0:
v2 = ta.WMA(v1.values, timeperiod=smoothing)
else:
v2 = v1
return (np.exp(2 * v2)-1) / (np.exp(2 * v2) + 1)

View File

@@ -72,8 +72,10 @@ def json_load(datafile: IO):
def file_load_json(file):
gzipfile = file.with_suffix(file.suffix + '.gz')
if file.suffix != ".gz":
gzipfile = file.with_suffix(file.suffix + '.gz')
else:
gzipfile = file
# Try gzip file first, otherwise regular json file.
if gzipfile.is_file():
logger.debug('Loading ticker data from file %s', gzipfile)

View File

@@ -63,9 +63,12 @@ class Backtesting:
self.config['exchange']['uid'] = ''
self.config['dry_run'] = True
self.strategylist: List[IStrategy] = []
self.exchange = ExchangeResolver(self.config['exchange']['name'], self.config).exchange
self.fee = self.exchange.get_fee()
if config.get('fee'):
self.fee = config['fee']
else:
self.fee = self.exchange.get_fee()
if self.config.get('runmode') != RunMode.HYPEROPT:
self.dataprovider = DataProvider(self.config, self.exchange)
@@ -146,8 +149,8 @@ class Backtesting:
len(results[results.profit_abs < 0])
])
# Ignore type as floatfmt does allow tuples but mypy does not know that
return tabulate(tabular_data, headers=headers, # type: ignore
floatfmt=floatfmt, tablefmt="pipe")
return tabulate(tabular_data, headers=headers,
floatfmt=floatfmt, tablefmt="pipe") # type: ignore
def _generate_text_table_sell_reason(self, data: Dict[str, Dict], results: DataFrame) -> str:
"""
@@ -185,8 +188,8 @@ class Backtesting:
len(results[results.profit_abs < 0])
])
# Ignore type as floatfmt does allow tuples but mypy does not know that
return tabulate(tabular_data, headers=headers, # type: ignore
floatfmt=floatfmt, tablefmt="pipe")
return tabulate(tabular_data, headers=headers,
floatfmt=floatfmt, tablefmt="pipe") # type: ignore
def _store_backtest_result(self, recordfilename: Path, results: DataFrame,
strategyname: Optional[str] = None) -> None:
@@ -267,6 +270,11 @@ class Backtesting:
# - (Expected abs profit + open_rate + open_fee) / (fee_close -1)
closerate = - (trade.open_rate * roi + trade.open_rate *
(1 + trade.fee_open)) / (trade.fee_close - 1)
# Use the maximum between closerate and low as we
# cannot sell outside of a candle.
# Applies when using {"xx": -1} as roi to force sells after xx minutes
closerate = max(closerate, sell_row.low)
else:
# This should not be reached...
closerate = sell_row.open
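
The close-rate expression above inverts a fee-adjusted profit formula so that selling at `closerate` yields the targeted ROI (profit measured relative to the open rate); the new `max(closerate, sell_row.low)` clamp then keeps that synthetic rate inside the candle. A quick numeric self-check with made-up values:

open_rate = 0.019627
fee_open = fee_close = 0.0025
roi = 0.04                      # target 4% return

closerate = -(open_rate * roi + open_rate * (1 + fee_open)) / (fee_close - 1)

# Profit realised at that rate, relative to the open rate:
profit = (closerate * (1 - fee_close) - open_rate * (1 + fee_open)) / open_rate
print(round(profit, 10))        # 0.04 - matches the requested roi
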

View File

@@ -11,7 +11,7 @@ import freqtrade.vendor.qtpylib.indicators as qtpylib
from freqtrade.optimize.hyperopt_interface import IHyperOpt
class DefaultHyperOpts(IHyperOpt):
class DefaultHyperOpt(IHyperOpt):
"""
Default hyperopt provided by the Freqtrade bot.
You can override it with your own Hyperopt

View File

@@ -69,8 +69,8 @@ class EdgeCli:
])
# Ignore type as floatfmt does allow tuples but mypy does not know that
return tabulate(tabular_data, headers=headers, # type: ignore
floatfmt=floatfmt, tablefmt="pipe")
return tabulate(tabular_data, headers=headers,
floatfmt=floatfmt, tablefmt="pipe") # type: ignore
def start(self) -> None:
result = self.edge.calculate()

View File

@@ -98,10 +98,10 @@ class Hyperopt:
self.position_stacking = self.config.get('position_stacking', False)
if self.has_space('sell'):
# Make sure experimental is enabled
if 'experimental' not in self.config:
self.config['experimental'] = {}
self.config['experimental']['use_sell_signal'] = True
# Make sure use_sell_signal is enabled
if 'ask_strategy' not in self.config:
self.config['ask_strategy'] = {}
self.config['ask_strategy']['use_sell_signal'] = True
@staticmethod
def get_lock_filename(config) -> str:

View File

@@ -64,14 +64,13 @@ def add_indicators(fig, row, indicators: List[str], data: pd.DataFrame) -> make_
"""
for indicator in indicators:
if indicator in data:
# TODO: Figure out why scattergl causes problems
scattergl = go.Scatter(
scatter = go.Scatter(
x=data['date'],
y=data[indicator].values,
mode='lines',
name=indicator
)
fig.add_trace(scattergl, row, 1)
fig.add_trace(scatter, row, 1)
else:
logger.info(
'Indicator "%s" ignored. Reason: This indicator is not found '
@@ -92,7 +91,7 @@ def add_profit(fig, row, data: pd.DataFrame, column: str, name: str) -> make_sub
:param name: Name to use
:return: fig with added profit plot
"""
profit = go.Scattergl(
profit = go.Scatter(
x=data.index,
y=data[column],
name=name,
@@ -221,23 +220,27 @@ def generate_candlestick_graph(pair: str, data: pd.DataFrame, trades: pd.DataFra
else:
logger.warning("No sell-signals found.")
# TODO: Figure out why scattergl causes problems plotly/plotly.js#2284
if 'bb_lowerband' in data and 'bb_upperband' in data:
bb_lower = go.Scattergl(
bb_lower = go.Scatter(
x=data.date,
y=data.bb_lowerband,
name='BB lower',
showlegend=False,
line={'color': 'rgba(255,255,255,0)'},
)
bb_upper = go.Scattergl(
bb_upper = go.Scatter(
x=data.date,
y=data.bb_upperband,
name='BB upper',
name='Bollinger Band',
fill="tonexty",
fillcolor="rgba(0,176,246,0.2)",
line={'color': 'rgba(255,255,255,0)'},
)
fig.add_trace(bb_lower, 1, 1)
fig.add_trace(bb_upper, 1, 1)
if 'bb_upperband' in indicators1 and 'bb_lowerband' in indicators1:
indicators1.remove('bb_upperband')
indicators1.remove('bb_lowerband')
# Add indicators to main plot
fig = add_indicators(fig=fig, row=1, indicators=indicators1, data=data)
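
The band drawing above relies on plotly's `fill="tonexty"`: the lower bound is plotted invisibly first, and the upper bound then fills down to it. A standalone miniature of the technique with made-up data:

import plotly.graph_objects as go

x = [1, 2, 3, 4]
lower = [1.00, 1.10, 1.05, 1.20]
upper = [1.40, 1.50, 1.45, 1.60]

fig = go.Figure()
# Invisible lower bound first ...
fig.add_trace(go.Scatter(x=x, y=lower, showlegend=False,
                         line={'color': 'rgba(255,255,255,0)'}))
# ... then the upper bound, filled down to the previous trace.
fig.add_trace(go.Scatter(x=x, y=upper, name='Bollinger Band', fill='tonexty',
                         fillcolor='rgba(0,176,246,0.2)',
                         line={'color': 'rgba(255,255,255,0)'}))
# fig.show()
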
@@ -248,11 +251,13 @@ def generate_candlestick_graph(pair: str, data: pd.DataFrame, trades: pd.DataFra
volume = go.Bar(
x=data['date'],
y=data['volume'],
name='Volume'
)
name='Volume',
marker_color='DarkSlateGrey',
marker_line_color='DarkSlateGrey'
)
fig.add_trace(volume, 2, 1)
# Add indicators to seperate row
# Add indicators to separate row
fig = add_indicators(fig=fig, row=3, indicators=indicators2, data=data)
return fig
@@ -267,7 +272,7 @@ def generate_profit_graph(pairs: str, tickers: Dict[str, pd.DataFrame],
df_comb = create_cum_profit(df_comb, trades, 'cum_profit')
# Plot the pairs average close prices, and total profit growth
avgclose = go.Scattergl(
avgclose = go.Scatter(
x=df_comb.index,
y=df_comb['mean'],
name='Avg close price',

View File

@@ -3,7 +3,7 @@ This module loads custom exchanges
"""
import logging
from freqtrade.exchange import Exchange
from freqtrade.exchange import Exchange, MAP_EXCHANGE_CHILDCLASS
import freqtrade.exchange as exchanges
from freqtrade.resolvers import IResolver
@@ -22,6 +22,8 @@ class ExchangeResolver(IResolver):
Load the custom class from config parameter
:param config: configuration dictionary
"""
# Map exchange name to avoid duplicate classes for identical exchanges
exchange_name = MAP_EXCHANGE_CHILDCLASS.get(exchange_name, exchange_name)
exchange_name = exchange_name.title()
try:
self.exchange = self._load_exchange(exchange_name, kwargs={'config': config})
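
MAP_EXCHANGE_CHILDCLASS is a plain dict remapping exchange ids that share an implementation, so for example `binanceus` reuses the Binance subclass rather than falling back to the generic Exchange. An illustration, assuming this revision is importable:

from freqtrade.exchange import MAP_EXCHANGE_CHILDCLASS

print(MAP_EXCHANGE_CHILDCLASS.get('binanceus', 'binanceus'))  # binance
print(MAP_EXCHANGE_CHILDCLASS.get('kraken', 'kraken'))        # kraken - no remapping needed
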

View File

@@ -54,14 +54,8 @@ class HyperOptResolver(IResolver):
"""
current_path = Path(__file__).parent.parent.joinpath('optimize').resolve()
abs_paths = [
config['user_data_dir'].joinpath('hyperopts'),
current_path,
]
if extra_dir:
# Add extra hyperopt directory on top of search paths
abs_paths.insert(0, Path(extra_dir).resolve())
abs_paths = self.build_search_paths(config, current_path=current_path,
user_subdir='hyperopts', extra_dir=extra_dir)
hyperopt = self._load_object(paths=abs_paths, object_type=IHyperOpt,
object_name=hyperopt_name, kwargs={'config': config})
@@ -112,14 +106,8 @@ class HyperOptLossResolver(IResolver):
"""
current_path = Path(__file__).parent.parent.joinpath('optimize').resolve()
abs_paths = [
config['user_data_dir'].joinpath('hyperopts'),
current_path,
]
if extra_dir:
# Add extra hyperopt directory on top of search paths
abs_paths.insert(0, Path(extra_dir).resolve())
abs_paths = self.build_search_paths(config, current_path=current_path,
user_subdir='hyperopts', extra_dir=extra_dir)
hyperoptloss = self._load_object(paths=abs_paths, object_type=IHyperOptLoss,
object_name=hyper_loss_name)

View File

@@ -7,7 +7,7 @@ import importlib.util
import inspect
import logging
from pathlib import Path
from typing import Any, List, Optional, Tuple, Type, Union
from typing import Any, List, Optional, Tuple, Union, Generator
logger = logging.getLogger(__name__)
@@ -17,15 +17,29 @@ class IResolver:
This class contains all the logic to load custom classes
"""
def build_search_paths(self, config, current_path: Path, user_subdir: str,
extra_dir: Optional[str] = None) -> List[Path]:
abs_paths = [
config['user_data_dir'].joinpath(user_subdir),
current_path,
]
if extra_dir:
# Add extra directory to the top of the search paths
abs_paths.insert(0, Path(extra_dir).resolve())
return abs_paths
@staticmethod
def _get_valid_object(object_type, module_path: Path,
object_name: str) -> Optional[Type[Any]]:
object_name: str) -> Generator[Any, None, None]:
"""
Returns the first object with matching object_type and object_name in the path given.
Generator returning objects with matching object_type and object_name in the path given.
:param object_type: object_type (class)
:param module_path: absolute path to the module
:param object_name: Class name of the object
:return: class or None
:return: generator containing matching objects
"""
# Generate spec based on absolute path
@@ -42,7 +56,7 @@ class IResolver:
obj for name, obj in inspect.getmembers(module, inspect.isclass)
if object_name == name and object_type in obj.__bases__
)
return next(valid_objects_gen, None)
return valid_objects_gen
@staticmethod
def _search_object(directory: Path, object_type, object_name: str,
@@ -59,9 +73,9 @@ class IResolver:
logger.debug('Ignoring %s', entry)
continue
module_path = entry.resolve()
obj = IResolver._get_valid_object(
object_type, module_path, object_name
)
obj = next(IResolver._get_valid_object(object_type, module_path, object_name), None)
if obj:
return (obj(**kwargs), module_path)
return (None, None)

View File

@@ -1,7 +1,7 @@
# pragma pylint: disable=attribute-defined-outside-init
"""
This module load custom hyperopts
This module load custom pairlists
"""
import logging
from pathlib import Path
@@ -15,7 +15,7 @@ logger = logging.getLogger(__name__)
class PairListResolver(IResolver):
"""
This class contains all the logic to load custom hyperopt class
This class contains all the logic to load a custom PairList class
"""
__slots__ = ['pairlist']
@@ -39,10 +39,8 @@ class PairListResolver(IResolver):
"""
current_path = Path(__file__).parent.parent.joinpath('pairlist').resolve()
abs_paths = [
config['user_data_dir'].joinpath('pairlist'),
current_path,
]
abs_paths = self.build_search_paths(config, current_path=current_path,
user_subdir='pairlist', extra_dir=None)
pairlist = self._load_object(paths=abs_paths, object_type=IPairList,
object_name=pairlist_name, kwargs=kwargs)

View File

@@ -41,13 +41,13 @@ class StrategyResolver(IResolver):
config=config,
extra_dir=config.get('strategy_path'))
# make sure experimental dict is available
if 'experimental' not in config:
config['experimental'] = {}
# make sure ask_strategy dict is available
if 'ask_strategy' not in config:
config['ask_strategy'] = {}
# Set attributes
# Check if we need to override configuration
# (Attribute name, default, experimental)
# (Attribute name, default, ask_strategy)
attributes = [("minimal_roi", {"0": 10.0}, False),
("ticker_interval", None, False),
("stoploss", None, False),
@@ -60,20 +60,20 @@ class StrategyResolver(IResolver):
("order_time_in_force", None, False),
("stake_currency", None, False),
("stake_amount", None, False),
("use_sell_signal", False, True),
("use_sell_signal", True, True),
("sell_profit_only", False, True),
("ignore_roi_if_buy_signal", False, True),
]
for attribute, default, experimental in attributes:
if experimental:
self._override_attribute_helper(config['experimental'], attribute, default)
for attribute, default, ask_strategy in attributes:
if ask_strategy:
self._override_attribute_helper(config['ask_strategy'], attribute, default)
else:
self._override_attribute_helper(config, attribute, default)
# Loop this list again to have output combined
for attribute, _, exp in attributes:
if exp and attribute in config['experimental']:
logger.info("Strategy using %s: %s", attribute, config['experimental'][attribute])
if exp and attribute in config['ask_strategy']:
logger.info("Strategy using %s: %s", attribute, config['ask_strategy'][attribute])
elif attribute in config:
logger.info("Strategy using %s: %s", attribute, config[attribute])
@@ -98,7 +98,10 @@ class StrategyResolver(IResolver):
logger.info("Override strategy '%s' with value in config file: %s.",
attribute, config[attribute])
elif hasattr(self.strategy, attribute):
config[attribute] = getattr(self.strategy, attribute)
val = getattr(self.strategy, attribute)
# None values cannot exist in the config, so do not copy them
if val is not None:
config[attribute] = val
# Explicitly check for None here as other "falsy" values are possible
elif default is not None:
setattr(self.strategy, attribute, default)
@@ -124,14 +127,8 @@ class StrategyResolver(IResolver):
"""
current_path = Path(__file__).parent.parent.joinpath('strategy').resolve()
abs_paths = [
config['user_data_dir'].joinpath('strategies'),
current_path,
]
if extra_dir:
# Add extra strategy directory on top of search paths
abs_paths.insert(0, Path(extra_dir).resolve())
abs_paths = self.build_search_paths(config, current_path=current_path,
user_subdir='strategies', extra_dir=extra_dir)
if ":" in strategy_name:
logger.info("loading base64 encoded strategy")

View File

@@ -18,7 +18,7 @@ class RPCManager:
self.registered_modules: List[RPC] = []
# Enable telegram
if freqtrade.config['telegram'].get('enabled', False):
if freqtrade.config.get('telegram', {}).get('enabled', False):
logger.info('Enabling rpc.telegram ...')
from freqtrade.rpc.telegram import Telegram
self.registered_modules.append(Telegram(freqtrade))
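The chained .get() calls matter for configurations that omit the telegram section entirely; a quick check with a hypothetical minimal config:

# Before, config['telegram'] raised KeyError when the section was missing;
# now the lookup simply evaluates to False and the Telegram module stays disabled.
config = {'dry_run': True}   # hypothetical config without a 'telegram' section
assert config.get('telegram', {}).get('enabled', False) is False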

View File

@@ -78,8 +78,8 @@ class IStrategy(ABC):
# trailing stoploss
trailing_stop: bool = False
trailing_stop_positive: float
trailing_stop_positive_offset: float
trailing_stop_positive: Optional[float] = None
trailing_stop_positive_offset: float = 0.0
trailing_only_offset_is_reached = False
# associated ticker interval
@@ -309,9 +309,9 @@ class IStrategy(ABC):
# Set current rate to high for backtesting sell
current_rate = high or rate
current_profit = trade.calc_profit_percent(current_rate)
experimental = self.config.get('experimental', {})
config_ask_strategy = self.config.get('ask_strategy', {})
if buy and experimental.get('ignore_roi_if_buy_signal', False):
if buy and config_ask_strategy.get('ignore_roi_if_buy_signal', False):
# This one is noisy, commented out
# logger.debug(f"{trade.pair} - Buy signal still active. sell_flag=False")
return SellCheckTuple(sell_flag=False, sell_type=SellType.NONE)
@@ -322,7 +322,7 @@ class IStrategy(ABC):
f"sell_type=SellType.ROI")
return SellCheckTuple(sell_flag=True, sell_type=SellType.ROI)
if experimental.get('sell_profit_only', False):
if config_ask_strategy.get('sell_profit_only', False):
# This one is noisy, commented out
# logger.debug(f"{trade.pair} - Checking if trade is profitable...")
if trade.calc_profit(rate=rate) <= 0:
@@ -330,7 +330,7 @@ class IStrategy(ABC):
# logger.debug(f"{trade.pair} - Trade is not profitable. sell_flag=False")
return SellCheckTuple(sell_flag=False, sell_type=SellType.NONE)
if sell and not buy and experimental.get('use_sell_signal', False):
if sell and not buy and config_ask_strategy.get('use_sell_signal', True):
logger.debug(f"{trade.pair} - Sell signal received. sell_flag=True, "
f"sell_type=SellType.SELL_SIGNAL")
return SellCheckTuple(sell_flag=True, sell_type=SellType.SELL_SIGNAL)
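The sell checks now read the ask_strategy section instead of experimental, and use_sell_signal defaults to True when it is not set; a toy config to illustrate the lookups:

# Hypothetical config: use_sell_signal is absent, so the new default (True) applies.
config = {'ask_strategy': {'sell_profit_only': True}}
ask_strategy = config.get('ask_strategy', {})
assert ask_strategy.get('use_sell_signal', True) is True
assert ask_strategy.get('sell_profit_only', False) is True
assert ask_strategy.get('ignore_roi_if_buy_signal', False) is False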
@@ -347,26 +347,23 @@ class IStrategy(ABC):
decides to sell or not
:param current_profit: current profit in percent
"""
trailing_stop = self.config.get('trailing_stop', False)
stop_loss_value = force_stoploss if force_stoploss else self.stoploss
# Initiate stoploss with open_rate. Does nothing if stoploss is already set.
trade.adjust_stop_loss(trade.open_rate, stop_loss_value, initial=True)
if trailing_stop:
if self.trailing_stop:
# trailing stoploss handling
sl_offset = self.config.get('trailing_stop_positive_offset') or 0.0
tsl_only_offset = self.config.get('trailing_only_offset_is_reached', False)
sl_offset = self.trailing_stop_positive_offset
# Make sure current_profit is calculated using high for backtesting.
high_profit = current_profit if not high else trade.calc_profit_percent(high)
# Don't update stoploss if trailing_only_offset_is_reached is true.
if not (tsl_only_offset and high_profit < sl_offset):
if not (self.trailing_only_offset_is_reached and high_profit < sl_offset):
# Specific handling for trailing_stop_positive
if 'trailing_stop_positive' in self.config and high_profit > sl_offset:
# Ignore mypy error check in configuration that this is a float
stop_loss_value = self.config.get('trailing_stop_positive') # type: ignore
if self.trailing_stop_positive is not None and high_profit > sl_offset:
stop_loss_value = self.trailing_stop_positive
logger.debug(f"{trade.pair} - Using positive stoploss: {stop_loss_value} "
f"offset: {sl_offset:.4g} profit: {current_profit:.4f}%")

View File

@@ -8,8 +8,10 @@ import arrow
from freqtrade import OperationalException
from freqtrade.configuration import Configuration, TimeRange
from freqtrade.configuration.directory_operations import create_userdata_dir
from freqtrade.data.history import refresh_backtest_ohlcv_data
from freqtrade.exchange import available_exchanges
from freqtrade.data.history import (convert_trades_to_ohlcv,
refresh_backtest_ohlcv_data,
refresh_backtest_trades_data)
from freqtrade.exchange import available_exchanges, ccxt_exchanges
from freqtrade.resolvers import ExchangeResolver
from freqtrade.state import RunMode
@@ -50,12 +52,14 @@ def start_list_exchanges(args: Dict[str, Any]) -> None:
:param args: Cli args from Arguments()
:return: None
"""
exchanges = ccxt_exchanges() if args['list_exchanges_all'] else available_exchanges()
if args['print_one_column']:
print('\n'.join(available_exchanges()))
print('\n'.join(exchanges))
else:
print(f"Exchanges supported by ccxt and available for Freqtrade: "
f"{', '.join(available_exchanges())}")
if args['list_exchanges_all']:
print(f"All exchanges supported by the ccxt library: {', '.join(exchanges)}")
else:
print(f"Exchanges available for Freqtrade: {', '.join(exchanges)}")
def start_create_userdir(args: Dict[str, Any]) -> None:
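For reference, the list-exchanges helper above can be driven directly with a plain args dict (argparse normally fills these keys in); list_exchanges_all switches from the curated available_exchanges() list to everything ccxt knows about.

from freqtrade.utils import start_list_exchanges

# Hypothetical args dict mirroring the keys read above.
start_list_exchanges({'list_exchanges_all': True, 'print_one_column': False})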
@@ -97,9 +101,19 @@ def start_download_data(args: Dict[str, Any]) -> None:
# Init exchange
exchange = ExchangeResolver(config['exchange']['name'], config).exchange
pairs_not_available = refresh_backtest_ohlcv_data(
exchange, pairs=config["pairs"], timeframes=config["timeframes"],
dl_path=Path(config['datadir']), timerange=timerange, erase=config.get("erase"))
if config.get('download_trades'):
pairs_not_available = refresh_backtest_trades_data(
exchange, pairs=config["pairs"], datadir=Path(config['datadir']),
timerange=timerange, erase=config.get("erase"))
# Convert downloaded trade data to different timeframes
convert_trades_to_ohlcv(
pairs=config["pairs"], timeframes=config["timeframes"],
datadir=Path(config['datadir']), timerange=timerange, erase=config.get("erase"))
else:
pairs_not_available = refresh_backtest_ohlcv_data(
exchange, pairs=config["pairs"], timeframes=config["timeframes"],
dl_path=Path(config['datadir']), timerange=timerange, erase=config.get("erase"))
except KeyboardInterrupt:
sys.exit("SIGINT received, aborting ...")
@@ -108,3 +122,21 @@ def start_download_data(args: Dict[str, Any]) -> None:
if pairs_not_available:
logger.info(f"Pairs [{','.join(pairs_not_available)}] not available "
f"on exchange {config['exchange']['name']}.")
def start_list_timeframes(args: Dict[str, Any]) -> None:
"""
Print the ticker intervals (timeframes) available on the exchange
"""
config = setup_utils_configuration(args, RunMode.OTHER)
# Do not use ticker_interval set in the config
config['ticker_interval'] = None
# Init exchange
exchange = ExchangeResolver(config['exchange']['name'], config).exchange
if args['print_one_column']:
print('\n'.join(exchange.timeframes))
else:
print(f"Timeframes available for the exchange `{config['exchange']['name']}`: "
f"{', '.join(exchange.timeframes)}")