diff --git a/.dependabot/config.yml b/.dependabot/config.yml
new file mode 100644
index 000000000..66b91e99f
--- /dev/null
+++ b/.dependabot/config.yml
@@ -0,0 +1,17 @@
+version: 1
+
+update_configs:
+  - package_manager: "python"
+    directory: "/"
+    update_schedule: "weekly"
+    allowed_updates:
+      - match:
+          update_type: "all"
+    target_branch: "develop"
+
+  - package_manager: "docker"
+    directory: "/"
+    update_schedule: "daily"
+    allowed_updates:
+      - match:
+          update_type: "all"
diff --git a/.pyup.yml b/.pyup.yml
deleted file mode 100644
index b1b721113..000000000
--- a/.pyup.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-# autogenerated pyup.io config file
-# see https://pyup.io/docs/configuration/ for all available options
-
-# configure updates globally
-# default: all
-# allowed: all, insecure, False
-update: all
-
-# configure dependency pinning globally
-# default: True
-# allowed: True, False
-pin: True
-
-# update schedule
-# default: empty
-# allowed: "every day", "every week", ..
-schedule: "every week"
-
-
-search: False
-# Specify requirement files by hand, default is empty
-# default: empty
-# allowed: list
-requirements:
-  - requirements.txt
-  - requirements-dev.txt
-  - requirements-plot.txt
-  - requirements-common.txt
-
-
-# configure the branch prefix the bot is using
-# default: pyup-
-branch_prefix: pyup/
-
-# allow to close stale PRs
-# default: True
-close_prs: True
diff --git a/docs/docker.md b/docs/docker.md
index 615d31796..923dec1e2 100644
--- a/docs/docker.md
+++ b/docs/docker.md
@@ -26,6 +26,10 @@ To update the image, simply run the above commands again and restart your runnin
 
 Should you require additional libraries, please [build the image yourself](#build-your-own-docker-image).
 
+!!! Note Docker image update frequency
+    The official docker images with the tags `master`, `develop` and `latest` are automatically rebuilt once a week to keep the base image up to date.
+    In addition, every merge to `develop` will trigger a rebuild of `develop` and `latest`.
+
 ### Prepare the configuration files
 
 Even though you will use docker, you'll still need some files from the github repository.
diff --git a/docs/strategy-customization.md b/docs/strategy-customization.md
index 0d08bdd02..d71ebfded 100644
--- a/docs/strategy-customization.md
+++ b/docs/strategy-customization.md
@@ -274,27 +274,24 @@ Please always check the mode of operation to select the correct method to get da
 
 #### Possible options for DataProvider
 
-- `available_pairs` - Property with tuples listing cached pairs with their intervals. (pair, interval)
-- `ohlcv(pair, ticker_interval)` - Currently cached ticker data for all pairs in the whitelist, returns DataFrame or empty DataFrame
-- `historic_ohlcv(pair, ticker_interval)` - Data stored on disk
+- `available_pairs` - Property with tuples listing cached pairs with their intervals (pair, interval).
+- `ohlcv(pair, ticker_interval)` - Currently cached ticker data for the pair, returns DataFrame or empty DataFrame.
+- `historic_ohlcv(pair, ticker_interval)` - Returns historical data stored on disk.
+- `get_pair_dataframe(pair, ticker_interval)` - This is a universal method, which returns either historical data (for backtesting) or cached live data (for the Dry-Run and Live-Run modes).
 - `runmode` - Property containing the current runmode.
 
-#### ohlcv / historic_ohlcv
+#### Example: fetch live ohlcv / historic data for the first informative pair
 
 ``` python
 if self.dp:
-    if self.dp.runmode in ('live', 'dry_run'):
-        if (f'{self.stake_currency}/BTC', self.ticker_interval) in self.dp.available_pairs:
-            data_eth = self.dp.ohlcv(pair='{self.stake_currency}/BTC',
-                                     ticker_interval=self.ticker_interval)
-    else:
-        # Get historic ohlcv data (cached on disk).
-        history_eth = self.dp.historic_ohlcv(pair='{self.stake_currency}/BTC',
-                                             ticker_interval='1h')
+    inf_pair, inf_timeframe = self.informative_pairs()[0]
+    informative = self.dp.get_pair_dataframe(pair=inf_pair,
+                                             ticker_interval=inf_timeframe)
 ```
 
 !!! Warning Warning about backtesting
-    Be carefull when using dataprovider in backtesting. `historic_ohlcv()` provides the full time-range in one go,
+    Be careful when using the dataprovider in backtesting. `historic_ohlcv()` (and `get_pair_dataframe()`
+    for the backtesting runmode) provides the full time-range in one go,
     so please be aware of it and make sure to not "look into the future" to avoid surprises when running in dry/live mode).
 
 !!! Warning Warning in hyperopt
diff --git a/freqtrade/configuration/__init__.py b/freqtrade/configuration/__init__.py
index 7b476d173..ac59421a7 100644
--- a/freqtrade/configuration/__init__.py
+++ b/freqtrade/configuration/__init__.py
@@ -1,3 +1,4 @@
 from freqtrade.configuration.arguments import Arguments  # noqa: F401
 from freqtrade.configuration.timerange import TimeRange  # noqa: F401
 from freqtrade.configuration.configuration import Configuration  # noqa: F401
+from freqtrade.configuration.config_validation import validate_config_consistency  # noqa: F401
diff --git a/freqtrade/configuration/config_validation.py b/freqtrade/configuration/config_validation.py
new file mode 100644
index 000000000..92846b704
--- /dev/null
+++ b/freqtrade/configuration/config_validation.py
@@ -0,0 +1,102 @@
+import logging
+from typing import Any, Dict
+
+from jsonschema import Draft4Validator, validators
+from jsonschema.exceptions import ValidationError, best_match
+
+from freqtrade import constants, OperationalException
+
+
+logger = logging.getLogger(__name__)
+
+
+def _extend_validator(validator_class):
+    """
+    Extended validator for the Freqtrade configuration JSON Schema.
+    Currently it only handles defaults for subschemas.
+    """
+    validate_properties = validator_class.VALIDATORS['properties']
+
+    def set_defaults(validator, properties, instance, schema):
+        for prop, subschema in properties.items():
+            if 'default' in subschema:
+                instance.setdefault(prop, subschema['default'])
+
+        for error in validate_properties(
+            validator, properties, instance, schema,
+        ):
+            yield error
+
+    return validators.extend(
+        validator_class, {'properties': set_defaults}
+    )
+
+
+FreqtradeValidator = _extend_validator(Draft4Validator)
+
+
+def validate_config_schema(conf: Dict[str, Any]) -> Dict[str, Any]:
+    """
+    Validate the configuration follows the Config Schema
+    :param conf: Config in JSON format
+    :return: Returns the config if valid, otherwise throws an exception
+    """
+    try:
+        FreqtradeValidator(constants.CONF_SCHEMA).validate(conf)
+        return conf
+    except ValidationError as e:
+        logger.critical(
+            f"Invalid configuration. See config.json.example. Reason: {e}"
+        )
+        raise ValidationError(
+            best_match(Draft4Validator(constants.CONF_SCHEMA).iter_errors(conf)).message
+        )
+
+
+def validate_config_consistency(conf: Dict[str, Any]) -> None:
+    """
+    Validate the configuration consistency.
+    Should be run after loading both the configuration and the strategy,
+    since strategies can set certain configuration settings too.
+    :param conf: Config in JSON format
+    :return: Returns None if everything is ok, otherwise throws an OperationalException
+    """
+    # validating trailing stoploss
+    _validate_trailing_stoploss(conf)
+    _validate_edge(conf)
+
+
+def _validate_trailing_stoploss(conf: Dict[str, Any]) -> None:
+
+    # Skip if trailing stoploss is not activated
+    if not conf.get('trailing_stop', False):
+        return
+
+    tsl_positive = float(conf.get('trailing_stop_positive', 0))
+    tsl_offset = float(conf.get('trailing_stop_positive_offset', 0))
+    tsl_only_offset = conf.get('trailing_only_offset_is_reached', False)
+
+    if tsl_only_offset:
+        if tsl_positive == 0.0:
+            raise OperationalException(
+                f'The config trailing_only_offset_is_reached needs '
+                'trailing_stop_positive_offset to be more than 0 in your config.')
+    if tsl_positive > 0 and 0 < tsl_offset <= tsl_positive:
+        raise OperationalException(
+            f'The config trailing_stop_positive_offset needs '
+            'to be greater than trailing_stop_positive_offset in your config.')
+
+
+def _validate_edge(conf: Dict[str, Any]) -> None:
+    """
+    Edge and Dynamic whitelist should not both be enabled, since edge overrides dynamic whitelists.
+    """
+
+    if not conf.get('edge', {}).get('enabled'):
+        return
+
+    if conf.get('pairlist', {}).get('method') == 'VolumePairList':
+        raise OperationalException(
+            "Edge and VolumePairList are incompatible, "
+            "Edge will override whatever pairs VolumePairlist selects."
+        )
diff --git a/freqtrade/configuration/configuration.py b/freqtrade/configuration/configuration.py
index 75319ac47..329058cef 100644
--- a/freqtrade/configuration/configuration.py
+++ b/freqtrade/configuration/configuration.py
@@ -7,10 +7,11 @@ from argparse import Namespace
 from pathlib import Path
 from typing import Any, Callable, Dict, List, Optional
 
-from freqtrade import OperationalException, constants
+from freqtrade import constants, OperationalException
 from freqtrade.configuration.check_exchange import check_exchange
 from freqtrade.configuration.create_datadir import create_datadir
-from freqtrade.configuration.json_schema import validate_config_schema
+from freqtrade.configuration.config_validation import (validate_config_schema,
+                                                       validate_config_consistency)
 from freqtrade.configuration.load_config import load_config_file
 from freqtrade.loggers import setup_logging
 from freqtrade.misc import deep_merge_dicts, json_load
@@ -81,8 +82,6 @@ class Configuration(object):
         # Load all configs
         config: Dict[str, Any] = Configuration.from_files(self.args.config)
 
-        self._validate_config_consistency(config)
-
         self._process_common_options(config)
 
         self._process_optimize_options(config)
@@ -96,6 +95,8 @@ class Configuration(object):
 
         self._resolve_pairs_list(config)
 
+        validate_config_consistency(config)
+
         return config
 
     def _process_logging_options(self, config: Dict[str, Any]) -> None:
@@ -304,35 +305,6 @@ class Configuration(object):
 
         config.update({'runmode': self.runmode})
 
-    def _validate_config_consistency(self, conf: Dict[str, Any]) -> None:
-        """
-        Validate the configuration consistency
-        :param conf: Config in JSON format
-        :return: Returns None if everything is ok, otherwise throw an OperationalException
-        """
-        # validating trailing stoploss
-        self._validate_trailing_stoploss(conf)
-
-    def _validate_trailing_stoploss(self, conf: Dict[str, Any]) -> None:
-
-        # Skip if trailing stoploss is not activated
-        if not conf.get('trailing_stop', False):
-            return
-
-        
tsl_positive = float(conf.get('trailing_stop_positive', 0)) - tsl_offset = float(conf.get('trailing_stop_positive_offset', 0)) - tsl_only_offset = conf.get('trailing_only_offset_is_reached', False) - - if tsl_only_offset: - if tsl_positive == 0.0: - raise OperationalException( - f'The config trailing_only_offset_is_reached needs ' - 'trailing_stop_positive_offset to be more than 0 in your config.') - if tsl_positive > 0 and 0 < tsl_offset <= tsl_positive: - raise OperationalException( - f'The config trailing_stop_positive_offset needs ' - 'to be greater than trailing_stop_positive_offset in your config.') - def _args_to_config(self, config: Dict[str, Any], argname: str, logstring: str, logfun: Optional[Callable] = None, deprecated_msg: Optional[str] = None) -> None: diff --git a/freqtrade/configuration/json_schema.py b/freqtrade/configuration/json_schema.py deleted file mode 100644 index 4c6f4a4a0..000000000 --- a/freqtrade/configuration/json_schema.py +++ /dev/null @@ -1,53 +0,0 @@ -import logging -from typing import Any, Dict - -from jsonschema import Draft4Validator, validators -from jsonschema.exceptions import ValidationError, best_match - -from freqtrade import constants - - -logger = logging.getLogger(__name__) - - -def _extend_validator(validator_class): - """ - Extended validator for the Freqtrade configuration JSON Schema. - Currently it only handles defaults for subschemas. - """ - validate_properties = validator_class.VALIDATORS['properties'] - - def set_defaults(validator, properties, instance, schema): - for prop, subschema in properties.items(): - if 'default' in subschema: - instance.setdefault(prop, subschema['default']) - - for error in validate_properties( - validator, properties, instance, schema, - ): - yield error - - return validators.extend( - validator_class, {'properties': set_defaults} - ) - - -FreqtradeValidator = _extend_validator(Draft4Validator) - - -def validate_config_schema(conf: Dict[str, Any]) -> Dict[str, Any]: - """ - Validate the configuration follow the Config Schema - :param conf: Config in JSON format - :return: Returns the config if valid, otherwise throw an exception - """ - try: - FreqtradeValidator(constants.CONF_SCHEMA).validate(conf) - return conf - except ValidationError as e: - logger.critical( - f"Invalid configuration. See config.json.example. Reason: {e}" - ) - raise ValidationError( - best_match(Draft4Validator(constants.CONF_SCHEMA).iter_errors(conf)).message - ) diff --git a/freqtrade/constants.py b/freqtrade/constants.py index b73a723eb..05ee99c1b 100644 --- a/freqtrade/constants.py +++ b/freqtrade/constants.py @@ -5,7 +5,6 @@ bot constants """ DEFAULT_CONFIG = 'config.json' DEFAULT_EXCHANGE = 'bittrex' -DYNAMIC_WHITELIST = 20 # pairs PROCESS_THROTTLE_SECS = 5 # sec DEFAULT_TICKER_INTERVAL = 5 # min HYPEROPT_EPOCH = 100 # epochs diff --git a/freqtrade/data/dataprovider.py b/freqtrade/data/dataprovider.py index b87589df7..5b71c21a8 100644 --- a/freqtrade/data/dataprovider.py +++ b/freqtrade/data/dataprovider.py @@ -44,36 +44,49 @@ class DataProvider(): def ohlcv(self, pair: str, ticker_interval: str = None, copy: bool = True) -> DataFrame: """ - get ohlcv data for the given pair as DataFrame - Please check `available_pairs` to verify which pairs are currently cached. + Get ohlcv data for the given pair as DataFrame + Please use the `available_pairs` method to verify which pairs are currently cached. 
:param pair: pair to get the data for - :param ticker_interval: ticker_interval to get pair for - :param copy: copy dataframe before returning. - Use false only for RO operations (where the dataframe is not modified) + :param ticker_interval: ticker interval to get data for + :param copy: copy dataframe before returning if True. + Use False only for read-only operations (where the dataframe is not modified) """ if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE): - if ticker_interval: - pairtick = (pair, ticker_interval) - else: - pairtick = (pair, self._config['ticker_interval']) - - return self._exchange.klines(pairtick, copy=copy) + return self._exchange.klines((pair, ticker_interval or self._config['ticker_interval']), + copy=copy) else: return DataFrame() - def historic_ohlcv(self, pair: str, ticker_interval: str) -> DataFrame: + def historic_ohlcv(self, pair: str, ticker_interval: str = None) -> DataFrame: """ - get stored historic ohlcv data + Get stored historic ohlcv data :param pair: pair to get the data for - :param ticker_interval: ticker_interval to get pair for + :param ticker_interval: ticker interval to get data for """ return load_pair_history(pair=pair, - ticker_interval=ticker_interval, + ticker_interval=ticker_interval or self._config['ticker_interval'], refresh_pairs=False, datadir=Path(self._config['datadir']) if self._config.get( 'datadir') else None ) + def get_pair_dataframe(self, pair: str, ticker_interval: str = None) -> DataFrame: + """ + Return pair ohlcv data, either live or cached historical -- depending + on the runmode. + :param pair: pair to get the data for + :param ticker_interval: ticker interval to get data for + """ + if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE): + # Get live ohlcv data. + data = self.ohlcv(pair=pair, ticker_interval=ticker_interval) + else: + # Get historic ohlcv data (cached on disk). 
+            data = self.historic_ohlcv(pair=pair, ticker_interval=ticker_interval)
+        if len(data) == 0:
+            logger.warning(f"No data found for ({pair}, {ticker_interval}).")
+        return data
+
     def ticker(self, pair: str):
         """
         Return last ticker data
diff --git a/freqtrade/data/history.py b/freqtrade/data/history.py
index c7b3a28b0..007357d9a 100644
--- a/freqtrade/data/history.py
+++ b/freqtrade/data/history.py
@@ -43,7 +43,7 @@ def trim_tickerlist(tickerlist: List[Dict], timerange: TimeRange) -> List[Dict]:
             start_index += 1
 
     if timerange.stoptype == 'line':
-        start_index = len(tickerlist) + timerange.stopts
+        start_index = max(len(tickerlist) + timerange.stopts, 0)
     if timerange.stoptype == 'index':
         stop_index = timerange.stopts
     elif timerange.stoptype == 'date':
@@ -57,10 +57,8 @@ def trim_tickerlist(tickerlist: List[Dict], timerange: TimeRange) -> List[Dict]:
     return tickerlist[start_index:stop_index]
 
 
-def load_tickerdata_file(
-        datadir: Optional[Path], pair: str,
-        ticker_interval: str,
-        timerange: Optional[TimeRange] = None) -> Optional[list]:
+def load_tickerdata_file(datadir: Optional[Path], pair: str, ticker_interval: str,
+                         timerange: Optional[TimeRange] = None) -> Optional[list]:
     """
     Load a pair from file, either .json.gz or .json
     :return: tickerlist or None if unsuccesful
@@ -68,13 +66,22 @@ def load_tickerdata_file(
     filename = pair_data_filename(datadir, pair, ticker_interval)
     pairdata = misc.file_load_json(filename)
     if not pairdata:
-        return None
+        return []
 
     if timerange:
         pairdata = trim_tickerlist(pairdata, timerange)
     return pairdata
 
 
+def store_tickerdata_file(datadir: Optional[Path], pair: str,
+                          ticker_interval: str, data: list, is_zip: bool = False):
+    """
+    Stores tickerdata to file
+    """
+    filename = pair_data_filename(datadir, pair, ticker_interval)
+    misc.file_dump_json(filename, data, is_zip=is_zip)
+
+
 def load_pair_history(pair: str,
                       ticker_interval: str,
                       datadir: Optional[Path],
@@ -177,11 +184,14 @@ def pair_data_filename(datadir: Optional[Path], pair: str, ticker_interval: str)
     return filename
 
 
-def load_cached_data_for_updating(filename: Path, ticker_interval: str,
+def load_cached_data_for_updating(datadir: Optional[Path], pair: str, ticker_interval: str,
                                   timerange: Optional[TimeRange]) -> Tuple[List[Any],
                                                                            Optional[int]]:
     """
-    Load cached data and choose what part of the data should be updated
+    Load cached data to download more data.
+    If timerange is passed in, checks whether data from before the stored data will be downloaded.
+    If that's the case, then what's available should be completely overwritten.
+    Only used by download_pair_history().
     """
 
     since_ms = None
@@ -195,12 +205,11 @@ def load_cached_data_for_updating(filename: Path, ticker_interval: str,
         since_ms = arrow.utcnow().shift(minutes=num_minutes).timestamp * 1000
 
     # read the cached file
-    if filename.is_file():
-        with open(filename, "rt") as file:
-            data = misc.json_load(file)
-            # remove the last item, could be incomplete candle
-            if data:
-                data.pop()
+    # Intentionally don't pass timerange in - since we need to load the full dataset.
+    data = load_tickerdata_file(datadir, pair, ticker_interval)
+    # remove the last item, could be incomplete candle
+    if data:
+        data.pop()
     else:
         data = []
 
@@ -239,14 +248,12 @@ def download_pair_history(datadir: Optional[Path],
        )
 
     try:
-        filename = pair_data_filename(datadir, pair, ticker_interval)
-
         logger.info(
             f'Download history data for pair: "{pair}", interval: {ticker_interval} '
             f'and store in {datadir}.'
) - data, since_ms = load_cached_data_for_updating(filename, ticker_interval, timerange) + data, since_ms = load_cached_data_for_updating(datadir, pair, ticker_interval, timerange) logger.debug("Current Start: %s", misc.format_ms_time(data[1][0]) if data else 'None') logger.debug("Current End: %s", misc.format_ms_time(data[-1][0]) if data else 'None') @@ -262,7 +269,7 @@ def download_pair_history(datadir: Optional[Path], logger.debug("New Start: %s", misc.format_ms_time(data[0][0])) logger.debug("New End: %s", misc.format_ms_time(data[-1][0])) - misc.file_dump_json(filename, data) + store_tickerdata_file(datadir, pair, ticker_interval, data=data) return True except Exception as e: diff --git a/freqtrade/exchange/exchange.py b/freqtrade/exchange/exchange.py index 5119e0fcd..7aafff96a 100644 --- a/freqtrade/exchange/exchange.py +++ b/freqtrade/exchange/exchange.py @@ -408,12 +408,12 @@ class Exchange(object): except ccxt.InsufficientFunds as e: raise DependencyException( f'Insufficient funds to create {ordertype} {side} order on market {pair}.' - f'Tried to {side} amount {amount} at rate {rate} (total {rate * amount}).' + f'Tried to {side} amount {amount} at rate {rate}.' f'Message: {e}') from e except ccxt.InvalidOrder as e: raise DependencyException( f'Could not create {ordertype} {side} order on market {pair}.' - f'Tried to {side} amount {amount} at rate {rate} (total {rate * amount}).' + f'Tried to {side} amount {amount} at rate {rate}.' f'Message: {e}') from e except (ccxt.NetworkError, ccxt.ExchangeError) as e: raise TemporaryError( @@ -472,7 +472,7 @@ class Exchange(object): order = self.create_order(pair, ordertype, 'sell', amount, rate, params) logger.info('stoploss limit order added for %s. ' - 'stop price: %s. limit: %s' % (pair, stop_price, rate)) + 'stop price: %s. limit: %s', pair, stop_price, rate) return order @retrier @@ -696,8 +696,13 @@ class Exchange(object): @retrier def get_order(self, order_id: str, pair: str) -> Dict: if self._config['dry_run']: - order = self._dry_run_open_orders[order_id] - return order + try: + order = self._dry_run_open_orders[order_id] + return order + except KeyError as e: + # Gracefully handle errors with dry-run orders. + raise InvalidOrderException( + f'Tried to get an invalid dry-run-order (id: {order_id}). 
Message: {e}') from e try: return self._api.fetch_order(order_id, pair) except ccxt.InvalidOrder as e: diff --git a/freqtrade/freqtradebot.py b/freqtrade/freqtradebot.py index 68b45d96f..e5ecef8bf 100644 --- a/freqtrade/freqtradebot.py +++ b/freqtrade/freqtradebot.py @@ -16,6 +16,7 @@ from freqtrade import (DependencyException, OperationalException, InvalidOrderEx from freqtrade.data.converter import order_book_to_dataframe from freqtrade.data.dataprovider import DataProvider from freqtrade.edge import Edge +from freqtrade.configuration import validate_config_consistency from freqtrade.exchange import timeframe_to_minutes, timeframe_to_next_date from freqtrade.persistence import Trade from freqtrade.rpc import RPCManager, RPCMessageType @@ -51,6 +52,9 @@ class FreqtradeBot(object): self.strategy: IStrategy = StrategyResolver(self.config).strategy + # Check config consistency here since strategies can set certain options + validate_config_consistency(config) + self.rpc: RPCManager = RPCManager(self) self.exchange = ExchangeResolver(self.config['exchange']['name'], self.config).exchange diff --git a/freqtrade/misc.py b/freqtrade/misc.py index 05946e008..d01d6a254 100644 --- a/freqtrade/misc.py +++ b/freqtrade/misc.py @@ -5,11 +5,11 @@ import gzip import logging import re from datetime import datetime +from pathlib import Path import numpy as np import rapidjson - logger = logging.getLogger(__name__) @@ -39,7 +39,7 @@ def datesarray_to_datetimearray(dates: np.ndarray) -> np.ndarray: return dates.dt.to_pydatetime() -def file_dump_json(filename, data, is_zip=False) -> None: +def file_dump_json(filename: Path, data, is_zip=False) -> None: """ Dump JSON data into a file :param filename: file to create @@ -49,8 +49,8 @@ def file_dump_json(filename, data, is_zip=False) -> None: logger.info(f'dumping json to "{filename}"') if is_zip: - if not filename.endswith('.gz'): - filename = filename + '.gz' + if filename.suffix != '.gz': + filename = filename.with_suffix('.gz') with gzip.open(filename, 'w') as fp: rapidjson.dump(data, fp, default=str, number_mode=rapidjson.NM_NATIVE) else: diff --git a/freqtrade/optimize/backtesting.py b/freqtrade/optimize/backtesting.py index 8f40a6582..3878d7603 100644 --- a/freqtrade/optimize/backtesting.py +++ b/freqtrade/optimize/backtesting.py @@ -190,7 +190,7 @@ class Backtesting(object): return tabulate(tabular_data, headers=headers, # type: ignore floatfmt=floatfmt, tablefmt="pipe") - def _store_backtest_result(self, recordfilename: str, results: DataFrame, + def _store_backtest_result(self, recordfilename: Path, results: DataFrame, strategyname: Optional[str] = None) -> None: records = [(t.pair, t.profit_percent, t.open_time.timestamp(), @@ -201,10 +201,10 @@ class Backtesting(object): if records: if strategyname: # Inject strategyname to filename - recname = Path(recordfilename) - recordfilename = str(Path.joinpath( - recname.parent, f'{recname.stem}-{strategyname}').with_suffix(recname.suffix)) - logger.info('Dumping backtest results to %s', recordfilename) + recordfilename = Path.joinpath( + recordfilename.parent, + f'{recordfilename.stem}-{strategyname}').with_suffix(recordfilename.suffix) + logger.info(f'Dumping backtest results to {recordfilename}') file_dump_json(recordfilename, records) def _get_ticker_list(self, processed) -> Dict[str, DataFrame]: @@ -458,7 +458,7 @@ class Backtesting(object): for strategy, results in all_results.items(): if self.config.get('export', False): - self._store_backtest_result(self.config['exportfilename'], results, + 
self._store_backtest_result(Path(self.config['exportfilename']), results, strategy if len(self.strategylist) > 1 else None) print(f"Result for strategy {strategy}") diff --git a/freqtrade/pairlist/VolumePairList.py b/freqtrade/pairlist/VolumePairList.py index 9a2e2eac4..b9b7977ab 100644 --- a/freqtrade/pairlist/VolumePairList.py +++ b/freqtrade/pairlist/VolumePairList.py @@ -55,7 +55,6 @@ class VolumePairList(IPairList): # Generate dynamic whitelist self._whitelist = self._gen_pair_whitelist( self._config['stake_currency'], self._sort_key)[:self._number_pairs] - logger.info(f"Searching pairs: {self._whitelist}") @cached(TTLCache(maxsize=1, ttl=1800)) def _gen_pair_whitelist(self, base_currency: str, key: str) -> List[str]: @@ -92,4 +91,6 @@ class VolumePairList(IPairList): valid_tickers.remove(t) pairs = [s['symbol'] for s in valid_tickers] + logger.info(f"Searching pairs: {self._whitelist}") + return pairs diff --git a/freqtrade/resolvers/iresolver.py b/freqtrade/resolvers/iresolver.py index 841c3cf43..310c54015 100644 --- a/freqtrade/resolvers/iresolver.py +++ b/freqtrade/resolvers/iresolver.py @@ -29,7 +29,8 @@ class IResolver(object): """ # Generate spec based on absolute path - spec = importlib.util.spec_from_file_location('unknown', str(module_path)) + # Pass object_name as first argument to have logging print a reasonable name. + spec = importlib.util.spec_from_file_location(object_name, str(module_path)) module = importlib.util.module_from_spec(spec) try: spec.loader.exec_module(module) # type: ignore # importlib does not use typehints diff --git a/freqtrade/tests/data/test_dataprovider.py b/freqtrade/tests/data/test_dataprovider.py index 993f0b59b..2272f69a3 100644 --- a/freqtrade/tests/data/test_dataprovider.py +++ b/freqtrade/tests/data/test_dataprovider.py @@ -13,6 +13,7 @@ def test_ohlcv(mocker, default_conf, ticker_history): exchange = get_patched_exchange(mocker, default_conf) exchange._klines[("XRP/BTC", ticker_interval)] = ticker_history exchange._klines[("UNITTEST/BTC", ticker_interval)] = ticker_history + dp = DataProvider(default_conf, exchange) assert dp.runmode == RunMode.DRY_RUN assert ticker_history.equals(dp.ohlcv("UNITTEST/BTC", ticker_interval)) @@ -37,11 +38,9 @@ def test_ohlcv(mocker, default_conf, ticker_history): def test_historic_ohlcv(mocker, default_conf, ticker_history): - historymock = MagicMock(return_value=ticker_history) mocker.patch("freqtrade.data.dataprovider.load_pair_history", historymock) - # exchange = get_patched_exchange(mocker, default_conf) dp = DataProvider(default_conf, None) data = dp.historic_ohlcv("UNITTEST/BTC", "5m") assert isinstance(data, DataFrame) @@ -51,14 +50,47 @@ def test_historic_ohlcv(mocker, default_conf, ticker_history): assert historymock.call_args_list[0][1]["ticker_interval"] == "5m" +def test_get_pair_dataframe(mocker, default_conf, ticker_history): + default_conf["runmode"] = RunMode.DRY_RUN + ticker_interval = default_conf["ticker_interval"] + exchange = get_patched_exchange(mocker, default_conf) + exchange._klines[("XRP/BTC", ticker_interval)] = ticker_history + exchange._klines[("UNITTEST/BTC", ticker_interval)] = ticker_history + + dp = DataProvider(default_conf, exchange) + assert dp.runmode == RunMode.DRY_RUN + assert ticker_history.equals(dp.get_pair_dataframe("UNITTEST/BTC", ticker_interval)) + assert isinstance(dp.get_pair_dataframe("UNITTEST/BTC", ticker_interval), DataFrame) + assert dp.get_pair_dataframe("UNITTEST/BTC", ticker_interval) is not ticker_history + assert not 
dp.get_pair_dataframe("UNITTEST/BTC", ticker_interval).empty + assert dp.get_pair_dataframe("NONESENSE/AAA", ticker_interval).empty + + # Test with and without parameter + assert dp.get_pair_dataframe("UNITTEST/BTC", + ticker_interval).equals(dp.get_pair_dataframe("UNITTEST/BTC")) + + default_conf["runmode"] = RunMode.LIVE + dp = DataProvider(default_conf, exchange) + assert dp.runmode == RunMode.LIVE + assert isinstance(dp.get_pair_dataframe("UNITTEST/BTC", ticker_interval), DataFrame) + assert dp.get_pair_dataframe("NONESENSE/AAA", ticker_interval).empty + + historymock = MagicMock(return_value=ticker_history) + mocker.patch("freqtrade.data.dataprovider.load_pair_history", historymock) + default_conf["runmode"] = RunMode.BACKTEST + dp = DataProvider(default_conf, exchange) + assert dp.runmode == RunMode.BACKTEST + assert isinstance(dp.get_pair_dataframe("UNITTEST/BTC", ticker_interval), DataFrame) + # assert dp.get_pair_dataframe("NONESENSE/AAA", ticker_interval).empty + + def test_available_pairs(mocker, default_conf, ticker_history): exchange = get_patched_exchange(mocker, default_conf) - ticker_interval = default_conf["ticker_interval"] exchange._klines[("XRP/BTC", ticker_interval)] = ticker_history exchange._klines[("UNITTEST/BTC", ticker_interval)] = ticker_history - dp = DataProvider(default_conf, exchange) + dp = DataProvider(default_conf, exchange) assert len(dp.available_pairs) == 2 assert dp.available_pairs == [ ("XRP/BTC", ticker_interval), diff --git a/freqtrade/tests/data/test_history.py b/freqtrade/tests/data/test_history.py index ea56b4bec..7360f3c1c 100644 --- a/freqtrade/tests/data/test_history.py +++ b/freqtrade/tests/data/test_history.py @@ -178,16 +178,13 @@ def test_load_cached_data_for_updating(mocker) -> None: # timeframe starts earlier than the cached data # should fully update data timerange = TimeRange('date', None, test_data[0][0] / 1000 - 1, 0) - data, start_ts = load_cached_data_for_updating(test_filename, - '1m', - timerange) + data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange) assert data == [] assert start_ts == test_data[0][0] - 1000 # same with 'line' timeframe num_lines = (test_data[-1][0] - test_data[1][0]) / 1000 / 60 + 120 - data, start_ts = load_cached_data_for_updating(test_filename, - '1m', + data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', TimeRange(None, 'line', 0, -num_lines)) assert data == [] assert start_ts < test_data[0][0] - 1 @@ -195,36 +192,29 @@ def test_load_cached_data_for_updating(mocker) -> None: # timeframe starts in the center of the cached data # should return the chached data w/o the last item timerange = TimeRange('date', None, test_data[0][0] / 1000 + 1, 0) - data, start_ts = load_cached_data_for_updating(test_filename, - '1m', - timerange) + data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange) assert data == test_data[:-1] assert test_data[-2][0] < start_ts < test_data[-1][0] # same with 'line' timeframe num_lines = (test_data[-1][0] - test_data[1][0]) / 1000 / 60 + 30 timerange = TimeRange(None, 'line', 0, -num_lines) - data, start_ts = load_cached_data_for_updating(test_filename, - '1m', - timerange) + data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange) assert data == test_data[:-1] assert test_data[-2][0] < start_ts < test_data[-1][0] # timeframe starts after the chached data # should return the chached data w/o the last item timerange = TimeRange('date', None, test_data[-1][0] / 1000 
+ 1, 0) - data, start_ts = load_cached_data_for_updating(test_filename, - '1m', - timerange) + data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange) assert data == test_data[:-1] assert test_data[-2][0] < start_ts < test_data[-1][0] - # same with 'line' timeframe + # Try loading last 30 lines. + # Not supported by load_cached_data_for_updating, we always need to get the full data. num_lines = 30 timerange = TimeRange(None, 'line', 0, -num_lines) - data, start_ts = load_cached_data_for_updating(test_filename, - '1m', - timerange) + data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange) assert data == test_data[:-1] assert test_data[-2][0] < start_ts < test_data[-1][0] @@ -232,35 +222,27 @@ def test_load_cached_data_for_updating(mocker) -> None: # should return the chached data w/o the last item num_lines = 30 timerange = TimeRange(None, 'line', 0, -num_lines) - data, start_ts = load_cached_data_for_updating(test_filename, - '1m', - timerange) + data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange) assert data == test_data[:-1] assert test_data[-2][0] < start_ts < test_data[-1][0] # no datafile exist # should return timestamp start time timerange = TimeRange('date', None, now_ts - 10000, 0) - data, start_ts = load_cached_data_for_updating(test_filename.with_name('unexist'), - '1m', - timerange) + data, start_ts = load_cached_data_for_updating(datadir, 'NONEXIST/BTC', '1m', timerange) assert data == [] assert start_ts == (now_ts - 10000) * 1000 # same with 'line' timeframe num_lines = 30 timerange = TimeRange(None, 'line', 0, -num_lines) - data, start_ts = load_cached_data_for_updating(test_filename.with_name('unexist'), - '1m', - timerange) + data, start_ts = load_cached_data_for_updating(datadir, 'NONEXIST/BTC', '1m', timerange) assert data == [] assert start_ts == (now_ts - num_lines * 60) * 1000 # no datafile exist, no timeframe is set # should return an empty array and None - data, start_ts = load_cached_data_for_updating(test_filename.with_name('unexist'), - '1m', - None) + data, start_ts = load_cached_data_for_updating(datadir, 'NONEXIST/BTC', '1m', None) assert data == [] assert start_ts is None diff --git a/freqtrade/tests/exchange/test_exchange.py b/freqtrade/tests/exchange/test_exchange.py index 6b833054d..e453b5dca 100644 --- a/freqtrade/tests/exchange/test_exchange.py +++ b/freqtrade/tests/exchange/test_exchange.py @@ -656,7 +656,13 @@ def test_buy_prod(default_conf, mocker, exchange_name): with pytest.raises(DependencyException): api_mock.create_order = MagicMock(side_effect=ccxt.InvalidOrder("Order not found")) exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name) - exchange.buy(pair='ETH/BTC', ordertype=order_type, + exchange.buy(pair='ETH/BTC', ordertype='limit', + amount=1, rate=200, time_in_force=time_in_force) + + with pytest.raises(DependencyException): + api_mock.create_order = MagicMock(side_effect=ccxt.InvalidOrder("Order not found")) + exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name) + exchange.buy(pair='ETH/BTC', ordertype='market', amount=1, rate=200, time_in_force=time_in_force) with pytest.raises(TemporaryError): @@ -779,7 +785,13 @@ def test_sell_prod(default_conf, mocker, exchange_name): with pytest.raises(DependencyException): api_mock.create_order = MagicMock(side_effect=ccxt.InvalidOrder("Order not found")) exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name) - 
exchange.sell(pair='ETH/BTC', ordertype=order_type, amount=1, rate=200) + exchange.sell(pair='ETH/BTC', ordertype='limit', amount=1, rate=200) + + # Market orders don't require price, so the behaviour is slightly different + with pytest.raises(DependencyException): + api_mock.create_order = MagicMock(side_effect=ccxt.InvalidOrder("Order not found")) + exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name) + exchange.sell(pair='ETH/BTC', ordertype='market', amount=1, rate=200) with pytest.raises(TemporaryError): api_mock.create_order = MagicMock(side_effect=ccxt.NetworkError("No Connection")) @@ -1328,6 +1340,9 @@ def test_get_order(default_conf, mocker, exchange_name): print(exchange.get_order('X', 'TKN/BTC')) assert exchange.get_order('X', 'TKN/BTC').myid == 123 + with pytest.raises(InvalidOrderException, match=r'Tried to get an invalid dry-run-order.*'): + exchange.get_order('Y', 'TKN/BTC') + default_conf['dry_run'] = False api_mock = MagicMock() api_mock.fetch_order = MagicMock(return_value=456) diff --git a/freqtrade/tests/optimize/test_backtesting.py b/freqtrade/tests/optimize/test_backtesting.py index 9ed7e7296..02e9a9c28 100644 --- a/freqtrade/tests/optimize/test_backtesting.py +++ b/freqtrade/tests/optimize/test_backtesting.py @@ -2,6 +2,7 @@ import math import random +from pathlib import Path from unittest.mock import MagicMock import numpy as np @@ -785,10 +786,10 @@ def test_backtest_record(default_conf, fee, mocker): # reset test to test with strategy name names = [] records = [] - backtesting._store_backtest_result("backtest-result.json", results, "DefStrat") + backtesting._store_backtest_result(Path("backtest-result.json"), results, "DefStrat") assert len(results) == 4 # Assert file_dump_json was only called once - assert names == ['backtest-result-DefStrat.json'] + assert names == [Path('backtest-result-DefStrat.json')] records = records[0] # Ensure records are of correct type assert len(records) == 4 diff --git a/freqtrade/tests/test_configuration.py b/freqtrade/tests/test_configuration.py index b6e8a76d9..5cfee0698 100644 --- a/freqtrade/tests/test_configuration.py +++ b/freqtrade/tests/test_configuration.py @@ -2,7 +2,6 @@ import json import logging import warnings -from argparse import Namespace from copy import deepcopy from pathlib import Path from unittest.mock import MagicMock @@ -11,10 +10,10 @@ import pytest from jsonschema import Draft4Validator, ValidationError, validate from freqtrade import OperationalException, constants -from freqtrade.configuration import Arguments, Configuration +from freqtrade.configuration import Arguments, Configuration, validate_config_consistency from freqtrade.configuration.check_exchange import check_exchange +from freqtrade.configuration.config_validation import validate_config_schema from freqtrade.configuration.create_datadir import create_datadir -from freqtrade.configuration.json_schema import validate_config_schema from freqtrade.configuration.load_config import load_config_file from freqtrade.constants import DEFAULT_DB_DRYRUN_URL, DEFAULT_DB_PROD_URL from freqtrade.loggers import _set_loggers @@ -625,21 +624,34 @@ def test_validate_tsl(default_conf): with pytest.raises(OperationalException, match=r'The config trailing_only_offset_is_reached needs ' 'trailing_stop_positive_offset to be more than 0 in your config.'): - configuration = Configuration(Namespace()) - configuration._validate_config_consistency(default_conf) + validate_config_consistency(default_conf) 
default_conf['trailing_stop_positive_offset'] = 0.01 default_conf['trailing_stop_positive'] = 0.015 with pytest.raises(OperationalException, match=r'The config trailing_stop_positive_offset needs ' 'to be greater than trailing_stop_positive_offset in your config.'): - configuration = Configuration(Namespace()) - configuration._validate_config_consistency(default_conf) + validate_config_consistency(default_conf) default_conf['trailing_stop_positive'] = 0.01 default_conf['trailing_stop_positive_offset'] = 0.015 - Configuration(Namespace()) - configuration._validate_config_consistency(default_conf) + validate_config_consistency(default_conf) + + +def test_validate_edge(edge_conf): + edge_conf.update({"pairlist": { + "method": "VolumePairList", + }}) + + with pytest.raises(OperationalException, + match="Edge and VolumePairList are incompatible, " + "Edge will override whatever pairs VolumePairlist selects."): + validate_config_consistency(edge_conf) + + edge_conf.update({"pairlist": { + "method": "StaticPairList", + }}) + validate_config_consistency(edge_conf) def test_load_config_test_comments() -> None: diff --git a/freqtrade/tests/test_misc.py b/freqtrade/tests/test_misc.py index 1a6b2a92d..c55083e64 100644 --- a/freqtrade/tests/test_misc.py +++ b/freqtrade/tests/test_misc.py @@ -1,6 +1,7 @@ # pragma pylint: disable=missing-docstring,C0103 import datetime +from pathlib import Path from unittest.mock import MagicMock from freqtrade.data.converter import parse_ticker_dataframe @@ -34,12 +35,12 @@ def test_datesarray_to_datetimearray(ticker_history_list): def test_file_dump_json(mocker) -> None: file_open = mocker.patch('freqtrade.misc.open', MagicMock()) json_dump = mocker.patch('rapidjson.dump', MagicMock()) - file_dump_json('somefile', [1, 2, 3]) + file_dump_json(Path('somefile'), [1, 2, 3]) assert file_open.call_count == 1 assert json_dump.call_count == 1 file_open = mocker.patch('freqtrade.misc.gzip.open', MagicMock()) json_dump = mocker.patch('rapidjson.dump', MagicMock()) - file_dump_json('somefile', [1, 2, 3], True) + file_dump_json(Path('somefile'), [1, 2, 3], True) assert file_open.call_count == 1 assert json_dump.call_count == 1 diff --git a/requirements-common.txt b/requirements-common.txt index 651be7611..3d80c3ef5 100644 --- a/requirements-common.txt +++ b/requirements-common.txt @@ -1,7 +1,7 @@ # requirements without requirements installable via conda # mainly used for Raspberry pi installs -ccxt==1.18.1043 -SQLAlchemy==1.3.6 +ccxt==1.18.1068 +SQLAlchemy==1.3.7 python-telegram-bot==11.1.0 arrow==0.14.5 cachetools==3.1.1 diff --git a/requirements-dev.txt b/requirements-dev.txt index 03b37417e..6436c60e4 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -7,7 +7,7 @@ flake8==3.7.8 flake8-type-annotations==0.1.0 flake8-tidy-imports==2.0.0 mypy==0.720 -pytest==5.0.1 +pytest==5.1.0 pytest-asyncio==0.10.0 pytest-cov==2.7.1 pytest-mock==1.10.4
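The strategy-customization docs above switch the example to `DataProvider.get_pair_dataframe()`. As a complement, here is a minimal sketch of where that call typically sits inside a strategy. The class name, the informative pair and the ROI/stoploss values are hypothetical and not part of this PR; only `informative_pairs()` and `self.dp.get_pair_dataframe()` come from the changes above.

``` python
from pandas import DataFrame

from freqtrade.strategy.interface import IStrategy


class ExampleInformativeStrategy(IStrategy):
    # Hypothetical settings - pick values that suit your own strategy.
    minimal_roi = {"0": 0.05}
    stoploss = -0.10
    ticker_interval = '5m'

    def informative_pairs(self):
        # One informative pair on a higher timeframe (illustrative choice).
        return [("BTC/USDT", "1h")]

    def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        if self.dp:
            inf_pair, inf_timeframe = self.informative_pairs()[0]
            # Returns historic data when backtesting and cached live data in dry/live runs.
            informative = self.dp.get_pair_dataframe(pair=inf_pair,
                                                     ticker_interval=inf_timeframe)
            if not informative.empty:
                # Merge or resample `informative` here; per the warning above, take care
                # not to look into the future when backtesting.
                pass
        return dataframe

    def populate_buy_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        dataframe['buy'] = 0
        return dataframe

    def populate_sell_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        dataframe['sell'] = 0
        return dataframe
```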
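The new `freqtrade/configuration/config_validation.py` carries over the `_extend_validator()` helper, which teaches jsonschema's `Draft4Validator` to write schema defaults back into the validated dict. The snippet below is a standalone sketch of that behaviour: the helper is copied from the file added above, but the toy schema and config are illustrative only and are not freqtrade's `CONF_SCHEMA`.

``` python
from jsonschema import Draft4Validator, validators


def _extend_validator(validator_class):
    validate_properties = validator_class.VALIDATORS['properties']

    def set_defaults(validator, properties, instance, schema):
        # Write schema defaults into the instance before running normal validation.
        for prop, subschema in properties.items():
            if 'default' in subschema:
                instance.setdefault(prop, subschema['default'])
        for error in validate_properties(validator, properties, instance, schema):
            yield error

    return validators.extend(validator_class, {'properties': set_defaults})


ExtendedValidator = _extend_validator(Draft4Validator)

toy_schema = {
    'type': 'object',
    'properties': {
        'max_open_trades': {'type': 'integer', 'default': 3},
        'dry_run': {'type': 'boolean', 'default': True},
    },
}

config = {'max_open_trades': 5}
ExtendedValidator(toy_schema).validate(config)
print(config)  # {'max_open_trades': 5, 'dry_run': True} - the default was filled in
```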
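`validate_config_consistency()` now also rejects Edge combined with `VolumePairList` (see `_validate_edge()` and `test_validate_edge` above). Below is a minimal sketch of how that check surfaces, assuming a deliberately stripped-down config dict that only contains the keys the validator inspects; a real freqtrade config has many more entries.

``` python
from freqtrade import OperationalException
from freqtrade.configuration import validate_config_consistency

# Partial, illustrative config - enough for the consistency checks shown in this PR.
conf = {
    'edge': {'enabled': True},
    'pairlist': {'method': 'VolumePairList'},
}

try:
    validate_config_consistency(conf)
except OperationalException as e:
    # "Edge and VolumePairList are incompatible, ..."
    print(e)

# Switching to a static pairlist passes the check.
conf['pairlist'] = {'method': 'StaticPairList'}
validate_config_consistency(conf)
```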
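`Exchange.get_order()` now raises `InvalidOrderException` for unknown dry-run order ids instead of leaking a bare `KeyError`. The sketch below shows one way calling code might guard for that; the `safe_get_order()` wrapper and the assumption of an already-initialized `Exchange` instance are hypothetical and not part of this PR.

``` python
from freqtrade import InvalidOrderException


def safe_get_order(exchange, order_id: str, pair: str):
    """Fetch an order, treating unknown (e.g. stale dry-run) order ids as gone."""
    try:
        return exchange.get_order(order_id, pair)
    except InvalidOrderException as e:
        # Raised by the dry-run path added in this PR when the id is not tracked.
        print(f"Order lookup failed: {e}")
        return None
```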