Merge branch 'develop' into download_module
commit e8ee087e9d

.dependabot/config.yml (new file, 17 lines)
@@ -0,0 +1,17 @@
+version: 1
+
+update_configs:
+  - package_manager: "python"
+    directory: "/"
+    update_schedule: "weekly"
+    allowed_updates:
+      - match:
+          update_type: "all"
+    target_branch: "develop"
+
+  - package_manager: "docker"
+    directory: "/"
+    update_schedule: "daily"
+    allowed_updates:
+      - match:
+          update_type: "all"
.pyup.yml (deleted file, 37 lines)
@@ -1,37 +0,0 @@
-# autogenerated pyup.io config file
-# see https://pyup.io/docs/configuration/ for all available options
-
-# configure updates globally
-# default: all
-# allowed: all, insecure, False
-update: all
-
-# configure dependency pinning globally
-# default: True
-# allowed: True, False
-pin: True
-
-# update schedule
-# default: empty
-# allowed: "every day", "every week", ..
-schedule: "every week"
-
-
-search: False
-# Specify requirement files by hand, default is empty
-# default: empty
-# allowed: list
-requirements:
-    - requirements.txt
-    - requirements-dev.txt
-    - requirements-plot.txt
-    - requirements-common.txt
-
-
-# configure the branch prefix the bot is using
-# default: pyup-
-branch_prefix: pyup/
-
-# allow to close stale PRs
-# default: True
-close_prs: True
@@ -26,6 +26,10 @@ To update the image, simply run the above commands again and restart your runnin
 Should you require additional libraries, please [build the image yourself](#build-your-own-docker-image).
 
+!!! Note Docker image update frequency
+    The official docker images with tags `master`, `develop` and `latest` are automatically rebuilt once a week to keep the base image up to date.
+    In addition to that, every merge to `develop` will trigger a rebuild for `develop` and `latest`.
+
 ### Prepare the configuration files
 
 Even though you will use docker, you'll still need some files from the github repository.
@@ -274,27 +274,24 @@ Please always check the mode of operation to select the correct method to get da
 
 #### Possible options for DataProvider
 
-- `available_pairs` - Property with tuples listing cached pairs with their intervals. (pair, interval)
-- `ohlcv(pair, ticker_interval)` - Currently cached ticker data for all pairs in the whitelist, returns DataFrame or empty DataFrame
-- `historic_ohlcv(pair, ticker_interval)` - Data stored on disk
+- `available_pairs` - Property with tuples listing cached pairs with their intervals (pair, interval).
+- `ohlcv(pair, ticker_interval)` - Currently cached ticker data for the pair, returns DataFrame or empty DataFrame.
+- `historic_ohlcv(pair, ticker_interval)` - Returns historical data stored on disk.
+- `get_pair_dataframe(pair, ticker_interval)` - This is a universal method, which returns either historical data (for backtesting) or cached live data (for the Dry-Run and Live-Run modes).
 - `runmode` - Property containing the current runmode.
 
-#### ohlcv / historic_ohlcv
+#### Example: fetch live ohlcv / historic data for the first informative pair
 
 ``` python
 if self.dp:
-    if self.dp.runmode in ('live', 'dry_run'):
-        if (f'{self.stake_currency}/BTC', self.ticker_interval) in self.dp.available_pairs:
-            data_eth = self.dp.ohlcv(pair='{self.stake_currency}/BTC',
-                                     ticker_interval=self.ticker_interval)
-    else:
-        # Get historic ohlcv data (cached on disk).
-        history_eth = self.dp.historic_ohlcv(pair='{self.stake_currency}/BTC',
-                                             ticker_interval='1h')
+    inf_pair, inf_timeframe = self.informative_pairs()[0]
+    informative = self.dp.get_pair_dataframe(pair=inf_pair,
+                                             ticker_interval=inf_timeframe)
 ```
 
 !!! Warning Warning about backtesting
-    Be carefull when using dataprovider in backtesting. `historic_ohlcv()` provides the full time-range in one go,
+    Be careful when using dataprovider in backtesting. `historic_ohlcv()` (and `get_pair_dataframe()`
+    for the backtesting runmode) provides the full time-range in one go,
     so please be aware of it and make sure to not "look into the future" to avoid surprises when running in dry/live mode).
 
 !!! Warning Warning in hyperopt
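As an additional usage sketch (not part of this commit's docs; the pair name is an assumption), live callers can still guard `ohlcv()` access with the `available_pairs` property:

``` python
# Hedged sketch: only query cached live candles for pairs the bot
# actually keeps cached. available_pairs lists (pair, interval) tuples.
if self.dp and self.dp.runmode in ('live', 'dry_run'):
    if ('ETH/BTC', self.ticker_interval) in self.dp.available_pairs:
        eth_df = self.dp.ohlcv(pair='ETH/BTC',
                               ticker_interval=self.ticker_interval)
```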
freqtrade/configuration/__init__.py
@@ -1,3 +1,4 @@
 from freqtrade.configuration.arguments import Arguments  # noqa: F401
 from freqtrade.configuration.timerange import TimeRange  # noqa: F401
 from freqtrade.configuration.configuration import Configuration  # noqa: F401
+from freqtrade.configuration.config_validation import validate_config_consistency  # noqa: F401
freqtrade/configuration/config_validation.py (new file, 102 lines)
@@ -0,0 +1,102 @@
+import logging
+from typing import Any, Dict
+
+from jsonschema import Draft4Validator, validators
+from jsonschema.exceptions import ValidationError, best_match
+
+from freqtrade import constants, OperationalException
+
+
+logger = logging.getLogger(__name__)
+
+
+def _extend_validator(validator_class):
+    """
+    Extended validator for the Freqtrade configuration JSON Schema.
+    Currently it only handles defaults for subschemas.
+    """
+    validate_properties = validator_class.VALIDATORS['properties']
+
+    def set_defaults(validator, properties, instance, schema):
+        for prop, subschema in properties.items():
+            if 'default' in subschema:
+                instance.setdefault(prop, subschema['default'])
+
+        for error in validate_properties(
+            validator, properties, instance, schema,
+        ):
+            yield error
+
+    return validators.extend(
+        validator_class, {'properties': set_defaults}
+    )
+
+
+FreqtradeValidator = _extend_validator(Draft4Validator)
+
+
+def validate_config_schema(conf: Dict[str, Any]) -> Dict[str, Any]:
+    """
+    Validate the configuration follows the Config Schema
+    :param conf: Config in JSON format
+    :return: Returns the config if valid, otherwise throw an exception
+    """
+    try:
+        FreqtradeValidator(constants.CONF_SCHEMA).validate(conf)
+        return conf
+    except ValidationError as e:
+        logger.critical(
+            f"Invalid configuration. See config.json.example. Reason: {e}"
+        )
+        raise ValidationError(
+            best_match(Draft4Validator(constants.CONF_SCHEMA).iter_errors(conf)).message
+        )
+
+
+def validate_config_consistency(conf: Dict[str, Any]) -> None:
+    """
+    Validate the configuration consistency.
+    Should be run after loading both configuration and strategy,
+    since strategies can set certain configuration settings too.
+    :param conf: Config in JSON format
+    :return: Returns None if everything is ok, otherwise throw an OperationalException
+    """
+    # validating trailing stoploss
+    _validate_trailing_stoploss(conf)
+    _validate_edge(conf)
+
+
+def _validate_trailing_stoploss(conf: Dict[str, Any]) -> None:
+
+    # Skip if trailing stoploss is not activated
+    if not conf.get('trailing_stop', False):
+        return
+
+    tsl_positive = float(conf.get('trailing_stop_positive', 0))
+    tsl_offset = float(conf.get('trailing_stop_positive_offset', 0))
+    tsl_only_offset = conf.get('trailing_only_offset_is_reached', False)
+
+    if tsl_only_offset:
+        if tsl_positive == 0.0:
+            raise OperationalException(
+                f'The config trailing_only_offset_is_reached needs '
+                'trailing_stop_positive_offset to be more than 0 in your config.')
+    if tsl_positive > 0 and 0 < tsl_offset <= tsl_positive:
+        raise OperationalException(
+            f'The config trailing_stop_positive_offset needs '
+            'to be greater than trailing_stop_positive_offset in your config.')
+
+
+def _validate_edge(conf: Dict[str, Any]) -> None:
+    """
+    Edge and Dynamic whitelist should not both be enabled, since edge overrides dynamic whitelists.
+    """
+
+    if not conf.get('edge', {}).get('enabled'):
+        return
+
+    if conf.get('pairlist', {}).get('method') == 'VolumePairList':
+        raise OperationalException(
+            "Edge and VolumePairList are incompatible, "
+            "Edge will override whatever pairs VolumePairlist selects."
+        )
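To make the subschema-default handling above concrete, here is a minimal, self-contained sketch (the toy schema and values are assumptions, not freqtrade's real CONF_SCHEMA):

``` python
from jsonschema import Draft4Validator, validators

# Same recipe as _extend_validator() above: wrap 'properties' validation
# so that declared defaults are written into the config being validated.
def extend_with_defaults(validator_class):
    validate_properties = validator_class.VALIDATORS['properties']

    def set_defaults(validator, properties, instance, schema):
        for prop, subschema in properties.items():
            if 'default' in subschema:
                instance.setdefault(prop, subschema['default'])
        yield from validate_properties(validator, properties, instance, schema)

    return validators.extend(validator_class, {'properties': set_defaults})

schema = {'type': 'object',
          'properties': {'max_open_trades': {'type': 'integer', 'default': 3}}}
conf = {}
extend_with_defaults(Draft4Validator)(schema).validate(conf)
assert conf == {'max_open_trades': 3}  # default injected during validation
```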
freqtrade/configuration/configuration.py
@@ -7,10 +7,11 @@ from argparse import Namespace
 from pathlib import Path
 from typing import Any, Callable, Dict, List, Optional
 
-from freqtrade import OperationalException, constants
+from freqtrade import constants, OperationalException
 from freqtrade.configuration.check_exchange import check_exchange
 from freqtrade.configuration.create_datadir import create_datadir
-from freqtrade.configuration.json_schema import validate_config_schema
+from freqtrade.configuration.config_validation import (validate_config_schema,
+                                                       validate_config_consistency)
 from freqtrade.configuration.load_config import load_config_file
 from freqtrade.loggers import setup_logging
 from freqtrade.misc import deep_merge_dicts, json_load
@@ -81,8 +82,6 @@ class Configuration(object):
         # Load all configs
         config: Dict[str, Any] = Configuration.from_files(self.args.config)
 
-        self._validate_config_consistency(config)
-
         self._process_common_options(config)
 
         self._process_optimize_options(config)
@@ -96,6 +95,8 @@ class Configuration(object):
 
         self._resolve_pairs_list(config)
 
+        validate_config_consistency(config)
+
         return config
 
     def _process_logging_options(self, config: Dict[str, Any]) -> None:
@@ -304,35 +305,6 @@ class Configuration(object):
 
         config.update({'runmode': self.runmode})
 
-    def _validate_config_consistency(self, conf: Dict[str, Any]) -> None:
-        """
-        Validate the configuration consistency
-        :param conf: Config in JSON format
-        :return: Returns None if everything is ok, otherwise throw an OperationalException
-        """
-        # validating trailing stoploss
-        self._validate_trailing_stoploss(conf)
-
-    def _validate_trailing_stoploss(self, conf: Dict[str, Any]) -> None:
-
-        # Skip if trailing stoploss is not activated
-        if not conf.get('trailing_stop', False):
-            return
-
-        tsl_positive = float(conf.get('trailing_stop_positive', 0))
-        tsl_offset = float(conf.get('trailing_stop_positive_offset', 0))
-        tsl_only_offset = conf.get('trailing_only_offset_is_reached', False)
-
-        if tsl_only_offset:
-            if tsl_positive == 0.0:
-                raise OperationalException(
-                    f'The config trailing_only_offset_is_reached needs '
-                    'trailing_stop_positive_offset to be more than 0 in your config.')
-        if tsl_positive > 0 and 0 < tsl_offset <= tsl_positive:
-            raise OperationalException(
-                f'The config trailing_stop_positive_offset needs '
-                'to be greater than trailing_stop_positive_offset in your config.')
-
     def _args_to_config(self, config: Dict[str, Any], argname: str,
                         logstring: str, logfun: Optional[Callable] = None,
                         deprecated_msg: Optional[str] = None) -> None:
freqtrade/configuration/json_schema.py (deleted file, 53 lines; contents moved to config_validation.py)
@@ -1,53 +0,0 @@
-import logging
-from typing import Any, Dict
-
-from jsonschema import Draft4Validator, validators
-from jsonschema.exceptions import ValidationError, best_match
-
-from freqtrade import constants
-
-
-logger = logging.getLogger(__name__)
-
-
-def _extend_validator(validator_class):
-    """
-    Extended validator for the Freqtrade configuration JSON Schema.
-    Currently it only handles defaults for subschemas.
-    """
-    validate_properties = validator_class.VALIDATORS['properties']
-
-    def set_defaults(validator, properties, instance, schema):
-        for prop, subschema in properties.items():
-            if 'default' in subschema:
-                instance.setdefault(prop, subschema['default'])
-
-        for error in validate_properties(
-            validator, properties, instance, schema,
-        ):
-            yield error
-
-    return validators.extend(
-        validator_class, {'properties': set_defaults}
-    )
-
-
-FreqtradeValidator = _extend_validator(Draft4Validator)
-
-
-def validate_config_schema(conf: Dict[str, Any]) -> Dict[str, Any]:
-    """
-    Validate the configuration follow the Config Schema
-    :param conf: Config in JSON format
-    :return: Returns the config if valid, otherwise throw an exception
-    """
-    try:
-        FreqtradeValidator(constants.CONF_SCHEMA).validate(conf)
-        return conf
-    except ValidationError as e:
-        logger.critical(
-            f"Invalid configuration. See config.json.example. Reason: {e}"
-        )
-        raise ValidationError(
-            best_match(Draft4Validator(constants.CONF_SCHEMA).iter_errors(conf)).message
-        )
freqtrade/constants.py
@@ -5,7 +5,6 @@ bot constants
 """
 DEFAULT_CONFIG = 'config.json'
 DEFAULT_EXCHANGE = 'bittrex'
-DYNAMIC_WHITELIST = 20  # pairs
 PROCESS_THROTTLE_SECS = 5  # sec
 DEFAULT_TICKER_INTERVAL = 5  # min
 HYPEROPT_EPOCH = 100  # epochs
freqtrade/data/dataprovider.py
@@ -44,36 +44,49 @@ class DataProvider():
 
     def ohlcv(self, pair: str, ticker_interval: str = None, copy: bool = True) -> DataFrame:
         """
-        get ohlcv data for the given pair as DataFrame
-        Please check `available_pairs` to verify which pairs are currently cached.
+        Get ohlcv data for the given pair as DataFrame
+        Please use `available_pairs` to verify which pairs are currently cached.
         :param pair: pair to get the data for
-        :param ticker_interval: ticker_interval to get pair for
-        :param copy: copy dataframe before returning.
-                     Use false only for RO operations (where the dataframe is not modified)
+        :param ticker_interval: ticker interval to get data for
+        :param copy: copy dataframe before returning if True.
+                     Use False only for read-only operations (where the dataframe is not modified)
         """
         if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE):
-            if ticker_interval:
-                pairtick = (pair, ticker_interval)
-            else:
-                pairtick = (pair, self._config['ticker_interval'])
-
-            return self._exchange.klines(pairtick, copy=copy)
+            return self._exchange.klines((pair, ticker_interval or self._config['ticker_interval']),
+                                         copy=copy)
         else:
             return DataFrame()
 
-    def historic_ohlcv(self, pair: str, ticker_interval: str) -> DataFrame:
+    def historic_ohlcv(self, pair: str, ticker_interval: str = None) -> DataFrame:
         """
-        get stored historic ohlcv data
+        Get stored historic ohlcv data
         :param pair: pair to get the data for
-        :param ticker_interval: ticker_interval to get pair for
+        :param ticker_interval: ticker interval to get data for
         """
         return load_pair_history(pair=pair,
-                                 ticker_interval=ticker_interval,
+                                 ticker_interval=ticker_interval or self._config['ticker_interval'],
                                  refresh_pairs=False,
                                  datadir=Path(self._config['datadir']) if self._config.get(
                                      'datadir') else None
                                  )
 
+    def get_pair_dataframe(self, pair: str, ticker_interval: str = None) -> DataFrame:
+        """
+        Return pair ohlcv data, either live or cached historical -- depending
+        on the runmode.
+        :param pair: pair to get the data for
+        :param ticker_interval: ticker interval to get data for
+        """
+        if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE):
+            # Get live ohlcv data.
+            data = self.ohlcv(pair=pair, ticker_interval=ticker_interval)
+        else:
+            # Get historic ohlcv data (cached on disk).
+            data = self.historic_ohlcv(pair=pair, ticker_interval=ticker_interval)
+        if len(data) == 0:
+            logger.warning(f"No data found for ({pair}, {ticker_interval}).")
+        return data
+
     def ticker(self, pair: str):
         """
         Return last ticker data
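A small note on the `ticker_interval or self._config['ticker_interval']` idiom introduced above: `or` falls back whenever the argument is falsy, which covers both `None` and an accidental empty string. A self-contained sketch (toy config, assumption):

``` python
config = {'ticker_interval': '5m'}  # toy config (assumption)

def resolve_interval(ticker_interval=None):
    # Mirrors the fallback idiom in ohlcv()/historic_ohlcv().
    return ticker_interval or config['ticker_interval']

assert resolve_interval() == '5m'      # None -> configured default
assert resolve_interval('') == '5m'    # empty string also falls back
assert resolve_interval('1h') == '1h'  # explicit interval wins
```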
freqtrade/data/history.py
@@ -43,7 +43,7 @@ def trim_tickerlist(tickerlist: List[Dict], timerange: TimeRange) -> List[Dict]:
             start_index += 1
 
     if timerange.stoptype == 'line':
-        start_index = len(tickerlist) + timerange.stopts
+        start_index = max(len(tickerlist) + timerange.stopts, 0)
     if timerange.stoptype == 'index':
         stop_index = timerange.stopts
     elif timerange.stoptype == 'date':
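The `max(..., 0)` clamp guards against a 'line' timerange that requests more candles than the list holds; without it, a negative `start_index` would silently slice from the end. A tiny sketch of the failure mode (toy data, assumption):

``` python
tickerlist = [1, 2, 3]   # toy data standing in for candle rows
stopts = -5              # "last 5 lines" requested, only 3 exist

start_index = len(tickerlist) + stopts           # -2: negative index!
assert tickerlist[start_index:] == [2, 3]        # silently wrong slice

start_index = max(len(tickerlist) + stopts, 0)   # clamped to 0
assert tickerlist[start_index:] == [1, 2, 3]     # full list, as intended
```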
@@ -57,10 +57,8 @@ def trim_tickerlist(tickerlist: List[Dict], timerange: TimeRange) -> List[Dict]:
     return tickerlist[start_index:stop_index]
 
 
-def load_tickerdata_file(
-        datadir: Optional[Path], pair: str,
-        ticker_interval: str,
-        timerange: Optional[TimeRange] = None) -> Optional[list]:
+def load_tickerdata_file(datadir: Optional[Path], pair: str, ticker_interval: str,
+                         timerange: Optional[TimeRange] = None) -> Optional[list]:
     """
     Load a pair from file, either .json.gz or .json
     :return: tickerlist or None if unsuccessful
@@ -68,13 +66,22 @@ def load_tickerdata_file(
     filename = pair_data_filename(datadir, pair, ticker_interval)
     pairdata = misc.file_load_json(filename)
     if not pairdata:
-        return None
+        return []
 
     if timerange:
         pairdata = trim_tickerlist(pairdata, timerange)
     return pairdata
 
 
+def store_tickerdata_file(datadir: Optional[Path], pair: str,
+                          ticker_interval: str, data: list, is_zip: bool = False):
+    """
+    Stores tickerdata to file
+    """
+    filename = pair_data_filename(datadir, pair, ticker_interval)
+    misc.file_dump_json(filename, data, is_zip=is_zip)
+
+
 def load_pair_history(pair: str,
                       ticker_interval: str,
                       datadir: Optional[Path],
@@ -177,11 +184,14 @@ def pair_data_filename(datadir: Optional[Path], pair: str, ticker_interval: str)
     return filename
 
 
-def load_cached_data_for_updating(filename: Path, ticker_interval: str,
+def load_cached_data_for_updating(datadir: Optional[Path], pair: str, ticker_interval: str,
                                   timerange: Optional[TimeRange]) -> Tuple[List[Any],
                                                                            Optional[int]]:
     """
-    Load cached data and choose what part of the data should be updated
+    Load cached data to download more data.
+    If timerange is passed in, checks whether data from before the stored data will be downloaded.
+    If that's the case then what's available should be completely overwritten.
+    Only used by download_pair_history().
     """
 
     since_ms = None
@@ -195,12 +205,11 @@ def load_cached_data_for_updating(filename: Path, ticker_interval: str,
         since_ms = arrow.utcnow().shift(minutes=num_minutes).timestamp * 1000
 
     # read the cached file
-    if filename.is_file():
-        with open(filename, "rt") as file:
-            data = misc.json_load(file)
-            # remove the last item, could be incomplete candle
-            if data:
-                data.pop()
+    # Intentionally don't pass timerange in - since we need to load the full dataset.
+    data = load_tickerdata_file(datadir, pair, ticker_interval)
+    # remove the last item, could be an incomplete candle
+    if data:
+        data.pop()
     else:
         data = []
 
@@ -239,14 +248,12 @@ def download_pair_history(datadir: Optional[Path],
     )
 
     try:
-        filename = pair_data_filename(datadir, pair, ticker_interval)
-
         logger.info(
             f'Download history data for pair: "{pair}", interval: {ticker_interval} '
            f'and store in {datadir}.'
         )
 
-        data, since_ms = load_cached_data_for_updating(filename, ticker_interval, timerange)
+        data, since_ms = load_cached_data_for_updating(datadir, pair, ticker_interval, timerange)
 
         logger.debug("Current Start: %s", misc.format_ms_time(data[1][0]) if data else 'None')
         logger.debug("Current End: %s", misc.format_ms_time(data[-1][0]) if data else 'None')
@@ -262,7 +269,7 @@ def download_pair_history(datadir: Optional[Path],
         logger.debug("New Start: %s", misc.format_ms_time(data[0][0]))
         logger.debug("New End: %s", misc.format_ms_time(data[-1][0]))
 
-        misc.file_dump_json(filename, data)
+        store_tickerdata_file(datadir, pair, ticker_interval, data=data)
         return True
 
     except Exception as e:
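One step above is easy to miss: the last cached row is dropped before the download resumes, because the final candle on disk may have been written while its interval was still open, so it is re-downloaded rather than trusted. A toy sketch (row layout and timestamps are assumptions):

``` python
# Toy tickerdata rows: [timestamp_ms, open, high, low, close, volume]
cached = [
    [1000, 1.0, 1.1, 0.9, 1.0, 10],
    [2000, 1.0, 1.2, 1.0, 1.1, 12],
    [3000, 1.1, 1.1, 1.1, 1.1, 2],   # possibly written mid-candle
]
# Same move as load_cached_data_for_updating() above:
if cached:
    cached.pop()   # discard the maybe-incomplete candle
# The next download then refreshes everything from the retained data onwards.
```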
freqtrade/exchange/exchange.py
@@ -408,12 +408,12 @@ class Exchange(object):
         except ccxt.InsufficientFunds as e:
             raise DependencyException(
                 f'Insufficient funds to create {ordertype} {side} order on market {pair}.'
-                f'Tried to {side} amount {amount} at rate {rate} (total {rate * amount}).'
+                f'Tried to {side} amount {amount} at rate {rate}.'
                 f'Message: {e}') from e
         except ccxt.InvalidOrder as e:
             raise DependencyException(
                 f'Could not create {ordertype} {side} order on market {pair}.'
-                f'Tried to {side} amount {amount} at rate {rate} (total {rate * amount}).'
+                f'Tried to {side} amount {amount} at rate {rate}.'
                 f'Message: {e}') from e
         except (ccxt.NetworkError, ccxt.ExchangeError) as e:
             raise TemporaryError(
@@ -472,7 +472,7 @@ class Exchange(object):
 
         order = self.create_order(pair, ordertype, 'sell', amount, rate, params)
         logger.info('stoploss limit order added for %s. '
-                    'stop price: %s. limit: %s' % (pair, stop_price, rate))
+                    'stop price: %s. limit: %s', pair, stop_price, rate)
         return order
 
     @retrier
@@ -696,8 +696,13 @@ class Exchange(object):
     @retrier
     def get_order(self, order_id: str, pair: str) -> Dict:
         if self._config['dry_run']:
-            order = self._dry_run_open_orders[order_id]
-            return order
+            try:
+                order = self._dry_run_open_orders[order_id]
+                return order
+            except KeyError as e:
+                # Gracefully handle errors with dry-run orders.
+                raise InvalidOrderException(
+                    f'Tried to get an invalid dry-run-order (id: {order_id}). Message: {e}') from e
         try:
             return self._api.fetch_order(order_id, pair)
         except ccxt.InvalidOrder as e:
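The dry-run lookup above now surfaces a missing order id as the same `InvalidOrderException` that live `fetch_order` failures raise, so callers handle one exception type. A minimal self-contained sketch of the pattern (the exception class here is a stand-in for freqtrade's own):

``` python
class InvalidOrderException(Exception):
    """Stand-in for freqtrade's InvalidOrderException (assumption)."""

_dry_run_open_orders = {'X': {'id': 'X', 'status': 'open'}}  # toy order book

def get_order(order_id: str) -> dict:
    try:
        return _dry_run_open_orders[order_id]
    except KeyError as e:
        # Same conversion as Exchange.get_order() above: an unknown dry-run
        # id becomes the exception callers already expect from live mode.
        raise InvalidOrderException(
            f'Tried to get an invalid dry-run-order (id: {order_id}). Message: {e}') from e

get_order('X')     # returns the fake order
# get_order('Y')   # would raise InvalidOrderException
```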
freqtrade/freqtradebot.py
@@ -16,6 +16,7 @@ from freqtrade import (DependencyException, OperationalException, InvalidOrderEx
 from freqtrade.data.converter import order_book_to_dataframe
 from freqtrade.data.dataprovider import DataProvider
 from freqtrade.edge import Edge
+from freqtrade.configuration import validate_config_consistency
 from freqtrade.exchange import timeframe_to_minutes, timeframe_to_next_date
 from freqtrade.persistence import Trade
 from freqtrade.rpc import RPCManager, RPCMessageType
@@ -51,6 +52,9 @@ class FreqtradeBot(object):
 
         self.strategy: IStrategy = StrategyResolver(self.config).strategy
 
+        # Check config consistency here since strategies can set certain options
+        validate_config_consistency(config)
+
         self.rpc: RPCManager = RPCManager(self)
 
         self.exchange = ExchangeResolver(self.config['exchange']['name'], self.config).exchange
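The placement matters: the strategy is resolved first because it may override config keys (for example trailing-stop settings), and only the merged result can be checked meaningfully. A toy illustration of that ordering (names and values are assumptions, and the check is a simplified stand-in):

``` python
def validate_config_consistency(conf: dict) -> None:
    # Simplified stand-in for the real check in config_validation.py.
    if conf.get('trailing_only_offset_is_reached') \
            and not float(conf.get('trailing_stop_positive', 0)):
        raise ValueError('trailing_stop_positive must be set')

config = {'trailing_only_offset_is_reached': True}  # from the config file
# Strategy resolution may fill in more settings before validation runs:
config.setdefault('trailing_stop_positive', 0.01)   # set by the strategy
validate_config_consistency(config)                 # passes on the merged config
```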
freqtrade/misc.py
@@ -5,11 +5,11 @@ import gzip
 import logging
 import re
 from datetime import datetime
+from pathlib import Path
 
 import numpy as np
 import rapidjson
 
 logger = logging.getLogger(__name__)
 
 
@@ -39,7 +39,7 @@ def datesarray_to_datetimearray(dates: np.ndarray) -> np.ndarray:
     return dates.dt.to_pydatetime()
 
 
-def file_dump_json(filename, data, is_zip=False) -> None:
+def file_dump_json(filename: Path, data, is_zip=False) -> None:
     """
     Dump JSON data into a file
     :param filename: file to create
@@ -49,8 +49,8 @@ def file_dump_json(filename, data, is_zip=False) -> None:
     logger.info(f'dumping json to "{filename}"')
 
     if is_zip:
-        if not filename.endswith('.gz'):
-            filename = filename + '.gz'
+        if filename.suffix != '.gz':
+            filename = filename.with_suffix('.gz')
         with gzip.open(filename, 'w') as fp:
             rapidjson.dump(data, fp, default=str, number_mode=rapidjson.NM_NATIVE)
     else:
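A behavioural wrinkle of the `Path` version worth knowing: `with_suffix('.gz')` replaces an existing extension instead of appending to it, unlike the old string concatenation (the filename below is just an example):

``` python
from pathlib import Path

# Old string handling: appended the extension.
assert 'ETH_BTC-5m.json' + '.gz' == 'ETH_BTC-5m.json.gz'

# New Path handling: with_suffix() swaps the final extension.
assert Path('ETH_BTC-5m.json').with_suffix('.gz') == Path('ETH_BTC-5m.gz')
```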
freqtrade/optimize/backtesting.py
@@ -190,7 +190,7 @@ class Backtesting(object):
         return tabulate(tabular_data, headers=headers,  # type: ignore
                         floatfmt=floatfmt, tablefmt="pipe")
 
-    def _store_backtest_result(self, recordfilename: str, results: DataFrame,
+    def _store_backtest_result(self, recordfilename: Path, results: DataFrame,
                                strategyname: Optional[str] = None) -> None:
 
         records = [(t.pair, t.profit_percent, t.open_time.timestamp(),
@@ -201,10 +201,10 @@ class Backtesting(object):
         if records:
             if strategyname:
                 # Inject strategyname to filename
-                recname = Path(recordfilename)
-                recordfilename = str(Path.joinpath(
-                    recname.parent, f'{recname.stem}-{strategyname}').with_suffix(recname.suffix))
-            logger.info('Dumping backtest results to %s', recordfilename)
+                recordfilename = Path.joinpath(
+                    recordfilename.parent,
+                    f'{recordfilename.stem}-{strategyname}').with_suffix(recordfilename.suffix)
+            logger.info(f'Dumping backtest results to {recordfilename}')
             file_dump_json(recordfilename, records)
 
     def _get_ticker_list(self, processed) -> Dict[str, DataFrame]:
@@ -458,7 +458,7 @@ class Backtesting(object):
         for strategy, results in all_results.items():
 
             if self.config.get('export', False):
-                self._store_backtest_result(self.config['exportfilename'], results,
+                self._store_backtest_result(Path(self.config['exportfilename']), results,
                                             strategy if len(self.strategylist) > 1 else None)
 
             print(f"Result for strategy {strategy}")
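The filename surgery above is compact; a standalone sketch of what it produces (the strategy name is an example value, matching the test later in this diff):

``` python
from pathlib import Path

recordfilename = Path('backtest-result.json')
strategyname = 'DefStrat'

# Same stem/suffix manipulation as _store_backtest_result() above.
out = Path.joinpath(
    recordfilename.parent,
    f'{recordfilename.stem}-{strategyname}').with_suffix(recordfilename.suffix)

assert out == Path('backtest-result-DefStrat.json')
```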
freqtrade/pairlist/VolumePairList.py
@@ -55,7 +55,6 @@ class VolumePairList(IPairList):
         # Generate dynamic whitelist
         self._whitelist = self._gen_pair_whitelist(
             self._config['stake_currency'], self._sort_key)[:self._number_pairs]
-        logger.info(f"Searching pairs: {self._whitelist}")
 
     @cached(TTLCache(maxsize=1, ttl=1800))
     def _gen_pair_whitelist(self, base_currency: str, key: str) -> List[str]:
@@ -92,4 +91,6 @@ class VolumePairList(IPairList):
                 valid_tickers.remove(t)
 
         pairs = [s['symbol'] for s in valid_tickers]
+        logger.info(f"Searching pairs: {self._whitelist}")
+
         return pairs
freqtrade/resolvers/iresolver.py
@@ -29,7 +29,8 @@ class IResolver(object):
         """
 
         # Generate spec based on absolute path
-        spec = importlib.util.spec_from_file_location('unknown', str(module_path))
+        # Pass object_name as first argument to have logging print a reasonable name.
+        spec = importlib.util.spec_from_file_location(object_name, str(module_path))
         module = importlib.util.module_from_spec(spec)
         try:
             spec.loader.exec_module(module)  # type: ignore  # importlib does not use typehints
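Passing `object_name` as the module name means log records produced by the loaded module carry a meaningful name instead of 'unknown'. A runnable sketch of the mechanism (the temp file stands in for a strategy file):

``` python
import importlib.util
import pathlib
import tempfile

# Hedged sketch: load a module from an arbitrary path under a chosen name,
# as IResolver now does with object_name.
src = pathlib.Path(tempfile.mkdtemp()) / 'strategy.py'
src.write_text("import logging\nlogging.getLogger(__name__).warning('loaded')\n")

spec = importlib.util.spec_from_file_location('MyStrategy', str(src))
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
assert module.__name__ == 'MyStrategy'  # log records now show this name
```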
freqtrade/tests/data/test_dataprovider.py
@@ -13,6 +13,7 @@ def test_ohlcv(mocker, default_conf, ticker_history):
     exchange = get_patched_exchange(mocker, default_conf)
     exchange._klines[("XRP/BTC", ticker_interval)] = ticker_history
     exchange._klines[("UNITTEST/BTC", ticker_interval)] = ticker_history
+
     dp = DataProvider(default_conf, exchange)
     assert dp.runmode == RunMode.DRY_RUN
     assert ticker_history.equals(dp.ohlcv("UNITTEST/BTC", ticker_interval))
@@ -37,11 +38,9 @@ def test_ohlcv(mocker, default_conf, ticker_history):
 
 
 def test_historic_ohlcv(mocker, default_conf, ticker_history):
-
     historymock = MagicMock(return_value=ticker_history)
     mocker.patch("freqtrade.data.dataprovider.load_pair_history", historymock)
 
-    # exchange = get_patched_exchange(mocker, default_conf)
     dp = DataProvider(default_conf, None)
     data = dp.historic_ohlcv("UNITTEST/BTC", "5m")
     assert isinstance(data, DataFrame)
@@ -51,14 +50,47 @@ def test_historic_ohlcv(mocker, default_conf, ticker_history):
     assert historymock.call_args_list[0][1]["ticker_interval"] == "5m"
 
 
+def test_get_pair_dataframe(mocker, default_conf, ticker_history):
+    default_conf["runmode"] = RunMode.DRY_RUN
+    ticker_interval = default_conf["ticker_interval"]
+    exchange = get_patched_exchange(mocker, default_conf)
+    exchange._klines[("XRP/BTC", ticker_interval)] = ticker_history
+    exchange._klines[("UNITTEST/BTC", ticker_interval)] = ticker_history
+
+    dp = DataProvider(default_conf, exchange)
+    assert dp.runmode == RunMode.DRY_RUN
+    assert ticker_history.equals(dp.get_pair_dataframe("UNITTEST/BTC", ticker_interval))
+    assert isinstance(dp.get_pair_dataframe("UNITTEST/BTC", ticker_interval), DataFrame)
+    assert dp.get_pair_dataframe("UNITTEST/BTC", ticker_interval) is not ticker_history
+    assert not dp.get_pair_dataframe("UNITTEST/BTC", ticker_interval).empty
+    assert dp.get_pair_dataframe("NONESENSE/AAA", ticker_interval).empty
+
+    # Test with and without parameter
+    assert dp.get_pair_dataframe("UNITTEST/BTC",
+                                 ticker_interval).equals(dp.get_pair_dataframe("UNITTEST/BTC"))
+
+    default_conf["runmode"] = RunMode.LIVE
+    dp = DataProvider(default_conf, exchange)
+    assert dp.runmode == RunMode.LIVE
+    assert isinstance(dp.get_pair_dataframe("UNITTEST/BTC", ticker_interval), DataFrame)
+    assert dp.get_pair_dataframe("NONESENSE/AAA", ticker_interval).empty
+
+    historymock = MagicMock(return_value=ticker_history)
+    mocker.patch("freqtrade.data.dataprovider.load_pair_history", historymock)
+    default_conf["runmode"] = RunMode.BACKTEST
+    dp = DataProvider(default_conf, exchange)
+    assert dp.runmode == RunMode.BACKTEST
+    assert isinstance(dp.get_pair_dataframe("UNITTEST/BTC", ticker_interval), DataFrame)
+    # assert dp.get_pair_dataframe("NONESENSE/AAA", ticker_interval).empty
+
+
 def test_available_pairs(mocker, default_conf, ticker_history):
     exchange = get_patched_exchange(mocker, default_conf)
-
     ticker_interval = default_conf["ticker_interval"]
     exchange._klines[("XRP/BTC", ticker_interval)] = ticker_history
     exchange._klines[("UNITTEST/BTC", ticker_interval)] = ticker_history
-    dp = DataProvider(default_conf, exchange)
+
+    dp = DataProvider(default_conf, exchange)
     assert len(dp.available_pairs) == 2
     assert dp.available_pairs == [
         ("XRP/BTC", ticker_interval),
freqtrade/tests/data/test_history.py
@@ -178,16 +178,13 @@ def test_load_cached_data_for_updating(mocker) -> None:
     # timeframe starts earlier than the cached data
     # should fully update data
     timerange = TimeRange('date', None, test_data[0][0] / 1000 - 1, 0)
-    data, start_ts = load_cached_data_for_updating(test_filename,
-                                                   '1m',
-                                                   timerange)
+    data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
     assert data == []
     assert start_ts == test_data[0][0] - 1000
 
     # same with 'line' timeframe
     num_lines = (test_data[-1][0] - test_data[1][0]) / 1000 / 60 + 120
-    data, start_ts = load_cached_data_for_updating(test_filename,
-                                                   '1m',
-                                                   TimeRange(None, 'line', 0, -num_lines))
+    data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m',
+                                                   TimeRange(None, 'line', 0, -num_lines))
     assert data == []
     assert start_ts < test_data[0][0] - 1
@@ -195,36 +192,29 @@ def test_load_cached_data_for_updating(mocker) -> None:
     # timeframe starts in the center of the cached data
     # should return the cached data w/o the last item
     timerange = TimeRange('date', None, test_data[0][0] / 1000 + 1, 0)
-    data, start_ts = load_cached_data_for_updating(test_filename,
-                                                   '1m',
-                                                   timerange)
+    data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
     assert data == test_data[:-1]
     assert test_data[-2][0] < start_ts < test_data[-1][0]
 
     # same with 'line' timeframe
     num_lines = (test_data[-1][0] - test_data[1][0]) / 1000 / 60 + 30
     timerange = TimeRange(None, 'line', 0, -num_lines)
-    data, start_ts = load_cached_data_for_updating(test_filename,
-                                                   '1m',
-                                                   timerange)
+    data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
     assert data == test_data[:-1]
     assert test_data[-2][0] < start_ts < test_data[-1][0]
 
     # timeframe starts after the cached data
     # should return the cached data w/o the last item
     timerange = TimeRange('date', None, test_data[-1][0] / 1000 + 1, 0)
-    data, start_ts = load_cached_data_for_updating(test_filename,
-                                                   '1m',
-                                                   timerange)
+    data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
     assert data == test_data[:-1]
     assert test_data[-2][0] < start_ts < test_data[-1][0]
 
-    # same with 'line' timeframe
+    # Try loading last 30 lines.
+    # Not supported by load_cached_data_for_updating, we always need to get the full data.
     num_lines = 30
     timerange = TimeRange(None, 'line', 0, -num_lines)
-    data, start_ts = load_cached_data_for_updating(test_filename,
-                                                   '1m',
-                                                   timerange)
+    data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
     assert data == test_data[:-1]
     assert test_data[-2][0] < start_ts < test_data[-1][0]
 
@@ -232,35 +222,27 @@ def test_load_cached_data_for_updating(mocker) -> None:
     # should return the cached data w/o the last item
     num_lines = 30
     timerange = TimeRange(None, 'line', 0, -num_lines)
-    data, start_ts = load_cached_data_for_updating(test_filename,
-                                                   '1m',
-                                                   timerange)
+    data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
    assert data == test_data[:-1]
     assert test_data[-2][0] < start_ts < test_data[-1][0]
 
     # no datafile exist
     # should return timestamp start time
     timerange = TimeRange('date', None, now_ts - 10000, 0)
-    data, start_ts = load_cached_data_for_updating(test_filename.with_name('unexist'),
-                                                   '1m',
-                                                   timerange)
+    data, start_ts = load_cached_data_for_updating(datadir, 'NONEXIST/BTC', '1m', timerange)
     assert data == []
     assert start_ts == (now_ts - 10000) * 1000
 
     # same with 'line' timeframe
     num_lines = 30
     timerange = TimeRange(None, 'line', 0, -num_lines)
-    data, start_ts = load_cached_data_for_updating(test_filename.with_name('unexist'),
-                                                   '1m',
-                                                   timerange)
+    data, start_ts = load_cached_data_for_updating(datadir, 'NONEXIST/BTC', '1m', timerange)
     assert data == []
     assert start_ts == (now_ts - num_lines * 60) * 1000
 
     # no datafile exist, no timeframe is set
     # should return an empty array and None
-    data, start_ts = load_cached_data_for_updating(test_filename.with_name('unexist'),
-                                                   '1m',
-                                                   None)
+    data, start_ts = load_cached_data_for_updating(datadir, 'NONEXIST/BTC', '1m', None)
     assert data == []
     assert start_ts is None
freqtrade/tests/exchange/test_exchange.py
@@ -656,7 +656,13 @@ def test_buy_prod(default_conf, mocker, exchange_name):
     with pytest.raises(DependencyException):
         api_mock.create_order = MagicMock(side_effect=ccxt.InvalidOrder("Order not found"))
         exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
-        exchange.buy(pair='ETH/BTC', ordertype=order_type,
+        exchange.buy(pair='ETH/BTC', ordertype='limit',
+                     amount=1, rate=200, time_in_force=time_in_force)
+
+    with pytest.raises(DependencyException):
+        api_mock.create_order = MagicMock(side_effect=ccxt.InvalidOrder("Order not found"))
+        exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
+        exchange.buy(pair='ETH/BTC', ordertype='market',
                      amount=1, rate=200, time_in_force=time_in_force)
 
     with pytest.raises(TemporaryError):
@@ -779,7 +785,13 @@ def test_sell_prod(default_conf, mocker, exchange_name):
     with pytest.raises(DependencyException):
         api_mock.create_order = MagicMock(side_effect=ccxt.InvalidOrder("Order not found"))
         exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
-        exchange.sell(pair='ETH/BTC', ordertype=order_type, amount=1, rate=200)
+        exchange.sell(pair='ETH/BTC', ordertype='limit', amount=1, rate=200)
+
+    # Market orders don't require price, so the behaviour is slightly different
+    with pytest.raises(DependencyException):
+        api_mock.create_order = MagicMock(side_effect=ccxt.InvalidOrder("Order not found"))
+        exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
+        exchange.sell(pair='ETH/BTC', ordertype='market', amount=1, rate=200)
 
     with pytest.raises(TemporaryError):
         api_mock.create_order = MagicMock(side_effect=ccxt.NetworkError("No Connection"))
@@ -1328,6 +1340,9 @@ def test_get_order(default_conf, mocker, exchange_name):
     print(exchange.get_order('X', 'TKN/BTC'))
     assert exchange.get_order('X', 'TKN/BTC').myid == 123
 
+    with pytest.raises(InvalidOrderException, match=r'Tried to get an invalid dry-run-order.*'):
+        exchange.get_order('Y', 'TKN/BTC')
+
     default_conf['dry_run'] = False
     api_mock = MagicMock()
     api_mock.fetch_order = MagicMock(return_value=456)
freqtrade/tests/optimize/test_backtesting.py
@@ -2,6 +2,7 @@
 
 import math
 import random
+from pathlib import Path
 from unittest.mock import MagicMock
 
 import numpy as np
@@ -785,10 +786,10 @@ def test_backtest_record(default_conf, fee, mocker):
     # reset test to test with strategy name
     names = []
     records = []
-    backtesting._store_backtest_result("backtest-result.json", results, "DefStrat")
+    backtesting._store_backtest_result(Path("backtest-result.json"), results, "DefStrat")
     assert len(results) == 4
     # Assert file_dump_json was only called once
-    assert names == ['backtest-result-DefStrat.json']
+    assert names == [Path('backtest-result-DefStrat.json')]
     records = records[0]
     # Ensure records are of correct type
     assert len(records) == 4
freqtrade/tests/test_configuration.py
@@ -2,7 +2,6 @@
 import json
 import logging
 import warnings
-from argparse import Namespace
 from copy import deepcopy
 from pathlib import Path
 from unittest.mock import MagicMock
@@ -11,10 +10,10 @@ import pytest
 from jsonschema import Draft4Validator, ValidationError, validate
 
 from freqtrade import OperationalException, constants
-from freqtrade.configuration import Arguments, Configuration
+from freqtrade.configuration import Arguments, Configuration, validate_config_consistency
 from freqtrade.configuration.check_exchange import check_exchange
+from freqtrade.configuration.config_validation import validate_config_schema
 from freqtrade.configuration.create_datadir import create_datadir
-from freqtrade.configuration.json_schema import validate_config_schema
 from freqtrade.configuration.load_config import load_config_file
 from freqtrade.constants import DEFAULT_DB_DRYRUN_URL, DEFAULT_DB_PROD_URL
 from freqtrade.loggers import _set_loggers
@@ -625,21 +624,34 @@ def test_validate_tsl(default_conf):
     with pytest.raises(OperationalException,
                        match=r'The config trailing_only_offset_is_reached needs '
                        'trailing_stop_positive_offset to be more than 0 in your config.'):
-        configuration = Configuration(Namespace())
-        configuration._validate_config_consistency(default_conf)
+        validate_config_consistency(default_conf)
 
     default_conf['trailing_stop_positive_offset'] = 0.01
     default_conf['trailing_stop_positive'] = 0.015
     with pytest.raises(OperationalException,
                        match=r'The config trailing_stop_positive_offset needs '
                        'to be greater than trailing_stop_positive_offset in your config.'):
-        configuration = Configuration(Namespace())
-        configuration._validate_config_consistency(default_conf)
+        validate_config_consistency(default_conf)
 
     default_conf['trailing_stop_positive'] = 0.01
     default_conf['trailing_stop_positive_offset'] = 0.015
-    Configuration(Namespace())
-    configuration._validate_config_consistency(default_conf)
+    validate_config_consistency(default_conf)
+
+
+def test_validate_edge(edge_conf):
+    edge_conf.update({"pairlist": {
+        "method": "VolumePairList",
+    }})
+
+    with pytest.raises(OperationalException,
+                       match="Edge and VolumePairList are incompatible, "
+                       "Edge will override whatever pairs VolumePairlist selects."):
+        validate_config_consistency(edge_conf)
+
+    edge_conf.update({"pairlist": {
+        "method": "StaticPairList",
+    }})
+    validate_config_consistency(edge_conf)
 
 
 def test_load_config_test_comments() -> None:
freqtrade/tests/test_misc.py
@@ -1,6 +1,7 @@
 # pragma pylint: disable=missing-docstring,C0103
 
 import datetime
+from pathlib import Path
 from unittest.mock import MagicMock
 
 from freqtrade.data.converter import parse_ticker_dataframe
@@ -34,12 +35,12 @@ def test_datesarray_to_datetimearray(ticker_history_list):
 def test_file_dump_json(mocker) -> None:
     file_open = mocker.patch('freqtrade.misc.open', MagicMock())
     json_dump = mocker.patch('rapidjson.dump', MagicMock())
-    file_dump_json('somefile', [1, 2, 3])
+    file_dump_json(Path('somefile'), [1, 2, 3])
     assert file_open.call_count == 1
     assert json_dump.call_count == 1
     file_open = mocker.patch('freqtrade.misc.gzip.open', MagicMock())
     json_dump = mocker.patch('rapidjson.dump', MagicMock())
-    file_dump_json('somefile', [1, 2, 3], True)
+    file_dump_json(Path('somefile'), [1, 2, 3], True)
     assert file_open.call_count == 1
     assert json_dump.call_count == 1
requirements-common.txt
@@ -1,7 +1,7 @@
 # requirements without requirements installable via conda
 # mainly used for Raspberry pi installs
-ccxt==1.18.1043
-SQLAlchemy==1.3.6
+ccxt==1.18.1068
+SQLAlchemy==1.3.7
 python-telegram-bot==11.1.0
 arrow==0.14.5
 cachetools==3.1.1
requirements-dev.txt
@@ -7,7 +7,7 @@ flake8==3.7.8
 flake8-type-annotations==0.1.0
 flake8-tidy-imports==2.0.0
 mypy==0.720
-pytest==5.0.1
+pytest==5.1.0
 pytest-asyncio==0.10.0
 pytest-cov==2.7.1
 pytest-mock==1.10.4