Merge pull request #2719 from xmatthias/data_handler

Introduce Data handler

Commit: f2f2c281c0
@@ -129,5 +129,7 @@
        "heartbeat_interval": 60
    },
    "strategy": "DefaultStrategy",
-   "strategy_path": "user_data/strategies/"
+   "strategy_path": "user_data/strategies/",
+   "dataformat_ohlcv": "json",
+   "dataformat_trades": "jsongz"
}
docs/configuration.md
@@ -111,6 +111,8 @@ Mandatory parameters are marked as **Required**, which means that they are requi
| `internals.sd_notify` | Enables use of the sd_notify protocol to tell systemd service manager about changes in the bot state and issue keep-alive pings. See [here](installation.md#7-optional-configure-freqtrade-as-a-systemd-service) for more details. <br> **Datatype:** Boolean
| `logfile` | Specifies the logfile name. Uses a rolling strategy for log file rotation: 10 files, with a limit of 1MB per file. <br> **Datatype:** String
| `user_data_dir` | Directory containing user data. <br> *Defaults to `./user_data/`*. <br> **Datatype:** String
+| `dataformat_ohlcv` | Data format to use to store OHLCV historic data. <br> *Defaults to `json`*. <br> **Datatype:** String
+| `dataformat_trades` | Data format to use to store trades historic data. <br> *Defaults to `jsongz`*. <br> **Datatype:** String

### Parameters in the strategy
docs/data-download.md
@@ -12,6 +12,152 @@ Otherwise `--exchange` becomes mandatory.

If you already have backtesting data available in your data-directory and would like to refresh this data up to today, use `--days xx` with a number slightly higher than the number of missing days. Freqtrade will keep the available data and only download the missing data.
Be careful though: if the number is too small (which would leave a few days still missing), the whole dataset will be removed and only xx days will be downloaded.
### Usage

```
usage: freqtrade download-data [-h] [-v] [--logfile FILE] [-V] [-c PATH] [-d PATH] [--userdir PATH] [-p PAIRS [PAIRS ...]]
                               [--pairs-file FILE] [--days INT] [--dl-trades] [--exchange EXCHANGE]
                               [-t {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w} ...]]
                               [--erase] [--data-format-ohlcv {json,jsongz}] [--data-format-trades {json,jsongz}]

optional arguments:
  -h, --help            show this help message and exit
  -p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
                        Show profits for only these pairs. Pairs are space-separated.
  --pairs-file FILE     File containing a list of pairs to download.
  --days INT            Download data for given number of days.
  --dl-trades           Download trades instead of OHLCV data. The bot will resample trades to the desired timeframe as specified as
                        --timeframes/-t.
  --exchange EXCHANGE   Exchange name (default: `bittrex`). Only valid if no config is provided.
  -t {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w} ...], --timeframes {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w} ...]
                        Specify which tickers to download. Space-separated list. Default: `1m 5m`.
  --erase               Clean all existing data for the selected exchange/pairs/timeframes.
  --data-format-ohlcv {json,jsongz}
                        Storage format for downloaded ohlcv data. (default: `json`).
  --data-format-trades {json,jsongz}
                        Storage format for downloaded trades data. (default: `jsongz`).

Common arguments:
  -v, --verbose         Verbose mode (-vv for more, -vvv to get all messages).
  --logfile FILE        Log to the file specified. Special values are: 'syslog', 'journald'. See the documentation for more details.
  -V, --version         show program's version number and exit
  -c PATH, --config PATH
                        Specify configuration file (default: `config.json`). Multiple --config options may be used. Can be set to `-`
                        to read config from stdin.
  -d PATH, --datadir PATH
                        Path to directory with historical backtesting data.
  --userdir PATH, --user-data-dir PATH
                        Path to userdata directory.
```
### Data format

Freqtrade currently supports two data formats, `json` (plain "text" json files) and `jsongz` (a gzipped version of json files).
By default, OHLCV data is stored as `json`, while trades data is stored as `jsongz`.

This can be changed via the `--data-format-ohlcv` and `--data-format-trades` parameters respectively.

If the default data format has been changed during download, then the keys `dataformat_ohlcv` and `dataformat_trades` in the configuration file need to be adjusted to the selected data format as well.

!!! Note
    You can convert between data formats using the [convert-data](#subcommand-convert-data) and [convert-trade-data](#subcommand-convert-trade-data) subcommands.
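To make the formats concrete, here is a minimal, hedged sketch of loading stored candles through the data-handler layer this PR introduces. The data directory and pair are illustrative, not part of the PR:

``` python
# Minimal sketch, assuming data was already downloaded to
# user_data/data/binance (path and pair are illustrative).
from pathlib import Path

from freqtrade.data.history import get_datahandler

dh = dh = get_datahandler(Path("user_data/data/binance"), data_format="json")
candles = dh.ohlcv_load(pair="ETH/BTC", timeframe="5m")
print(candles.tail())
```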

#### Subcommand convert data

```
usage: freqtrade convert-data [-h] [-v] [--logfile FILE] [-V] [-c PATH]
                              [-d PATH] [--userdir PATH]
                              [-p PAIRS [PAIRS ...]] --format-from
                              {json,jsongz} --format-to {json,jsongz}
                              [--erase]
                              [-t {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w} ...]]

optional arguments:
  -h, --help            show this help message and exit
  -p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
                        Show profits for only these pairs. Pairs are space-
                        separated.
  --format-from {json,jsongz}
                        Source format for data conversion.
  --format-to {json,jsongz}
                        Destination format for data conversion.
  --erase               Clean all existing data for the selected
                        exchange/pairs/timeframes.
  -t {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w} ...], --timeframes {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w} ...]
                        Specify which tickers to download. Space-separated
                        list. Default: `1m 5m`.

Common arguments:
  -v, --verbose         Verbose mode (-vv for more, -vvv to get all messages).
  --logfile FILE        Log to the file specified. Special values are:
                        'syslog', 'journald'. See the documentation for more
                        details.
  -V, --version         show program's version number and exit
  -c PATH, --config PATH
                        Specify configuration file (default: `config.json`).
                        Multiple --config options may be used. Can be set to
                        `-` to read config from stdin.
  -d PATH, --datadir PATH
                        Path to directory with historical backtesting data.
  --userdir PATH, --user-data-dir PATH
                        Path to userdata directory.
```
##### Example converting data

The following command will convert all ohlcv (candle) data available in `~/.freqtrade/data/binance` from json to jsongz, saving disk space in the process.
It'll also remove the original json data files (`--erase` parameter).

``` bash
freqtrade convert-data --format-from json --format-to jsongz --datadir ~/.freqtrade/data/binance -t 5m 15m --erase
```
#### Subcommand convert-trade data

```
usage: freqtrade convert-trade-data [-h] [-v] [--logfile FILE] [-V] [-c PATH]
                                    [-d PATH] [--userdir PATH]
                                    [-p PAIRS [PAIRS ...]] --format-from
                                    {json,jsongz} --format-to {json,jsongz}
                                    [--erase]

optional arguments:
  -h, --help            show this help message and exit
  -p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
                        Show profits for only these pairs. Pairs are space-
                        separated.
  --format-from {json,jsongz}
                        Source format for data conversion.
  --format-to {json,jsongz}
                        Destination format for data conversion.
  --erase               Clean all existing data for the selected
                        exchange/pairs/timeframes.

Common arguments:
  -v, --verbose         Verbose mode (-vv for more, -vvv to get all messages).
  --logfile FILE        Log to the file specified. Special values are:
                        'syslog', 'journald'. See the documentation for more
                        details.
  -V, --version         show program's version number and exit
  -c PATH, --config PATH
                        Specify configuration file (default: `config.json`).
                        Multiple --config options may be used. Can be set to
                        `-` to read config from stdin.
  -d PATH, --datadir PATH
                        Path to directory with historical backtesting data.
  --userdir PATH, --user-data-dir PATH
                        Path to userdata directory.
```
##### Example converting trades

The following command will convert all available trade data in `~/.freqtrade/data/kraken` from jsongz to json.
It'll also remove the original jsongz data files (`--erase` parameter).

``` bash
freqtrade convert-trade-data --format-from jsongz --format-to json --datadir ~/.freqtrade/data/kraken --erase
```
### Pairs file

As an alternative to the whitelist from `config.json`, a `pairs.json` file can be used.
freqtrade/commands/__init__.py
@@ -8,7 +8,8 @@ Note: Be careful with file-scoped imports in these subfiles.
"""
from freqtrade.commands.arguments import Arguments
from freqtrade.commands.build_config_commands import start_new_config
-from freqtrade.commands.data_commands import start_download_data
+from freqtrade.commands.data_commands import (start_convert_data,
+                                              start_download_data)
from freqtrade.commands.deploy_commands import (start_create_userdir,
                                                start_new_hyperopt,
                                                start_new_strategy)
freqtrade/commands/arguments.py
@@ -51,8 +51,11 @@ ARGS_BUILD_STRATEGY = ["user_data_dir", "strategy", "template"]

ARGS_BUILD_HYPEROPT = ["user_data_dir", "hyperopt", "template"]

+ARGS_CONVERT_DATA = ["pairs", "format_from", "format_to", "erase"]
+ARGS_CONVERT_DATA_OHLCV = ARGS_CONVERT_DATA + ["timeframes"]
+
ARGS_DOWNLOAD_DATA = ["pairs", "pairs_file", "days", "download_trades", "exchange",
-                     "timeframes", "erase"]
+                     "timeframes", "erase", "dataformat_ohlcv", "dataformat_trades"]

ARGS_PLOT_DATAFRAME = ["pairs", "indicators1", "indicators2", "plot_limit",
                       "db_url", "trade_source", "export", "exportfilename",
@@ -71,8 +74,9 @@ ARGS_HYPEROPT_LIST = ["hyperopt_list_best", "hyperopt_list_profitable",
ARGS_HYPEROPT_SHOW = ["hyperopt_list_best", "hyperopt_list_profitable", "hyperopt_show_index",
                      "print_json", "hyperopt_show_no_header"]

-NO_CONF_REQURIED = ["download-data", "list-timeframes", "list-markets", "list-pairs",
-                    "list-strategies", "list-hyperopts", "hyperopt-list", "hyperopt-show",
+NO_CONF_REQURIED = ["convert-data", "convert-trade-data", "download-data", "list-timeframes",
+                    "list-markets", "list-pairs", "list-strategies",
+                    "list-hyperopts", "hyperopt-list", "hyperopt-show",
                    "plot-dataframe", "plot-profit"]

NO_CONF_ALLOWED = ["create-userdir", "list-exchanges", "new-hyperopt", "new-strategy"]
@@ -151,7 +155,8 @@ class Arguments:
        self.parser = argparse.ArgumentParser(description='Free, open source crypto trading bot')
        self._build_args(optionlist=['version'], parser=self.parser)

-       from freqtrade.commands import (start_create_userdir, start_download_data,
+       from freqtrade.commands import (start_create_userdir, start_convert_data,
+                                       start_download_data,
                                        start_hyperopt_list, start_hyperopt_show,
                                        start_list_exchanges, start_list_hyperopts,
                                        start_list_markets, start_list_strategies,
@@ -288,6 +293,24 @@ class Arguments:
        download_data_cmd.set_defaults(func=start_download_data)
        self._build_args(optionlist=ARGS_DOWNLOAD_DATA, parser=download_data_cmd)

+       # Add convert-data subcommand
+       convert_data_cmd = subparsers.add_parser(
+           'convert-data',
+           help='Convert OHLCV data from one format to another.',
+           parents=[_common_parser],
+       )
+       convert_data_cmd.set_defaults(func=partial(start_convert_data, ohlcv=True))
+       self._build_args(optionlist=ARGS_CONVERT_DATA_OHLCV, parser=convert_data_cmd)
+
+       # Add convert-trade-data subcommand
+       convert_trade_data_cmd = subparsers.add_parser(
+           'convert-trade-data',
+           help='Convert trade-data from one format to another.',
+           parents=[_common_parser],
+       )
+       convert_trade_data_cmd.set_defaults(func=partial(start_convert_data, ohlcv=False))
+       self._build_args(optionlist=ARGS_CONVERT_DATA, parser=convert_trade_data_cmd)

        # Add Plotting subcommand
        plot_dataframe_cmd = subparsers.add_parser(
            'plot-dataframe',
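Both new subcommands route to the same handler, with `functools.partial` pre-binding the `ohlcv` flag. A minimal standalone sketch of that pattern (the handler body here is illustrative, not freqtrade's actual implementation):

``` python
import argparse
from functools import partial

def start_convert_data(args, ohlcv: bool = True):
    # Illustrative handler: the pre-bound flag selects OHLCV vs. trades conversion.
    print(f"converting {'ohlcv' if ohlcv else 'trades'} data")

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
ohlcv_cmd = subparsers.add_parser('convert-data')
ohlcv_cmd.set_defaults(func=partial(start_convert_data, ohlcv=True))
trades_cmd = subparsers.add_parser('convert-trade-data')
trades_cmd.set_defaults(func=partial(start_convert_data, ohlcv=False))

args = parser.parse_args(['convert-trade-data'])
args.func(vars(args))  # -> "converting trades data"
```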

freqtrade/commands/cli_options.py
@@ -333,6 +333,30 @@ AVAILABLE_CLI_OPTIONS = {
             'desired timeframe as specified as --timeframes/-t.',
        action='store_true',
    ),
+   "format_from": Arg(
+       '--format-from',
+       help='Source format for data conversion.',
+       choices=constants.AVAILABLE_DATAHANDLERS,
+       required=True,
+   ),
+   "format_to": Arg(
+       '--format-to',
+       help='Destination format for data conversion.',
+       choices=constants.AVAILABLE_DATAHANDLERS,
+       required=True,
+   ),
+   "dataformat_ohlcv": Arg(
+       '--data-format-ohlcv',
+       help='Storage format for downloaded ohlcv data. (default: `%(default)s`).',
+       choices=constants.AVAILABLE_DATAHANDLERS,
+       default='json'
+   ),
+   "dataformat_trades": Arg(
+       '--data-format-trades',
+       help='Storage format for downloaded trades data. (default: `%(default)s`).',
+       choices=constants.AVAILABLE_DATAHANDLERS,
+       default='jsongz'
+   ),
    "exchange": Arg(
        '--exchange',
        help=f'Exchange name (default: `{constants.DEFAULT_EXCHANGE}`). '
freqtrade/commands/data_commands.py
@@ -5,6 +5,8 @@ from typing import Any, Dict, List

import arrow

from freqtrade.configuration import TimeRange, setup_utils_configuration
+from freqtrade.data.converter import (convert_ohlcv_format,
+                                      convert_trades_format)
from freqtrade.data.history import (convert_trades_to_ohlcv,
                                    refresh_backtest_ohlcv_data,
                                    refresh_backtest_trades_data)
@@ -48,18 +50,21 @@ def start_download_data(args: Dict[str, Any]) -> None:
        if config.get('download_trades'):
            pairs_not_available = refresh_backtest_trades_data(
                exchange, pairs=config["pairs"], datadir=config['datadir'],
-               timerange=timerange, erase=bool(config.get("erase")))
+               timerange=timerange, erase=bool(config.get("erase")),
+               data_format=config['dataformat_trades'])

            # Convert downloaded trade data to different timeframes
            convert_trades_to_ohlcv(
                pairs=config["pairs"], timeframes=config["timeframes"],
-               datadir=config['datadir'], timerange=timerange,
-               erase=bool(config.get("erase")))
+               datadir=config['datadir'], timerange=timerange, erase=bool(config.get("erase")),
+               data_format_ohlcv=config['dataformat_ohlcv'],
+               data_format_trades=config['dataformat_trades'],
+           )
        else:
            pairs_not_available = refresh_backtest_ohlcv_data(
                exchange, pairs=config["pairs"], timeframes=config["timeframes"],
-               datadir=config['datadir'], timerange=timerange,
-               erase=bool(config.get("erase")))
+               datadir=config['datadir'], timerange=timerange, erase=bool(config.get("erase")),
+               data_format=config['dataformat_ohlcv'])

    except KeyboardInterrupt:
        sys.exit("SIGINT received, aborting ...")
@@ -68,3 +73,18 @@ def start_download_data(args: Dict[str, Any]) -> None:
    if pairs_not_available:
        logger.info(f"Pairs [{','.join(pairs_not_available)}] not available "
                    f"on exchange {exchange.name}.")


+def start_convert_data(args: Dict[str, Any], ohlcv: bool = True) -> None:
+    """
+    Convert data from one format to another
+    """
+    config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
+    if ohlcv:
+        convert_ohlcv_format(config,
+                             convert_from=args['format_from'], convert_to=args['format_to'],
+                             erase=args['erase'])
+    else:
+        convert_trades_format(config,
+                              convert_from=args['format_from'], convert_to=args['format_to'],
+                              erase=args['erase'])
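`start_convert_data` just builds a no-exchange utility configuration and delegates to the converter. A hedged sketch of what a CLI invocation roughly resolves to (the exact shape of the parsed-args dict is an assumption for illustration):

``` python
# Roughly what `freqtrade convert-trade-data --format-from jsongz
# --format-to json` resolves to; the args-dict shape is an assumption.
args = {
    'config': ['config.json'],
    'format_from': 'jsongz',
    'format_to': 'json',
    'erase': False,
}
# start_convert_data(args, ohlcv=False)  # would run the trades conversion
```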

freqtrade/configuration/configuration.py
@@ -364,9 +364,16 @@ class Configuration:
        self._args_to_config(config, argname='days',
                             logstring='Detected --days: {}')

        self._args_to_config(config, argname='download_trades',
                             logstring='Detected --dl-trades: {}')

+       self._args_to_config(config, argname='dataformat_ohlcv',
+                            logstring='Using "{}" to store OHLCV data.')
+
+       self._args_to_config(config, argname='dataformat_trades',
+                            logstring='Using "{}" to store trades data.')

    def _process_runmode(self, config: Dict[str, Any]) -> None:

        if not self.runmode:
freqtrade/constants.py
@@ -19,8 +19,10 @@ ORDERTYPE_POSSIBILITIES = ['limit', 'market']
ORDERTIF_POSSIBILITIES = ['gtc', 'fok', 'ioc']
AVAILABLE_PAIRLISTS = ['StaticPairList', 'VolumePairList',
                       'PrecisionFilter', 'PriceFilter', 'SpreadFilter']
+AVAILABLE_DATAHANDLERS = ['json', 'jsongz']
DRY_RUN_WALLET = 1000
MATH_CLOSE_PREC = 1e-14  # Precision used for float comparisons
+DEFAULT_DATAFRAME_COLUMNS = ['date', 'open', 'high', 'low', 'close', 'volume']

USERPATH_HYPEROPTS = 'hyperopts'
USERPATH_STRATEGIES = 'strategies'
@@ -217,11 +219,22 @@ CONF_SCHEMA = {
    'forcebuy_enable': {'type': 'boolean'},
    'internals': {
        'type': 'object',
        'default': {},
        'properties': {
            'process_throttle_secs': {'type': 'integer'},
            'interval': {'type': 'integer'},
            'sd_notify': {'type': 'boolean'},
        }
    },
+   'dataformat_ohlcv': {
+       'type': 'string',
+       'enum': AVAILABLE_DATAHANDLERS,
+       'default': 'json'
+   },
+   'dataformat_trades': {
+       'type': 'string',
+       'enum': AVAILABLE_DATAHANDLERS,
+       'default': 'jsongz'
+   }
},
'definitions': {
@@ -292,9 +305,14 @@ SCHEMA_TRADE_REQUIRED = [
    'unfilledtimeout',
    'stoploss',
    'minimal_roi',
    'internals',
+   'dataformat_ohlcv',
+   'dataformat_trades',
]

SCHEMA_MINIMAL_REQUIRED = [
    'exchange',
    'dry_run',
+   'dataformat_ohlcv',
+   'dataformat_trades',
]
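The new keys participate in the same JSON-schema validation as the rest of `CONF_SCHEMA`. A minimal self-contained sketch of how that fragment behaves, using the `jsonschema` package (the schema fragment mirrors the diff above; the standalone wiring is for illustration):

``` python
# Minimal sketch: validating the two new keys with jsonschema.
from jsonschema import ValidationError, validate

AVAILABLE_DATAHANDLERS = ['json', 'jsongz']
schema = {
    'type': 'object',
    'properties': {
        'dataformat_ohlcv': {'type': 'string', 'enum': AVAILABLE_DATAHANDLERS, 'default': 'json'},
        'dataformat_trades': {'type': 'string', 'enum': AVAILABLE_DATAHANDLERS, 'default': 'jsongz'},
    },
}
validate({'dataformat_ohlcv': 'jsongz'}, schema)   # passes
try:
    validate({'dataformat_ohlcv': 'csv'}, schema)  # not an available handler
except ValidationError as e:
    print(e.message)
```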

freqtrade/data/converter.py
@@ -2,10 +2,13 @@
"""
Functions to convert data from one format to another
"""
import logging
+from datetime import datetime, timezone
+from typing import Any, Dict

import pandas as pd
from pandas import DataFrame, to_datetime

+from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS

logger = logging.getLogger(__name__)
@@ -24,7 +27,7 @@ def parse_ticker_dataframe(ticker: list, timeframe: str, pair: str, *,
    :return: DataFrame
    """
    logger.debug("Parsing tickerlist to dataframe")
-   cols = ['date', 'open', 'high', 'low', 'close', 'volume']
+   cols = DEFAULT_DATAFRAME_COLUMNS
    frame = DataFrame(ticker, columns=cols)

    frame['date'] = to_datetime(frame['date'],
@@ -37,9 +40,29 @@ def parse_ticker_dataframe(ticker: list, timeframe: str, pair: str, *,
    # and fail with exception...
    frame = frame.astype(dtype={'open': 'float', 'high': 'float', 'low': 'float', 'close': 'float',
                                'volume': 'float'})
    return clean_ohlcv_dataframe(frame, timeframe, pair,
                                 fill_missing=fill_missing,
                                 drop_incomplete=drop_incomplete)


def clean_ohlcv_dataframe(data: DataFrame, timeframe: str, pair: str, *,
                          fill_missing: bool = True,
                          drop_incomplete: bool = True) -> DataFrame:
    """
    Cleanse an OHLCV dataframe by
      * Grouping it by date (removes duplicate ticks)
      * Dropping the last candle if requested
      * Filling up missing data (if requested)
    :param data: DataFrame containing ohlcv data.
    :param timeframe: timeframe (e.g. 5m). Used to fill up eventual missing data
    :param pair: Pair this data is for (used to warn if fillup was necessary)
    :param fill_missing: fill up missing candles with 0 candles
                         (see ohlcv_fill_up_missing_data for details)
    :param drop_incomplete: Drop the last candle of the dataframe, assuming it's incomplete
    :return: DataFrame
    """
    # group by index and aggregate results to eliminate duplicate ticks
-   frame = frame.groupby(by='date', as_index=False, sort=True).agg({
+   data = data.groupby(by='date', as_index=False, sort=True).agg({
        'open': 'first',
        'high': 'max',
        'low': 'min',
@@ -48,13 +71,13 @@ def parse_ticker_dataframe(ticker: list, timeframe: str, pair: str, *,
    })
    # eliminate partial candle
    if drop_incomplete:
-       frame.drop(frame.tail(1).index, inplace=True)
+       data.drop(data.tail(1).index, inplace=True)
        logger.debug('Dropping last candle')

    if fill_missing:
-       return ohlcv_fill_up_missing_data(frame, timeframe, pair)
+       return ohlcv_fill_up_missing_data(data, timeframe, pair)
    else:
-       return frame
+       return data
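The groupby-agg step is what de-duplicates candles sharing a timestamp. A minimal standalone pandas sketch of exactly that aggregation (the rows are invented for illustration):

``` python
# Rows sharing a date collapse to one candle: first open, max high,
# min low, last close, summed volume.
import pandas as pd

df = pd.DataFrame({
    'date':   ['2019-01-01 00:00', '2019-01-01 00:00', '2019-01-01 00:05'],
    'open':   [10.0, 10.2, 10.5],
    'high':   [10.4, 10.6, 10.9],
    'low':    [9.8, 10.0, 10.3],
    'close':  [10.2, 10.5, 10.7],
    'volume': [1.0, 2.0, 3.0],
})
clean = df.groupby(by='date', as_index=False, sort=True).agg({
    'open': 'first', 'high': 'max', 'low': 'min', 'close': 'last', 'volume': 'sum',
})
print(clean)  # two rows: the duplicates at 00:00 merged into one candle
```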

@@ -92,8 +115,26 @@ def ohlcv_fill_up_missing_data(dataframe: DataFrame, timeframe: str, pair: str) -> DataFrame:
    return df


def trim_dataframe(df: DataFrame, timerange, df_date_col: str = 'date') -> DataFrame:
    """
    Trim dataframe based on given timerange
    :param df: Dataframe to trim
    :param timerange: timerange (use start and end date if available)
    :param df_date_col: Column in the dataframe to use as Date column
    :return: trimmed dataframe
    """
    if timerange.starttype == 'date':
        start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
        df = df.loc[df[df_date_col] >= start, :]
    if timerange.stoptype == 'date':
        stop = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
        df = df.loc[df[df_date_col] <= stop, :]
    return df


def order_book_to_dataframe(bids: list, asks: list) -> DataFrame:
    """
    TODO: This should get a dedicated test
    Gets order book list, returns dataframe in the format suggested by creslin
    -------------------------------------------------------------------
     b_sum       b_size       bids       asks       a_size       a_sum
@@ -116,12 +157,13 @@ def order_book_to_dataframe(bids: list, asks: list) -> DataFrame:
    return frame

-def trades_to_ohlcv(trades: list, timeframe: str) -> list:
+def trades_to_ohlcv(trades: list, timeframe: str) -> DataFrame:
    """
    Converts trades list to ohlcv list
    TODO: This should get a dedicated test
    :param trades: List of trades, as returned by ccxt.fetch_trades.
    :param timeframe: Ticker timeframe to resample data to
-   :return: ohlcv timeframe as list (as returned by ccxt.fetch_ohlcv)
+   :return: ohlcv Dataframe.
    """
    from freqtrade.exchange import timeframe_to_minutes
    ticker_minutes = timeframe_to_minutes(timeframe)
@@ -131,8 +173,68 @@ def trades_to_ohlcv(trades: list, timeframe: str) -> list:

    df_new = df['price'].resample(f'{ticker_minutes}min').ohlc()
    df_new['volume'] = df['amount'].resample(f'{ticker_minutes}min').sum()
-   df_new['date'] = df_new.index.astype("int64") // 10 ** 6
+   df_new['date'] = df_new.index
    # Drop 0 volume rows
    df_new = df_new.dropna()
-   columns = ["date", "open", "high", "low", "close", "volume"]
-   return list(zip(*[df_new[x].values.tolist() for x in columns]))
+   return df_new[DEFAULT_DATAFRAME_COLUMNS]
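The conversion is plain pandas resampling; a minimal standalone sketch of the same technique (trade values are made up for illustration):

``` python
# Trades -> OHLCV via resampling, mirroring the code above.
import pandas as pd

trades = pd.DataFrame({
    'timestamp': [1546300800000, 1546300860000, 1546301100000],  # ms since epoch
    'price': [100.0, 101.0, 99.5],
    'amount': [0.5, 0.2, 1.0],
})
trades['date'] = pd.to_datetime(trades['timestamp'], unit='ms', utc=True)
df = trades.set_index('date')

ohlcv = df['price'].resample('5min').ohlc()
ohlcv['volume'] = df['amount'].resample('5min').sum()
ohlcv['date'] = ohlcv.index
ohlcv = ohlcv.dropna()  # drop intervals without trades
print(ohlcv[['date', 'open', 'high', 'low', 'close', 'volume']])
```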


def convert_trades_format(config: Dict[str, Any], convert_from: str, convert_to: str, erase: bool):
    """
    Convert trades from one format to another format.
    :param config: Config dictionary
    :param convert_from: Source format
    :param convert_to: Target format
    :param erase: Erase source data (does not apply if source and target format are identical)
    """
    from freqtrade.data.history.idatahandler import get_datahandler
    src = get_datahandler(config['datadir'], convert_from)
    trg = get_datahandler(config['datadir'], convert_to)

    if 'pairs' not in config:
        config['pairs'] = src.trades_get_pairs(config['datadir'])
    logger.info(f"Converting trades for {config['pairs']}")

    for pair in config['pairs']:
        data = src.trades_load(pair=pair)
        logger.info(f"Converting {len(data)} trades for {pair}")
        trg.trades_store(pair, data)
        if erase and convert_from != convert_to:
            logger.info(f"Deleting source Trade data for {pair}.")
            src.trades_purge(pair=pair)
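Programmatic use is symmetric to the CLI subcommand; a hedged usage sketch (the datadir path and pair are illustrative):

``` python
# Roughly equivalent to `freqtrade convert-trade-data` for a single pair.
from pathlib import Path

from freqtrade.data.converter import convert_trades_format

config = {'datadir': Path('user_data/data/kraken'), 'pairs': ['ETH/BTC']}
convert_trades_format(config, convert_from='jsongz', convert_to='json', erase=False)
```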


def convert_ohlcv_format(config: Dict[str, Any], convert_from: str, convert_to: str, erase: bool):
    """
    Convert ohlcv from one format to another format.
    :param config: Config dictionary
    :param convert_from: Source format
    :param convert_to: Target format
    :param erase: Erase source data (does not apply if source and target format are identical)
    """
    from freqtrade.data.history.idatahandler import get_datahandler
    src = get_datahandler(config['datadir'], convert_from)
    trg = get_datahandler(config['datadir'], convert_to)
    timeframes = config.get('timeframes', [config.get('ticker_interval')])
    logger.info(f"Converting OHLCV for timeframe {timeframes}")

    if 'pairs' not in config:
        config['pairs'] = []
        # Check timeframes or fall back to ticker_interval.
        for timeframe in timeframes:
            config['pairs'].extend(src.ohlcv_get_pairs(config['datadir'],
                                                       timeframe))
    logger.info(f"Converting OHLCV for {config['pairs']}")

    for timeframe in timeframes:
        for pair in config['pairs']:
            data = src.ohlcv_load(pair=pair, timeframe=timeframe,
                                  timerange=None,
                                  fill_missing=False,
                                  drop_incomplete=False,
                                  startup_candles=0)
            logger.info(f"Converting {len(data)} candles for {pair}")
            trg.ohlcv_store(pair=pair, timeframe=timeframe, data=data)
            if erase and convert_from != convert_to:
                logger.info(f"Deleting source data for {pair} / {timeframe}")
                src.ohlcv_purge(pair=pair, timeframe=timeframe)

freqtrade/data/history/__init__.py (new file, 14 lines)
@@ -0,0 +1,14 @@
"""
Handle historic data (ohlcv).

Includes:
* load data for a pair (or a list of pairs) from disk
* download data from exchange and store to disk
"""

from .history_utils import (convert_trades_to_ohlcv,  # noqa: F401
                            get_timerange, load_data, load_pair_history,
                            refresh_backtest_ohlcv_data,
                            refresh_backtest_trades_data, refresh_data,
                            validate_backtest_data)
from .idatahandler import get_datahandler  # noqa: F401

freqtrade/data/history/history_utils.py (moved from freqtrade/data/history.py)
@@ -1,138 +1,31 @@
"""
Handle historic data (ohlcv).

Includes:
* load data for a pair (or a list of pairs) from disk
* download data from exchange and store to disk
"""

import logging
import operator
from copy import deepcopy
from datetime import datetime, timezone
from pathlib import Path
-from typing import Any, Dict, List, Optional, Tuple
+from typing import Dict, List, Optional, Tuple

import arrow
from pandas import DataFrame

from freqtrade import misc
from freqtrade.configuration import TimeRange
+from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS
from freqtrade.data.converter import parse_ticker_dataframe, trades_to_ohlcv
+from freqtrade.data.history.idatahandler import IDataHandler, get_datahandler
from freqtrade.exceptions import OperationalException
-from freqtrade.exchange import (Exchange, timeframe_to_minutes,
-                                timeframe_to_seconds)
+from freqtrade.exchange import Exchange

logger = logging.getLogger(__name__)


-def trim_tickerlist(tickerlist: List[Dict], timerange: TimeRange) -> List[Dict]:
-    """
-    Trim tickerlist based on given timerange
-    """
-    if not tickerlist:
-        return tickerlist
-
-    start_index = 0
-    stop_index = len(tickerlist)
-
-    if timerange.starttype == 'date':
-        while (start_index < len(tickerlist) and
-               tickerlist[start_index][0] < timerange.startts * 1000):
-            start_index += 1
-
-    if timerange.stoptype == 'date':
-        while (stop_index > 0 and
-               tickerlist[stop_index-1][0] > timerange.stopts * 1000):
-            stop_index -= 1
-
-    if start_index > stop_index:
-        raise ValueError(f'The timerange [{timerange.startts},{timerange.stopts}] is incorrect')
-
-    return tickerlist[start_index:stop_index]
-
-
-def trim_dataframe(df: DataFrame, timerange: TimeRange, df_date_col: str = 'date') -> DataFrame:
-    """
-    Trim dataframe based on given timerange
-    :param df: Dataframe to trim
-    :param timerange: timerange (use start and end date if available)
-    :param df_date_col: Column in the dataframe to use as Date column
-    :return: trimmed dataframe
-    """
-    if timerange.starttype == 'date':
-        start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
-        df = df.loc[df[df_date_col] >= start, :]
-    if timerange.stoptype == 'date':
-        stop = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
-        df = df.loc[df[df_date_col] <= stop, :]
-    return df
-
-
-def load_tickerdata_file(datadir: Path, pair: str, timeframe: str,
-                         timerange: Optional[TimeRange] = None) -> List[Dict]:
-    """
-    Load a pair from file, either .json.gz or .json
-    :return: tickerlist or None if unsuccessful
-    """
-    filename = pair_data_filename(datadir, pair, timeframe)
-    pairdata = misc.file_load_json(filename)
-    if not pairdata:
-        return []
-
-    if timerange:
-        pairdata = trim_tickerlist(pairdata, timerange)
-    return pairdata
-
-
-def store_tickerdata_file(datadir: Path, pair: str,
-                          timeframe: str, data: list, is_zip: bool = False) -> None:
-    """
-    Stores tickerdata to file
-    """
-    filename = pair_data_filename(datadir, pair, timeframe)
-    misc.file_dump_json(filename, data, is_zip=is_zip)
-
-
-def load_trades_file(datadir: Path, pair: str,
-                     timerange: Optional[TimeRange] = None) -> List[Dict]:
-    """
-    Load a pair from file, either .json.gz or .json
-    :return: tradelist or empty list if unsuccessful
-    """
-    filename = pair_trades_filename(datadir, pair)
-    tradesdata = misc.file_load_json(filename)
-    if not tradesdata:
-        return []
-
-    return tradesdata
-
-
-def store_trades_file(datadir: Path, pair: str,
-                      data: list, is_zip: bool = True) -> None:
-    """
-    Stores trades data to file
-    """
-    filename = pair_trades_filename(datadir, pair)
-    misc.file_dump_json(filename, data, is_zip=is_zip)
-
-
-def _validate_pairdata(pair: str, pairdata: List[Dict], timerange: TimeRange) -> None:
-    if timerange.starttype == 'date' and pairdata[0][0] > timerange.startts * 1000:
-        logger.warning('Missing data at start for pair %s, data starts at %s',
-                       pair, arrow.get(pairdata[0][0] // 1000).strftime('%Y-%m-%d %H:%M:%S'))
-    if timerange.stoptype == 'date' and pairdata[-1][0] < timerange.stopts * 1000:
-        logger.warning('Missing data at end for pair %s, data ends at %s',
-                       pair, arrow.get(pairdata[-1][0] // 1000).strftime('%Y-%m-%d %H:%M:%S'))


def load_pair_history(pair: str,
                      timeframe: str,
-                     datadir: Path,
+                     datadir: Path, *,
                      timerange: Optional[TimeRange] = None,
                      fill_up_missing: bool = True,
                      drop_incomplete: bool = True,
                      startup_candles: int = 0,
+                     data_format: str = None,
+                     data_handler: IDataHandler = None,
                      ) -> DataFrame:
    """
    Load cached ticker history for the given pair.
@@ -140,39 +33,34 @@ def load_pair_history(pair: str,
    :param pair: Pair to load data for
    :param timeframe: Ticker timeframe (e.g. "5m")
    :param datadir: Path to the data storage location.
+   :param data_format: Format of the data. Ignored if data_handler is set.
    :param timerange: Limit data to be loaded to this timerange
    :param fill_up_missing: Fill missing values with "No action"-candles
    :param drop_incomplete: Drop last candle assuming it may be incomplete.
    :param startup_candles: Additional candles to load at the start of the period
+   :param data_handler: Initialized data-handler to use.
+                        Will be initialized from data_format if not set
    :return: DataFrame with ohlcv data, or empty DataFrame
    """
-   timerange_startup = deepcopy(timerange)
-   if startup_candles > 0 and timerange_startup:
-       timerange_startup.subtract_start(timeframe_to_seconds(timeframe) * startup_candles)
+   data_handler = get_datahandler(datadir, data_format, data_handler)

-   pairdata = load_tickerdata_file(datadir, pair, timeframe, timerange=timerange_startup)
-
-   if pairdata:
-       if timerange_startup:
-           _validate_pairdata(pair, pairdata, timerange_startup)
-       return parse_ticker_dataframe(pairdata, timeframe, pair=pair,
-                                     fill_missing=fill_up_missing,
-                                     drop_incomplete=drop_incomplete)
-   else:
-       logger.warning(
-           f'No history data for pair: "{pair}", timeframe: {timeframe}. '
-           'Use `freqtrade download-data` to download the data'
-       )
-       return DataFrame()
+   return data_handler.ohlcv_load(pair=pair,
+                                  timeframe=timeframe,
+                                  timerange=timerange,
+                                  fill_missing=fill_up_missing,
+                                  drop_incomplete=drop_incomplete,
+                                  startup_candles=startup_candles,
+                                  )


def load_data(datadir: Path,
              timeframe: str,
-             pairs: List[str],
+             pairs: List[str], *,
              timerange: Optional[TimeRange] = None,
              fill_up_missing: bool = True,
              startup_candles: int = 0,
-             fail_without_data: bool = False
+             fail_without_data: bool = False,
+             data_format: str = 'json',
              ) -> Dict[str, DataFrame]:
    """
    Load ticker history data for a list of pairs.
@@ -184,17 +72,22 @@ def load_data(datadir: Path,
    :param fill_up_missing: Fill missing values with "No action"-candles
    :param startup_candles: Additional candles to load at the start of the period
    :param fail_without_data: Raise OperationalException if no data is found.
+   :param data_format: Data format which should be used. Defaults to json
    :return: dict(<pair>:<Dataframe>)
    """
    result: Dict[str, DataFrame] = {}
    if startup_candles > 0 and timerange:
        logger.info(f'Using indicator startup period: {startup_candles} ...')

+   data_handler = get_datahandler(datadir, data_format)
+
    for pair in pairs:
        hist = load_pair_history(pair=pair, timeframe=timeframe,
                                 datadir=datadir, timerange=timerange,
                                 fill_up_missing=fill_up_missing,
-                                startup_candles=startup_candles)
+                                startup_candles=startup_candles,
+                                data_handler=data_handler
+                                )
        if not hist.empty:
            result[pair] = hist
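For callers, the only visible change is the optional format parameter. A hedged usage sketch (the datadir path and pair are illustrative):

``` python
# Loading candles for several pairs with the new data_format parameter.
from pathlib import Path

from freqtrade.data.history import load_data

data = load_data(datadir=Path('user_data/data/binance'), timeframe='5m',
                 pairs=['ETH/BTC'], data_format='json')
print({pair: len(df) for pair, df in data.items()})
```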

@@ -207,6 +100,7 @@ def refresh_data(datadir: Path,
                 timeframe: str,
                 pairs: List[str],
                 exchange: Exchange,
+                data_format: str = None,
                 timerange: Optional[TimeRange] = None,
                 ) -> None:
    """
@@ -218,70 +112,50 @@ def refresh_data(datadir: Path,
    :param exchange: Exchange object
    :param timerange: Limit data to be loaded to this timerange
    """
+   data_handler = get_datahandler(datadir, data_format)
    for pair in pairs:
        _download_pair_history(pair=pair, timeframe=timeframe,
                               datadir=datadir, timerange=timerange,
-                              exchange=exchange)
+                              exchange=exchange, data_handler=data_handler)


-def pair_data_filename(datadir: Path, pair: str, timeframe: str) -> Path:
-    pair_s = pair.replace("/", "_")
-    filename = datadir.joinpath(f'{pair_s}-{timeframe}.json')
-    return filename
-
-
-def pair_trades_filename(datadir: Path, pair: str) -> Path:
-    pair_s = pair.replace("/", "_")
-    filename = datadir.joinpath(f'{pair_s}-trades.json.gz')
-    return filename

-def _load_cached_data_for_updating(datadir: Path, pair: str, timeframe: str,
-                                   timerange: Optional[TimeRange]) -> Tuple[List[Any],
-                                                                            Optional[int]]:
+def _load_cached_data_for_updating(pair: str, timeframe: str, timerange: Optional[TimeRange],
+                                   data_handler: IDataHandler) -> Tuple[DataFrame, Optional[int]]:
    """
    Load cached data to download more data.
    If timerange is passed in, checks whether data preceding the stored data will be
    downloaded.
    If that's the case then what's available should be completely overwritten.
-   Only used by download_pair_history().
+   Otherwise downloads always start at the end of the available data to avoid data gaps.
+   Note: Only used by download_pair_history().
    """
-
-   since_ms = None
-
    # user sets timerange, so find the start time
+   start = None
    if timerange:
        if timerange.starttype == 'date':
-           since_ms = timerange.startts * 1000
-       elif timerange.stoptype == 'line':
-           num_minutes = timerange.stopts * timeframe_to_minutes(timeframe)
-           since_ms = arrow.utcnow().shift(minutes=num_minutes).timestamp * 1000
+           # TODO: convert to date for conversion
+           start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)

-   # read the cached file
-   data = load_tickerdata_file(datadir, pair, timeframe)
-   # remove the last item, could be incomplete candle
-   if data:
-       data.pop()
-   else:
-       data = []
-
-   if data:
-       if since_ms and since_ms < data[0][0]:
+   # Intentionally don't pass timerange in - since we need to load the full dataset.
+   data = data_handler.ohlcv_load(pair, timeframe=timeframe,
+                                  timerange=None, fill_missing=False,
+                                  drop_incomplete=True, warn_no_data=False)
+   if not data.empty:
+       if start and start < data.iloc[0]['date']:
            # Earlier data than existing data requested, redownload all
-           data = []
+           data = DataFrame(columns=DEFAULT_DATAFRAME_COLUMNS)
        else:
            # a part of the data was already downloaded, so download only the missing data
-           since_ms = data[-1][0] + 1
+           start = data.iloc[-1]['date']

-   return (data, since_ms)
+   start_ms = int(start.timestamp() * 1000) if start else None
+   return data, start_ms
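The resume-or-redownload decision is the core of the incremental download. A minimal standalone sketch of that logic (the function name and frame shapes are illustrative, not freqtrade's API):

``` python
import pandas as pd

def resume_point(stored: pd.DataFrame, requested_start):
    if stored.empty:
        return stored, requested_start             # nothing cached: start fresh
    if requested_start is not None and requested_start < stored.iloc[0]['date']:
        return stored.iloc[0:0], requested_start   # earlier data wanted: redownload all
    return stored, stored.iloc[-1]['date']         # resume from the last stored candle
```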


def _download_pair_history(datadir: Path,
                           exchange: Exchange,
-                          pair: str,
+                          pair: str, *,
                           timeframe: str = '5m',
-                          timerange: Optional[TimeRange] = None) -> bool:
+                          timerange: Optional[TimeRange] = None,
+                          data_handler: IDataHandler = None) -> bool:
    """
    Download latest candles from the exchange for the pair and timeframe passed in parameters
    The data is downloaded starting from the last correct data that
@@ -295,16 +169,22 @@ def _download_pair_history(datadir: Path,
    :param timerange: range of time to download
    :return: bool with success state
    """
+   data_handler = get_datahandler(datadir, data_handler=data_handler)
+
    try:
        logger.info(
            f'Download history data for pair: "{pair}", timeframe: {timeframe} '
            f'and store in {datadir}.'
        )

-       data, since_ms = _load_cached_data_for_updating(datadir, pair, timeframe, timerange)
+       # data, since_ms = _load_cached_data_for_updating_old(datadir, pair, timeframe, timerange)
+       data, since_ms = _load_cached_data_for_updating(pair, timeframe, timerange,
+                                                       data_handler=data_handler)

-       logger.debug("Current Start: %s", misc.format_ms_time(data[1][0]) if data else 'None')
-       logger.debug("Current End: %s", misc.format_ms_time(data[-1][0]) if data else 'None')
+       logger.debug("Current Start: %s",
+                    f"{data.iloc[0]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')
+       logger.debug("Current End: %s",
+                    f"{data.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')

        # Default since_ms to 30 days if nothing is given
        new_data = exchange.get_historic_ohlcv(pair=pair,
@@ -313,12 +193,20 @@ def _download_pair_history(datadir: Path,
                                               int(arrow.utcnow().shift(
                                                   days=-30).float_timestamp) * 1000
                                               )
-       data.extend(new_data)
+       # TODO: Maybe move parsing to exchange class (?)
+       new_dataframe = parse_ticker_dataframe(new_data, timeframe, pair,
+                                              fill_missing=False, drop_incomplete=True)
+       if data.empty:
+           data = new_dataframe
+       else:
+           data = data.append(new_dataframe)

-       logger.debug("New Start: %s", misc.format_ms_time(data[0][0]))
-       logger.debug("New End: %s", misc.format_ms_time(data[-1][0]))
+       logger.debug("New Start: %s",
+                    f"{data.iloc[0]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')
+       logger.debug("New End: %s",
+                    f"{data.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')

-       store_tickerdata_file(datadir, pair, timeframe, data=data)
+       data_handler.ohlcv_store(pair, timeframe, data=data)
        return True

    except Exception as e:
@@ -331,13 +219,14 @@ def _download_pair_history(datadir: Path,

def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes: List[str],
                                datadir: Path, timerange: Optional[TimeRange] = None,
-                               erase: bool = False) -> List[str]:
+                               erase: bool = False, data_format: str = None) -> List[str]:
    """
    Refresh stored ohlcv data for backtesting and hyperopt operations.
    Used by freqtrade download-data subcommand.
    :return: List of pairs that are not available.
    """
    pairs_not_available = []
+   data_handler = get_datahandler(datadir, data_format)
    for pair in pairs:
        if pair not in exchange.markets:
            pairs_not_available.append(pair)
@@ -345,23 +234,23 @@ def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes
            continue
        for timeframe in timeframes:

-           dl_file = pair_data_filename(datadir, pair, timeframe)
-           if erase and dl_file.exists():
-               logger.info(
-                   f'Deleting existing data for pair {pair}, interval {timeframe}.')
-               dl_file.unlink()
+           if erase:
+               if data_handler.ohlcv_purge(pair, timeframe):
+                   logger.info(
+                       f'Deleting existing data for pair {pair}, interval {timeframe}.')

            logger.info(f'Downloading pair {pair}, interval {timeframe}.')
            _download_pair_history(datadir=datadir, exchange=exchange,
                                   pair=pair, timeframe=str(timeframe),
-                                  timerange=timerange)
+                                  timerange=timerange, data_handler=data_handler)
    return pairs_not_available

-def _download_trades_history(datadir: Path,
-                             exchange: Exchange,
-                             pair: str,
-                             timerange: Optional[TimeRange] = None) -> bool:
+def _download_trades_history(exchange: Exchange,
+                             pair: str, *,
+                             timerange: Optional[TimeRange] = None,
+                             data_handler: IDataHandler
+                             ) -> bool:
    """
    Download trade history from the exchange.
    Appends to previously downloaded trades data.
@@ -370,7 +259,7 @@ def _download_trades_history(datadir: Path,

        since = timerange.startts * 1000 if timerange and timerange.starttype == 'date' else None

-       trades = load_trades_file(datadir, pair)
+       trades = data_handler.trades_load(pair)

        from_id = trades[-1]['id'] if trades else None

@@ -385,7 +274,7 @@ def _download_trades_history(datadir: Path,
                                           from_id=from_id,
                                           )
        trades.extend(new_trades[1])
-       store_trades_file(datadir, pair, trades)
+       data_handler.trades_store(pair, data=trades)

        logger.debug("New Start: %s", trades[0]['datetime'])
        logger.debug("New End: %s", trades[-1]['datetime'])
@@ -401,47 +290,52 @@ def _download_trades_history(datadir: Path,


def refresh_backtest_trades_data(exchange: Exchange, pairs: List[str], datadir: Path,
-                                timerange: TimeRange, erase: bool = False) -> List[str]:
+                                timerange: TimeRange, erase: bool = False,
+                                data_format: str = 'jsongz') -> List[str]:
    """
    Refresh stored trades data for backtesting and hyperopt operations.
    Used by freqtrade download-data subcommand.
    :return: List of pairs that are not available.
    """
    pairs_not_available = []
+   data_handler = get_datahandler(datadir, data_format=data_format)
    for pair in pairs:
        if pair not in exchange.markets:
            pairs_not_available.append(pair)
            logger.info(f"Skipping pair {pair}...")
            continue

-       dl_file = pair_trades_filename(datadir, pair)
-       if erase and dl_file.exists():
-           logger.info(
-               f'Deleting existing data for pair {pair}.')
-           dl_file.unlink()
+       if erase:
+           if data_handler.trades_purge(pair):
+               logger.info(f'Deleting existing data for pair {pair}.')

        logger.info(f'Downloading trades for pair {pair}.')
-       _download_trades_history(datadir=datadir, exchange=exchange,
+       _download_trades_history(exchange=exchange,
                                 pair=pair,
-                                timerange=timerange)
+                                timerange=timerange,
+                                data_handler=data_handler)
    return pairs_not_available

def convert_trades_to_ohlcv(pairs: List[str], timeframes: List[str],
-                           datadir: Path, timerange: TimeRange, erase: bool = False) -> None:
+                           datadir: Path, timerange: TimeRange, erase: bool = False,
+                           data_format_ohlcv: str = 'json',
+                           data_format_trades: str = 'jsongz') -> None:
    """
    Convert stored trades data to ohlcv data
    """
+   data_handler_trades = get_datahandler(datadir, data_format=data_format_trades)
+   data_handler_ohlcv = get_datahandler(datadir, data_format=data_format_ohlcv)

    for pair in pairs:
-       trades = load_trades_file(datadir, pair)
+       trades = data_handler_trades.trades_load(pair)
        for timeframe in timeframes:
-           ohlcv_file = pair_data_filename(datadir, pair, timeframe)
-           if erase and ohlcv_file.exists():
-               logger.info(f'Deleting existing data for pair {pair}, interval {timeframe}.')
-               ohlcv_file.unlink()
+           if erase:
+               if data_handler_ohlcv.ohlcv_purge(pair, timeframe):
+                   logger.info(f'Deleting existing data for pair {pair}, interval {timeframe}.')
            ohlcv = trades_to_ohlcv(trades, timeframe)
            # Store ohlcv
-           store_tickerdata_file(datadir, pair, timeframe, data=ohlcv)
+           data_handler_ohlcv.ohlcv_store(pair, timeframe, data=ohlcv)


def get_timerange(data: Dict[str, DataFrame]) -> Tuple[arrow.Arrow, arrow.Arrow]:

freqtrade/data/history/idatahandler.py (new file, 220 lines)
@@ -0,0 +1,220 @@
"""
Abstract datahandler interface.
Its subclasses handle storing and loading data from disk.

"""
import logging
from abc import ABC, abstractclassmethod, abstractmethod
from copy import deepcopy
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, List, Optional, Type

from pandas import DataFrame

from freqtrade.configuration import TimeRange
from freqtrade.data.converter import clean_ohlcv_dataframe, trim_dataframe
from freqtrade.exchange import timeframe_to_seconds

logger = logging.getLogger(__name__)

class IDataHandler(ABC):

    def __init__(self, datadir: Path) -> None:
        self._datadir = datadir

    @abstractclassmethod
    def ohlcv_get_pairs(cls, datadir: Path, timeframe: str) -> List[str]:
        """
        Returns a list of all pairs with ohlcv data available in this datadir
        for the specified timeframe
        :param datadir: Directory to search for ohlcv files
        :param timeframe: Timeframe to search pairs for
        :return: List of Pairs
        """

    @abstractmethod
    def ohlcv_store(self, pair: str, timeframe: str, data: DataFrame) -> None:
        """
        Store data in json format "values".
        format looks as follows:
        [[<date>,<open>,<high>,<low>,<close>]]
        :param pair: Pair - used to generate filename
        :param timeframe: Timeframe - used to generate filename
        :param data: Dataframe containing OHLCV data
        :return: None
        """

    @abstractmethod
    def _ohlcv_load(self, pair: str, timeframe: str,
                    timerange: Optional[TimeRange] = None,
                    ) -> DataFrame:
        """
        Internal method used to load data for one pair from disk.
        Implements the loading and conversion to a Pandas dataframe.
        Timerange trimming and dataframe validation happen outside of this method.
        :param pair: Pair to load data for
        :param timeframe: Ticker timeframe (e.g. "5m")
        :param timerange: Limit data to be loaded to this timerange.
                          Optionally implemented by subclasses to avoid loading
                          all data where possible.
        :return: DataFrame with ohlcv data, or empty DataFrame
        """

    @abstractmethod
    def ohlcv_purge(self, pair: str, timeframe: str) -> bool:
        """
        Remove data for this pair
        :param pair: Delete data for this pair.
        :param timeframe: Ticker timeframe (e.g. "5m")
        :return: True when deleted, false if file did not exist.
        """

    @abstractmethod
    def ohlcv_append(self, pair: str, timeframe: str, data: DataFrame) -> None:
        """
        Append data to existing data structures
        :param pair: Pair
        :param timeframe: Timeframe this ohlcv data is for
        :param data: Data to append.
        """

    @abstractclassmethod
    def trades_get_pairs(cls, datadir: Path) -> List[str]:
        """
        Returns a list of all pairs for which trade data is available in this datadir
        :param datadir: Directory to search for trades files
        :return: List of Pairs
        """

    @abstractmethod
    def trades_store(self, pair: str, data: List[Dict]) -> None:
        """
        Store trades data (list of Dicts) to file
        :param pair: Pair - used for filename
        :param data: List of Dicts containing trade data
        """

    @abstractmethod
    def trades_append(self, pair: str, data: List[Dict]):
        """
        Append data to existing files
        :param pair: Pair - used for filename
        :param data: List of Dicts containing trade data
        """

    @abstractmethod
    def trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> List[Dict]:
        """
        Load a pair from file, either .json.gz or .json
        :param pair: Load trades for this pair
        :param timerange: Timerange to load trades for - currently not implemented
        :return: List of trades
        """

    @abstractmethod
    def trades_purge(self, pair: str) -> bool:
        """
        Remove data for this pair
        :param pair: Delete data for this pair.
        :return: True when deleted, false if file did not exist.
        """

    def ohlcv_load(self, pair, timeframe: str,
                   timerange: Optional[TimeRange] = None,
                   fill_missing: bool = True,
                   drop_incomplete: bool = True,
                   startup_candles: int = 0,
                   warn_no_data: bool = True
                   ) -> DataFrame:
        """
        Load cached ticker history for the given pair.

        :param pair: Pair to load data for
        :param timeframe: Ticker timeframe (e.g. "5m")
        :param timerange: Limit data to be loaded to this timerange
        :param fill_missing: Fill missing values with "No action"-candles
        :param drop_incomplete: Drop last candle assuming it may be incomplete.
        :param startup_candles: Additional candles to load at the start of the period
        :param warn_no_data: Log a warning message when no data is found
        :return: DataFrame with ohlcv data, or empty DataFrame
        """
        # Fix startup period
        timerange_startup = deepcopy(timerange)
        if startup_candles > 0 and timerange_startup:
            timerange_startup.subtract_start(timeframe_to_seconds(timeframe) * startup_candles)

        pairdf = self._ohlcv_load(pair, timeframe,
                                  timerange=timerange_startup)
        if pairdf.empty:
            if warn_no_data:
                logger.warning(
                    f'No history data for pair: "{pair}", timeframe: {timeframe}. '
                    'Use `freqtrade download-data` to download the data'
                )
            return pairdf
        else:
            enddate = pairdf.iloc[-1]['date']

            if timerange_startup:
                self._validate_pairdata(pair, pairdf, timerange_startup)
                pairdf = trim_dataframe(pairdf, timerange_startup)

            # incomplete candles should only be dropped if we didn't trim the end beforehand.
            return clean_ohlcv_dataframe(pairdf, timeframe,
                                         pair=pair,
                                         fill_missing=fill_missing,
                                         drop_incomplete=(drop_incomplete and
                                                          enddate == pairdf.iloc[-1]['date']))

    def _validate_pairdata(self, pair, pairdata: DataFrame, timerange: TimeRange):
        """
        Validates pairdata for missing data at start and end, and logs warnings.
        :param pairdata: Dataframe to validate
        :param timerange: Timerange specified for start and end dates
        """

        if timerange.starttype == 'date':
            start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
            if pairdata.iloc[0]['date'] > start:
                logger.warning(f"Missing data at start for pair {pair}, "
                               f"data starts at {pairdata.iloc[0]['date']:%Y-%m-%d %H:%M:%S}")
        if timerange.stoptype == 'date':
            stop = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
            if pairdata.iloc[-1]['date'] < stop:
                logger.warning(f"Missing data at end for pair {pair}, "
                               f"data ends at {pairdata.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}")


def get_datahandlerclass(datatype: str) -> Type[IDataHandler]:
    """
    Get datahandler class.
    Could be done using Resolvers, but since this may be called often and resolvers
    are rather expensive, doing this directly should improve performance.
    :param datatype: datatype to use.
    :return: Datahandler class
    """

    if datatype == 'json':
        from .jsondatahandler import JsonDataHandler
        return JsonDataHandler
    elif datatype == 'jsongz':
        from .jsondatahandler import JsonGzDataHandler
        return JsonGzDataHandler
    else:
        raise ValueError(f"No datahandler for datatype {datatype} available.")


def get_datahandler(datadir: Path, data_format: str = None,
                    data_handler: IDataHandler = None) -> IDataHandler:
    """
    :param datadir: Folder to save data
    :param data_format: dataformat to use
    :param data_handler: returns this datahandler if it exists or initializes a new one
    """

    if not data_handler:
        HandlerClass = get_datahandlerclass(data_format or 'json')
        data_handler = HandlerClass(datadir)
    return data_handler
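Because dispatch is a plain if/elif rather than a resolver, adding a format means one new branch plus an `IDataHandler` subclass. A hedged illustration; the "csv" format and `CsvDataHandler` class are invented for this sketch and are not part of this PR:

``` python
# Hypothetical extension: a new branch in get_datahandlerclass would
# look like the comment below; only json/jsongz actually ship here.
#
#     elif datatype == 'csv':
#         from .csvdatahandler import CsvDataHandler
#         return CsvDataHandler
from pathlib import Path

from freqtrade.data.history.idatahandler import get_datahandler

dh = get_datahandler(Path('user_data/data/binance'), data_format='jsongz')
print(type(dh).__name__)  # JsonGzDataHandler
```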

freqtrade/data/history/jsondatahandler.py (new file, 177 lines)
@@ -0,0 +1,177 @@
import re
from pathlib import Path
from typing import Dict, List, Optional

import numpy as np
from pandas import DataFrame, read_json, to_datetime

from freqtrade import misc
from freqtrade.configuration import TimeRange
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS

from .idatahandler import IDataHandler


class JsonDataHandler(IDataHandler):

    _use_zip = False
    _columns = DEFAULT_DATAFRAME_COLUMNS
@classmethod
|
||||
def ohlcv_get_pairs(cls, datadir: Path, timeframe: str) -> List[str]:
|
||||
"""
|
||||
Returns a list of all pairs with ohlcv data available in this datadir
|
||||
for the specified timeframe
|
||||
:param datadir: Directory to search for ohlcv files
|
||||
:param timeframe: Timeframe to search pairs for
|
||||
:return: List of Pairs
|
||||
"""
|
||||
|
||||
_tmp = [re.search(r'^(\S+)(?=\-' + timeframe + '.json)', p.name)
|
||||
for p in datadir.glob(f"*{timeframe}.{cls._get_file_extension()}")]
|
||||
# Check if regex found something and only return these results
|
||||
return [match[0].replace('_', '/') for match in _tmp if match]
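So a file named `ETH_BTC-5m.json` maps back to the pair `ETH/BTC`. A hedged, self-contained sketch (temporary directory only):

```python
# Illustrative: on-disk names round-trip back to pair names.
import tempfile
from pathlib import Path

from freqtrade.data.history.jsondatahandler import JsonDataHandler

with tempfile.TemporaryDirectory() as tmp:
    Path(tmp, "ETH_BTC-5m.json").touch()
    assert JsonDataHandler.ohlcv_get_pairs(Path(tmp), "5m") == ["ETH/BTC"]
```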
    def ohlcv_store(self, pair: str, timeframe: str, data: DataFrame) -> None:
        """
        Store data in json format "values".
        format looks as follows:
        [[<date>,<open>,<high>,<low>,<close>,<volume>]]
        :param pair: Pair - used to generate filename
        :param timeframe: Timeframe - used to generate filename
        :param data: Dataframe containing OHLCV data
        :return: None
        """
        filename = self._pair_data_filename(self._datadir, pair, timeframe)
        _data = data.copy()
        # Convert date to int
        _data['date'] = _data['date'].astype(np.int64) // 1000 // 1000

        # Reset index, select only appropriate columns and save as json
        _data.reset_index(drop=True).loc[:, self._columns].to_json(
            filename, orient="values",
            compression='gzip' if self._use_zip else None)
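The `astype(np.int64) // 1000 // 1000` step converts pandas' nanosecond timestamps to millisecond epochs, so each stored row is a plain list. A hedged sketch of that conversion (values invented for illustration):

```python
# Illustrative: nanosecond timestamps become millisecond epochs on disk.
import numpy as np
from pandas import DataFrame, Timestamp

df = DataFrame([[Timestamp("2017-11-14 21:17", tz="UTC"),
                 0.001, 0.002, 0.001, 0.0015, 100.0]],
               columns=['date', 'open', 'high', 'low', 'close', 'volume'])
df['date'] = df['date'].astype(np.int64) // 1000 // 1000  # ns -> ms
print(df.values.tolist())
# [[1510694220000, 0.001, 0.002, 0.001, 0.0015, 100.0]]
```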
    def _ohlcv_load(self, pair: str, timeframe: str,
                    timerange: Optional[TimeRange] = None,
                    ) -> DataFrame:
        """
        Internal method used to load data for one pair from disk.
        Implements the loading and conversion to a Pandas dataframe.
        Timerange trimming and dataframe validation happens outside of this method.
        :param pair: Pair to load data
        :param timeframe: Ticker timeframe (e.g. "5m")
        :param timerange: Limit data to be loaded to this timerange.
                          Optionally implemented by subclasses to avoid loading
                          all data where possible.
        :return: DataFrame with ohlcv data, or empty DataFrame
        """
        filename = self._pair_data_filename(self._datadir, pair, timeframe)
        if not filename.exists():
            return DataFrame(columns=self._columns)
        pairdata = read_json(filename, orient='values')
        pairdata.columns = self._columns
        pairdata['date'] = to_datetime(pairdata['date'],
                                       unit='ms',
                                       utc=True,
                                       infer_datetime_format=True)
        return pairdata

    def ohlcv_purge(self, pair: str, timeframe: str) -> bool:
        """
        Remove data for this pair
        :param pair: Delete data for this pair.
        :param timeframe: Ticker timeframe (e.g. "5m")
        :return: True when deleted, false if file did not exist.
        """
        filename = self._pair_data_filename(self._datadir, pair, timeframe)
        if filename.exists():
            filename.unlink()
            return True
        return False

    def ohlcv_append(self, pair: str, timeframe: str, data: DataFrame) -> None:
        """
        Append data to existing data structures
        :param pair: Pair
        :param timeframe: Timeframe this ohlcv data is for
        :param data: Data to append.
        """
        raise NotImplementedError()

    @classmethod
    def trades_get_pairs(cls, datadir: Path) -> List[str]:
        """
        Returns a list of all pairs for which trade data is available in this datadir
        :param datadir: Directory to search for trades files
        :return: List of Pairs
        """
        _tmp = [re.search(r'^(\S+)(?=\-trades.json)', p.name)
                for p in datadir.glob(f"*trades.{cls._get_file_extension()}")]
        # Check if regex found something and only return these results to avoid exceptions.
        return [match[0].replace('_', '/') for match in _tmp if match]

    def trades_store(self, pair: str, data: List[Dict]) -> None:
        """
        Store trades data (list of Dicts) to file
        :param pair: Pair - used for filename
        :param data: List of Dicts containing trade data
        """
        filename = self._pair_trades_filename(self._datadir, pair)
        misc.file_dump_json(filename, data, is_zip=self._use_zip)

    def trades_append(self, pair: str, data: List[Dict]):
        """
        Append data to existing files
        :param pair: Pair - used for filename
        :param data: List of Dicts containing trade data
        """
        raise NotImplementedError()

    def trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> List[Dict]:
        """
        Load a pair from file, either .json.gz or .json
        # TODO: respect timerange ...
        :param pair: Load trades for this pair
        :param timerange: Timerange to load trades for - currently not implemented
        :return: List of trades
        """
        filename = self._pair_trades_filename(self._datadir, pair)
        tradesdata = misc.file_load_json(filename)
        if not tradesdata:
            return []

        return tradesdata

    def trades_purge(self, pair: str) -> bool:
        """
        Remove data for this pair
        :param pair: Delete data for this pair.
        :return: True when deleted, false if file did not exist.
        """
        filename = self._pair_trades_filename(self._datadir, pair)
        if filename.exists():
            filename.unlink()
            return True
        return False

    @classmethod
    def _pair_data_filename(cls, datadir: Path, pair: str, timeframe: str) -> Path:
        pair_s = misc.pair_to_filename(pair)
        filename = datadir.joinpath(f'{pair_s}-{timeframe}.{cls._get_file_extension()}')
        return filename

    @classmethod
    def _get_file_extension(cls):
        return "json.gz" if cls._use_zip else "json"

    @classmethod
    def _pair_trades_filename(cls, datadir: Path, pair: str) -> Path:
        pair_s = misc.pair_to_filename(pair)
        filename = datadir.joinpath(f'{pair_s}-trades.{cls._get_file_extension()}')
        return filename


class JsonGzDataHandler(JsonDataHandler):

    _use_zip = True
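The subclass flips a single flag; file naming, storage, and loading all follow from `_use_zip`. A quick illustration:

```python
# Illustrative: the only behavioural difference is the compressed extension.
from freqtrade.data.history.jsondatahandler import (JsonDataHandler,
                                                    JsonGzDataHandler)

assert JsonDataHandler._get_file_extension() == "json"
assert JsonGzDataHandler._get_file_extension() == "json.gz"
```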
@ -110,6 +110,7 @@ class Edge:
            timeframe=self.strategy.ticker_interval,
            timerange=self._timerange,
            startup_candles=self.strategy.startup_candle_count,
            data_format=self.config.get('dataformat_ohlcv', 'json'),
        )

        if not data:
@ -48,14 +48,16 @@ def file_dump_json(filename: Path, data: Any, is_zip: bool = False) -> None:
    :param data: JSON Data to save
    :return:
    """
    logger.info(f'dumping json to "{filename}"')

    if is_zip:
        if filename.suffix != '.gz':
            filename = filename.with_suffix('.gz')
        logger.info(f'dumping json to "{filename}"')

        with gzip.open(filename, 'w') as fp:
            rapidjson.dump(data, fp, default=str, number_mode=rapidjson.NM_NATIVE)
    else:
        logger.info(f'dumping json to "{filename}"')
        with open(filename, 'w') as fp:
            rapidjson.dump(data, fp, default=str, number_mode=rapidjson.NM_NATIVE)
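A hedged round-trip sketch for the zipped branch (the target name is illustrative); note that the `.gz` suffix is now enforced before the path is logged:

```python
# Hedged sketch: dump and reload gzipped JSON via the helpers above.
import tempfile
from pathlib import Path

from freqtrade.misc import file_dump_json, file_load_json

with tempfile.TemporaryDirectory() as tmp:
    target = Path(tmp) / "XRP_ETH-trades.json.gz"
    file_dump_json(target, [{"price": 0.001}], is_zip=True)
    assert file_load_json(target) == [{"price": 0.001}]
```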

@ -91,6 +93,12 @@ def file_load_json(file):
    return pairdata


def pair_to_filename(pair: str) -> str:
    for ch in ['/', '-', ' ', '.', '@', '$', '+', ':']:
        pair = pair.replace(ch, '_')
    return pair
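Every listed character collapses to an underscore; expected mappings, taken from the parametrized test later in this change set:

```python
# Grounded in the test table further below.
from freqtrade.misc import pair_to_filename

assert pair_to_filename("Fabric Token/ETH") == "Fabric_Token_ETH"
assert pair_to_filename("LC+/ETH") == "LC__ETH"
assert pair_to_filename("$PAC/BTC") == "_PAC_BTC"
```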

def format_ms_time(date: int) -> str:
    """
    Convert MS date to readable format.
@ -15,6 +15,7 @@ from pandas import DataFrame
from freqtrade.configuration import (TimeRange, remove_credentials,
                                     validate_config_consistency)
from freqtrade.data import history
from freqtrade.data.converter import trim_dataframe
from freqtrade.data.dataprovider import DataProvider
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import timeframe_to_minutes, timeframe_to_seconds
@ -118,6 +119,7 @@ class Backtesting:
            timerange=timerange,
            startup_candles=self.required_startup,
            fail_without_data=True,
            data_format=self.config.get('dataformat_ohlcv', 'json'),
        )

        min_date, max_date = history.get_timerange(data)
@ -397,7 +399,7 @@ class Backtesting:

        # Trim startup period from analyzed dataframe
        for pair, df in preprocessed.items():
            preprocessed[pair] = history.trim_dataframe(df, timerange)
            preprocessed[pair] = trim_dataframe(df, timerange)
        min_date, max_date = history.get_timerange(preprocessed)

        logger.info(
@ -22,7 +22,8 @@ from joblib import (Parallel, cpu_count, delayed, dump, load,
                    wrap_non_picklable_objects)
from pandas import DataFrame

from freqtrade.data.history import get_timerange, trim_dataframe
from freqtrade.data.converter import trim_dataframe
from freqtrade.data.history import get_timerange
from freqtrade.exceptions import OperationalException
from freqtrade.misc import plural, round_dict
from freqtrade.optimize.backtesting import Backtesting
@ -3,11 +3,14 @@ from pathlib import Path
from typing import Any, Dict, List

import pandas as pd

from freqtrade.configuration import TimeRange
from freqtrade.data import history
from freqtrade.data.btanalysis import (combine_tickers_with_mean,
                                       create_cum_profit,
                                       extract_trades_of_period, load_trades)
from freqtrade.data.converter import trim_dataframe
from freqtrade.data.history import load_data
from freqtrade.misc import pair_to_filename
from freqtrade.resolvers import StrategyResolver

logger = logging.getLogger(__name__)
@ -36,18 +39,19 @@ def init_plotscript(config):
    # Set timerange to use
    timerange = TimeRange.parse_timerange(config.get("timerange"))

    tickers = history.load_data(
    tickers = load_data(
        datadir=config.get("datadir"),
        pairs=pairs,
        timeframe=config.get('ticker_interval', '5m'),
        timerange=timerange,
        data_format=config.get('dataformat_ohlcv', 'json'),
    )

    trades = load_trades(config['trade_source'],
                         db_url=config.get('db_url'),
                         exportfilename=config.get('exportfilename'),
                         )
    trades = history.trim_dataframe(trades, timerange, 'open_time')
    trades = trim_dataframe(trades, timerange, 'open_time')
    return {"tickers": tickers,
            "trades": trades,
            "pairs": pairs,
@ -374,8 +378,8 @@ def generate_plot_filename(pair: str, timeframe: str) -> str:
    """
    Generate filenames per pair/timeframe to be used for storing plots
    """
    pair_name = pair.replace("/", "_")
    file_name = 'freqtrade-plot-' + pair_name + '-' + timeframe + '.html'
    pair_s = pair_to_filename(pair)
    file_name = 'freqtrade-plot-' + pair_s + '-' + timeframe + '.html'

    logger.info('Generate plot file for %s', pair)
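Illustrative only, assuming `generate_plot_filename` returns the name it builds, as its callers suggest; plot names now share `pair_to_filename`'s sanitization:

```python
# Hedged sketch: special characters in pair names no longer leak into filenames.
from freqtrade.plot.plotting import generate_plot_filename

print(generate_plot_filename("LC+/ETH", "5m"))  # freqtrade-plot-LC__ETH-5m.html
```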
@ -439,7 +439,7 @@ class IStrategy(ABC):
        else:
            return current_profit > roi

    def tickerdata_to_dataframe(self, tickerdata: Dict[str, List]) -> Dict[str, DataFrame]:
    def tickerdata_to_dataframe(self, tickerdata: Dict[str, DataFrame]) -> Dict[str, DataFrame]:
        """
        Creates a dataframe and populates indicators for given ticker data
        Used by optimize operations only, not during dry / live runs.
@ -4,11 +4,11 @@ from unittest.mock import MagicMock, PropertyMock

import pytest

from freqtrade.commands import (start_create_userdir, start_download_data,
                                start_hyperopt_list, start_hyperopt_show,
                                start_list_exchanges, start_list_markets,
                                start_list_hyperopts, start_list_strategies,
                                start_list_timeframes,
from freqtrade.commands import (start_convert_data, start_create_userdir,
                                start_download_data, start_hyperopt_list,
                                start_hyperopt_show, start_list_exchanges,
                                start_list_hyperopts, start_list_markets,
                                start_list_strategies, start_list_timeframes,
                                start_new_hyperopt, start_new_strategy,
                                start_test_pairlist, start_trading)
from freqtrade.configuration import setup_utils_configuration
@ -973,3 +973,47 @@ def test_hyperopt_show(mocker, capsys, hyperopt_results):
    with pytest.raises(OperationalException,
                       match="The index of the epoch to show should be less than 4."):
        start_hyperopt_show(pargs)


def test_convert_data(mocker, testdatadir):
    ohlcv_mock = mocker.patch("freqtrade.commands.data_commands.convert_ohlcv_format")
    trades_mock = mocker.patch("freqtrade.commands.data_commands.convert_trades_format")
    args = [
        "convert-data",
        "--format-from",
        "json",
        "--format-to",
        "jsongz",
        "--datadir",
        str(testdatadir),
    ]
    pargs = get_args(args)
    pargs['config'] = None
    start_convert_data(pargs, True)
    assert trades_mock.call_count == 0
    assert ohlcv_mock.call_count == 1
    assert ohlcv_mock.call_args[1]['convert_from'] == 'json'
    assert ohlcv_mock.call_args[1]['convert_to'] == 'jsongz'
    assert ohlcv_mock.call_args[1]['erase'] is False


def test_convert_data_trades(mocker, testdatadir):
    ohlcv_mock = mocker.patch("freqtrade.commands.data_commands.convert_ohlcv_format")
    trades_mock = mocker.patch("freqtrade.commands.data_commands.convert_trades_format")
    args = [
        "convert-trade-data",
        "--format-from",
        "jsongz",
        "--format-to",
        "json",
        "--datadir",
        str(testdatadir),
    ]
    pargs = get_args(args)
    pargs['config'] = None
    start_convert_data(pargs, False)
    assert ohlcv_mock.call_count == 0
    assert trades_mock.call_count == 1
    assert trades_mock.call_args[1]['convert_from'] == 'jsongz'
    assert trades_mock.call_args[1]['convert_to'] == 'json'
    assert trades_mock.call_args[1]['erase'] is False
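For reference, these tests drive the same paths as the shell commands (the data directory here is illustrative):

```
freqtrade convert-data --format-from json --format-to jsongz --datadir user_data/data/binance
freqtrade convert-trade-data --format-from jsongz --format-to json --datadir user_data/data/binance
```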
@ -1,9 +1,15 @@
# pragma pylint: disable=missing-docstring, C0103
import logging

from freqtrade.data.converter import parse_ticker_dataframe, ohlcv_fill_up_missing_data
from freqtrade.data.history import load_pair_history, validate_backtest_data, get_timerange
from freqtrade.configuration.timerange import TimeRange
from freqtrade.data.converter import (convert_ohlcv_format,
                                      convert_trades_format,
                                      ohlcv_fill_up_missing_data,
                                      parse_ticker_dataframe, trim_dataframe)
from freqtrade.data.history import (get_timerange, load_data,
                                    load_pair_history, validate_backtest_data)
from tests.conftest import log_has
from tests.data.test_history import _backup_file, _clean_test_file


def test_dataframe_correct_columns(result):
@ -145,3 +151,113 @@ def test_ohlcv_drop_incomplete(caplog):
    assert len(data) == 3

    assert log_has("Dropping last candle", caplog)


def test_trim_dataframe(testdatadir) -> None:
    data = load_data(
        datadir=testdatadir,
        timeframe='1m',
        pairs=['UNITTEST/BTC']
    )['UNITTEST/BTC']
    min_date = int(data.iloc[0]['date'].timestamp())
    max_date = int(data.iloc[-1]['date'].timestamp())
    data_modify = data.copy()

    # Remove first 30 minutes (1800 s)
    tr = TimeRange('date', None, min_date + 1800, 0)
    data_modify = trim_dataframe(data_modify, tr)
    assert not data_modify.equals(data)
    assert len(data_modify) < len(data)
    assert len(data_modify) == len(data) - 30
    assert all(data_modify.iloc[-1] == data.iloc[-1])
    assert all(data_modify.iloc[0] == data.iloc[30])

    data_modify = data.copy()
    # Remove last 30 minutes (1800 s)
    tr = TimeRange(None, 'date', 0, max_date - 1800)
    data_modify = trim_dataframe(data_modify, tr)
    assert not data_modify.equals(data)
    assert len(data_modify) < len(data)
    assert len(data_modify) == len(data) - 30
    assert all(data_modify.iloc[0] == data.iloc[0])
    assert all(data_modify.iloc[-1] == data.iloc[-31])

    data_modify = data.copy()
    # Remove first 25 minutes (1500 s) and last 30 minutes (1800 s)
    tr = TimeRange('date', 'date', min_date + 1500, max_date - 1800)
    data_modify = trim_dataframe(data_modify, tr)
    assert not data_modify.equals(data)
    assert len(data_modify) < len(data)
    assert len(data_modify) == len(data) - 55
    # first row matches 25th original row
    assert all(data_modify.iloc[0] == data.iloc[25])


def test_convert_trades_format(mocker, default_conf, testdatadir):
    file = testdatadir / "XRP_ETH-trades.json.gz"
    file_new = testdatadir / "XRP_ETH-trades.json"
    _backup_file(file, copy_file=True)
    default_conf['datadir'] = testdatadir

    assert not file_new.exists()

    convert_trades_format(default_conf, convert_from='jsongz',
                          convert_to='json', erase=False)

    assert file_new.exists()
    assert file.exists()

    # Remove original file
    file.unlink()
    # Convert back
    convert_trades_format(default_conf, convert_from='json',
                          convert_to='jsongz', erase=True)

    assert file.exists()
    assert not file_new.exists()

    _clean_test_file(file)
    if file_new.exists():
        file_new.unlink()


def test_convert_ohlcv_format(mocker, default_conf, testdatadir):
    file1 = testdatadir / "XRP_ETH-5m.json"
    file1_new = testdatadir / "XRP_ETH-5m.json.gz"
    file2 = testdatadir / "XRP_ETH-1m.json"
    file2_new = testdatadir / "XRP_ETH-1m.json.gz"
    _backup_file(file1, copy_file=True)
    _backup_file(file2, copy_file=True)
    default_conf['datadir'] = testdatadir
    default_conf['pairs'] = ['XRP_ETH']
    default_conf['timeframes'] = ['1m', '5m']

    assert not file1_new.exists()
    assert not file2_new.exists()

    convert_ohlcv_format(default_conf, convert_from='json',
                         convert_to='jsongz', erase=False)

    assert file1_new.exists()
    assert file2_new.exists()
    assert file1.exists()
    assert file2.exists()

    # Remove original files
    file1.unlink()
    file2.unlink()
    # Convert back
    convert_ohlcv_format(default_conf, convert_from='jsongz',
                         convert_to='json', erase=True)

    assert file1.exists()
    assert file2.exists()
    assert not file1_new.exists()
    assert not file2_new.exists()

    _clean_test_file(file1)
    _clean_test_file(file2)
    if file1_new.exists():
        file1_new.unlink()
    if file2_new.exists():
        file2_new.unlink()
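A hedged programmatic sketch mirroring the tests above; the config keys follow the test setup and the data directory is illustrative:

```python
# Illustrative only: convert stored data between formats from Python.
from pathlib import Path

from freqtrade.data.converter import convert_ohlcv_format, convert_trades_format

config = {'datadir': Path('user_data/data/binance'),
          'pairs': ['XRP_ETH'],
          'timeframes': ['1m', '5m']}
convert_ohlcv_format(config, convert_from='json', convert_to='jsongz', erase=False)
convert_trades_format(config, convert_from='jsongz', convert_to='json', erase=False)
```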
@ -7,21 +7,21 @@ from shutil import copyfile
from unittest.mock import MagicMock, PropertyMock

import arrow
import pytest
from pandas import DataFrame
from pandas.testing import assert_frame_equal

from freqtrade.configuration import TimeRange
from freqtrade.data.history import (_download_pair_history,
                                    _download_trades_history,
                                    _load_cached_data_for_updating,
                                    convert_trades_to_ohlcv, get_timerange,
                                    load_data, load_pair_history,
                                    load_tickerdata_file, pair_data_filename,
                                    pair_trades_filename,
                                    refresh_backtest_ohlcv_data,
                                    refresh_backtest_trades_data,
                                    refresh_data,
                                    trim_dataframe, trim_tickerlist,
                                    validate_backtest_data)
from freqtrade.data.converter import parse_ticker_dataframe
from freqtrade.data.history.history_utils import (
    _download_pair_history, _download_trades_history,
    _load_cached_data_for_updating, convert_trades_to_ohlcv, get_timerange,
    load_data, load_pair_history, refresh_backtest_ohlcv_data,
    refresh_backtest_trades_data, refresh_data, validate_backtest_data)
from freqtrade.data.history.idatahandler import (IDataHandler, get_datahandler,
                                                 get_datahandlerclass)
from freqtrade.data.history.jsondatahandler import (JsonDataHandler,
                                                    JsonGzDataHandler)
from freqtrade.exchange import timeframe_to_minutes
from freqtrade.misc import file_dump_json
from freqtrade.resolvers import StrategyResolver
@ -96,8 +96,9 @@ def test_load_data_1min_ticker(ticker_history, mocker, caplog, testdatadir) -> None:


def test_load_data_startup_candles(mocker, caplog, default_conf, testdatadir) -> None:
    ltfmock = mocker.patch('freqtrade.data.history.load_tickerdata_file',
                           MagicMock(return_value=None))
    ltfmock = mocker.patch(
        'freqtrade.data.history.jsondatahandler.JsonDataHandler._ohlcv_load',
        MagicMock(return_value=DataFrame()))
    timerange = TimeRange('date', None, 1510639620, 0)
    load_pair_history(pair='UNITTEST/BTC', timeframe='1m',
                      datadir=testdatadir, timerange=timerange,
@ -143,27 +144,52 @@ def test_testdata_path(testdatadir) -> None:
    assert str(Path('tests') / 'testdata') in str(testdatadir)


def test_pair_data_filename():
    fn = pair_data_filename(Path('freqtrade/hello/world'), 'ETH/BTC', '5m')
@pytest.mark.parametrize("pair,expected_result", [
    ("ETH/BTC", 'freqtrade/hello/world/ETH_BTC-5m.json'),
    ("Fabric Token/ETH", 'freqtrade/hello/world/Fabric_Token_ETH-5m.json'),
    ("ETHH20", 'freqtrade/hello/world/ETHH20-5m.json'),
    (".XBTBON2H", 'freqtrade/hello/world/_XBTBON2H-5m.json'),
    ("ETHUSD.d", 'freqtrade/hello/world/ETHUSD_d-5m.json'),
    ("ACC_OLD/BTC", 'freqtrade/hello/world/ACC_OLD_BTC-5m.json'),
])
def test_json_pair_data_filename(pair, expected_result):
    fn = JsonDataHandler._pair_data_filename(Path('freqtrade/hello/world'), pair, '5m')
    assert isinstance(fn, Path)
    assert fn == Path('freqtrade/hello/world/ETH_BTC-5m.json')


def test_pair_trades_filename():
    fn = pair_trades_filename(Path('freqtrade/hello/world'), 'ETH/BTC')
    assert fn == Path(expected_result)
    fn = JsonGzDataHandler._pair_data_filename(Path('freqtrade/hello/world'), pair, '5m')
    assert isinstance(fn, Path)
    assert fn == Path('freqtrade/hello/world/ETH_BTC-trades.json.gz')
    assert fn == Path(expected_result + '.gz')


def test_load_cached_data_for_updating(mocker) -> None:
    datadir = Path(__file__).parent.parent.joinpath('testdata')
@pytest.mark.parametrize("pair,expected_result", [
    ("ETH/BTC", 'freqtrade/hello/world/ETH_BTC-trades.json'),
    ("Fabric Token/ETH", 'freqtrade/hello/world/Fabric_Token_ETH-trades.json'),
    ("ETHH20", 'freqtrade/hello/world/ETHH20-trades.json'),
    (".XBTBON2H", 'freqtrade/hello/world/_XBTBON2H-trades.json'),
    ("ETHUSD.d", 'freqtrade/hello/world/ETHUSD_d-trades.json'),
    ("ACC_OLD_BTC", 'freqtrade/hello/world/ACC_OLD_BTC-trades.json'),
])
def test_json_pair_trades_filename(pair, expected_result):
    fn = JsonDataHandler._pair_trades_filename(Path('freqtrade/hello/world'), pair)
    assert isinstance(fn, Path)
    assert fn == Path(expected_result)

    fn = JsonGzDataHandler._pair_trades_filename(Path('freqtrade/hello/world'), pair)
    assert isinstance(fn, Path)
    assert fn == Path(expected_result + '.gz')


def test_load_cached_data_for_updating(mocker, testdatadir) -> None:

    data_handler = get_datahandler(testdatadir, 'json')

    test_data = None
    test_filename = datadir.joinpath('UNITTEST_BTC-1m.json')
    test_filename = testdatadir.joinpath('UNITTEST_BTC-1m.json')
    with open(test_filename, "rt") as file:
        test_data = json.load(file)

    # change now time to test 'line' cases
    test_data_df = parse_ticker_dataframe(test_data, '1m', 'UNITTEST/BTC',
                                          fill_missing=False, drop_incomplete=False)
    # now = last cached item + 1 hour
    now_ts = test_data[-1][0] / 1000 + 60 * 60
    mocker.patch('arrow.utcnow', return_value=arrow.get(now_ts))
@ -171,72 +197,36 @@ def test_load_cached_data_for_updating(mocker) -> None:
    # timeframe starts earlier than the cached data
    # should fully update data
    timerange = TimeRange('date', None, test_data[0][0] / 1000 - 1, 0)
    data, start_ts = _load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
    assert data == []
    data, start_ts = _load_cached_data_for_updating('UNITTEST/BTC', '1m', timerange, data_handler)
    assert data.empty
    assert start_ts == test_data[0][0] - 1000

    # same with 'line' timeframe
    num_lines = (test_data[-1][0] - test_data[1][0]) / 1000 / 60 + 120
    data, start_ts = _load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m',
                                                    TimeRange(None, 'line', 0, -num_lines))
    assert data == []
    assert start_ts < test_data[0][0] - 1

    # timeframe starts in the center of the cached data
    # should return the cached data w/o the last item
    timerange = TimeRange('date', None, test_data[0][0] / 1000 + 1, 0)
    data, start_ts = _load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
    assert data == test_data[:-1]
    assert test_data[-2][0] < start_ts < test_data[-1][0]
    data, start_ts = _load_cached_data_for_updating('UNITTEST/BTC', '1m', timerange, data_handler)

    # same with 'line' timeframe
    num_lines = (test_data[-1][0] - test_data[1][0]) / 1000 / 60 + 30
    timerange = TimeRange(None, 'line', 0, -num_lines)
    data, start_ts = _load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
    assert data == test_data[:-1]
    assert test_data[-2][0] < start_ts < test_data[-1][0]
    assert_frame_equal(data, test_data_df.iloc[:-1])
    assert test_data[-2][0] <= start_ts < test_data[-1][0]

    # timeframe starts after the cached data
    # should return the cached data w/o the last item
    timerange = TimeRange('date', None, test_data[-1][0] / 1000 + 1, 0)
    data, start_ts = _load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
    assert data == test_data[:-1]
    assert test_data[-2][0] < start_ts < test_data[-1][0]

    # Try loading last 30 lines.
    # Not supported by _load_cached_data_for_updating, we always need to get the full data.
    num_lines = 30
    timerange = TimeRange(None, 'line', 0, -num_lines)
    data, start_ts = _load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
    assert data == test_data[:-1]
    assert test_data[-2][0] < start_ts < test_data[-1][0]

    # no timeframe is set
    # should return the cached data w/o the last item
    num_lines = 30
    timerange = TimeRange(None, 'line', 0, -num_lines)
    data, start_ts = _load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
    assert data == test_data[:-1]
    assert test_data[-2][0] < start_ts < test_data[-1][0]
    timerange = TimeRange('date', None, test_data[-1][0] / 1000 + 100, 0)
    data, start_ts = _load_cached_data_for_updating('UNITTEST/BTC', '1m', timerange, data_handler)
    assert_frame_equal(data, test_data_df.iloc[:-1])
    assert test_data[-2][0] <= start_ts < test_data[-1][0]

    # no datafile exists
    # should return timestamp start time
    timerange = TimeRange('date', None, now_ts - 10000, 0)
    data, start_ts = _load_cached_data_for_updating(datadir, 'NONEXIST/BTC', '1m', timerange)
    assert data == []
    data, start_ts = _load_cached_data_for_updating('NONEXIST/BTC', '1m', timerange, data_handler)
    assert data.empty
    assert start_ts == (now_ts - 10000) * 1000

    # same with 'line' timeframe
    num_lines = 30
    timerange = TimeRange(None, 'line', 0, -num_lines)
    data, start_ts = _load_cached_data_for_updating(datadir, 'NONEXIST/BTC', '1m', timerange)
    assert data == []
    assert start_ts == (now_ts - num_lines * 60) * 1000

    # no datafile exists, no timeframe is set
    # should return an empty array and None
    data, start_ts = _load_cached_data_for_updating(datadir, 'NONEXIST/BTC', '1m', None)
    assert data == []
    data, start_ts = _load_cached_data_for_updating('NONEXIST/BTC', '1m', None, data_handler)
    assert data.empty
    assert start_ts is None
@ -293,7 +283,9 @@ def test_download_pair_history2(mocker, default_conf, testdatadir) -> None:
        [1509836520000, 0.00162008, 0.00162008, 0.00162008, 0.00162008, 108.14853839],
        [1509836580000, 0.00161, 0.00161, 0.00161, 0.00161, 82.390199]
    ]
    json_dump_mock = mocker.patch('freqtrade.misc.file_dump_json', return_value=None)
    json_dump_mock = mocker.patch(
        'freqtrade.data.history.jsondatahandler.JsonDataHandler.ohlcv_store',
        return_value=None)
    mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv', return_value=tick)
    exchange = get_patched_exchange(mocker, default_conf)
    _download_pair_history(testdatadir, exchange, pair="UNITTEST/BTC", timeframe='1m')
@ -325,17 +317,6 @@ def test_download_backtesting_data_exception(ticker_history, mocker, caplog,
    )


def test_load_tickerdata_file(testdatadir) -> None:
    # 7 does not exist in either format.
    assert not load_tickerdata_file(testdatadir, 'UNITTEST/BTC', '7m')
    # 1 exists only as a .json
    tickerdata = load_tickerdata_file(testdatadir, 'UNITTEST/BTC', '1m')
    assert _BTC_UNITTEST_LENGTH == len(tickerdata)
    # 8 .json is empty and will fail if it's loaded. .json.gz is a copy of 1.json
    tickerdata = load_tickerdata_file(testdatadir, 'UNITTEST/BTC', '8m')
    assert _BTC_UNITTEST_LENGTH == len(tickerdata)


def test_load_partial_missing(testdatadir, caplog) -> None:
    # Make sure we start fresh - test missing data at start
    start = arrow.get('2018-01-01T00:00:00')
@ -361,6 +342,7 @@ def test_load_partial_missing(testdatadir, caplog) -> None:
    # timedifference in 5 minutes
    td = ((end - start).total_seconds() // 60 // 5) + 1
    assert td != len(tickerdata['UNITTEST/BTC'])

    # Shift endtime with +5 - as last candle is dropped (partial candle)
    end_real = arrow.get(tickerdata['UNITTEST/BTC'].iloc[-1, 0]).shift(minutes=5)
    assert log_has(f'Missing data at end for pair '
@ -391,98 +373,6 @@ def test_init_with_refresh(default_conf, mocker) -> None:
    )


def test_trim_tickerlist(testdatadir) -> None:
    file = testdatadir / 'UNITTEST_BTC-1m.json'
    with open(file) as data_file:
        ticker_list = json.load(data_file)
    ticker_list_len = len(ticker_list)

    # Test the pattern ^(\d{8})-(\d{8})$
    # This pattern extracts a window between the dates
    timerange = TimeRange('date', 'date', ticker_list[5][0] / 1000, ticker_list[10][0] / 1000 - 1)
    ticker = trim_tickerlist(ticker_list, timerange)
    ticker_len = len(ticker)

    assert ticker_len == 5
    assert ticker_list[0] is not ticker[0]  # The first element should be different
    assert ticker_list[5] is ticker[0]  # The list starts at the index 5
    assert ticker_list[9] is ticker[-1]  # The list ends at the index 9 (5 elements)

    # Test the pattern ^-(\d{8})$
    # This pattern extracts elements from the start to the date
    timerange = TimeRange(None, 'date', 0, ticker_list[10][0] / 1000 - 1)
    ticker = trim_tickerlist(ticker_list, timerange)
    ticker_len = len(ticker)

    assert ticker_len == 10
    assert ticker_list[0] is ticker[0]  # The start of the list is included
    assert ticker_list[9] is ticker[-1]  # The element 10 is not included

    # Test the pattern ^(\d{8})-$
    # This pattern extracts elements from the date to now
    timerange = TimeRange('date', None, ticker_list[10][0] / 1000 - 1, 0)
    ticker = trim_tickerlist(ticker_list, timerange)
    ticker_len = len(ticker)

    assert ticker_len == ticker_list_len - 10
    assert ticker_list[10] is ticker[0]  # The first element is element #10
    assert ticker_list[-1] is ticker[-1]  # The last element is the same

    # Test a wrong pattern
    # This pattern must return the list unchanged
    timerange = TimeRange(None, None, 0, 5)
    ticker = trim_tickerlist(ticker_list, timerange)
    ticker_len = len(ticker)

    assert ticker_list_len == ticker_len

    # passing empty list
    timerange = TimeRange(None, None, 0, 5)
    ticker = trim_tickerlist([], timerange)
    assert 0 == len(ticker)
    assert not ticker


def test_trim_dataframe(testdatadir) -> None:
    data = load_data(
        datadir=testdatadir,
        timeframe='1m',
        pairs=['UNITTEST/BTC']
    )['UNITTEST/BTC']
    min_date = int(data.iloc[0]['date'].timestamp())
    max_date = int(data.iloc[-1]['date'].timestamp())
    data_modify = data.copy()

    # Remove first 30 minutes (1800 s)
    tr = TimeRange('date', None, min_date + 1800, 0)
    data_modify = trim_dataframe(data_modify, tr)
    assert not data_modify.equals(data)
    assert len(data_modify) < len(data)
    assert len(data_modify) == len(data) - 30
    assert all(data_modify.iloc[-1] == data.iloc[-1])
    assert all(data_modify.iloc[0] == data.iloc[30])

    data_modify = data.copy()
    # Remove last 30 minutes (1800 s)
    tr = TimeRange(None, 'date', 0, max_date - 1800)
    data_modify = trim_dataframe(data_modify, tr)
    assert not data_modify.equals(data)
    assert len(data_modify) < len(data)
    assert len(data_modify) == len(data) - 30
    assert all(data_modify.iloc[0] == data.iloc[0])
    assert all(data_modify.iloc[-1] == data.iloc[-31])

    data_modify = data.copy()
    # Remove first 25 minutes (1500 s) and last 30 minutes (1800 s)
    tr = TimeRange('date', 'date', min_date + 1500, max_date - 1800)
    data_modify = trim_dataframe(data_modify, tr)
    assert not data_modify.equals(data)
    assert len(data_modify) < len(data)
    assert len(data_modify) == len(data) - 55
    # first row matches 25th original row
    assert all(data_modify.iloc[0] == data.iloc[25])


def test_file_dump_json_tofile(testdatadir) -> None:
    file = testdatadir / 'test_{id}.json'.format(id=str(uuid.uuid4()))
    data = {'bar': 'foo'}
@ -573,7 +463,8 @@ def test_validate_backtest_data(default_conf, mocker, caplog, testdatadir) -> None:


def test_refresh_backtest_ohlcv_data(mocker, default_conf, markets, caplog, testdatadir):
    dl_mock = mocker.patch('freqtrade.data.history._download_pair_history', MagicMock())
    dl_mock = mocker.patch('freqtrade.data.history.history_utils._download_pair_history',
                           MagicMock())
    mocker.patch(
        'freqtrade.exchange.Exchange.markets', PropertyMock(return_value=markets)
    )
@ -594,7 +485,8 @@ def test_refresh_backtest_ohlcv_data(mocker, default_conf, markets, caplog, testdatadir):


def test_download_data_no_markets(mocker, default_conf, caplog, testdatadir):
    dl_mock = mocker.patch('freqtrade.data.history._download_pair_history', MagicMock())
    dl_mock = mocker.patch('freqtrade.data.history.history_utils._download_pair_history',
                           MagicMock())

    ex = get_patched_exchange(mocker, default_conf)
    mocker.patch(
@ -614,7 +506,8 @@ def test_download_data_no_markets(mocker, default_conf, caplog, testdatadir):


def test_refresh_backtest_trades_data(mocker, default_conf, markets, caplog, testdatadir):
    dl_mock = mocker.patch('freqtrade.data.history._download_trades_history', MagicMock())
    dl_mock = mocker.patch('freqtrade.data.history.history_utils._download_trades_history',
                           MagicMock())
    mocker.patch(
        'freqtrade.exchange.Exchange.markets', PropertyMock(return_value=markets)
    )
@ -644,12 +537,12 @@ def test_download_trades_history(trades_history, mocker, default_conf, testdatadir):
                    ght_mock)
    exchange = get_patched_exchange(mocker, default_conf)
    file1 = testdatadir / 'ETH_BTC-trades.json.gz'

    data_handler = get_datahandler(testdatadir, data_format='jsongz')
    _backup_file(file1)

    assert not file1.is_file()

    assert _download_trades_history(datadir=testdatadir, exchange=exchange,
    assert _download_trades_history(data_handler=data_handler, exchange=exchange,
                                    pair='ETH/BTC')
    assert log_has("New Amount of trades: 5", caplog)
    assert file1.is_file()
@ -660,7 +553,7 @@ def test_download_trades_history(trades_history, mocker, default_conf, testdatadir):
    mocker.patch('freqtrade.exchange.Exchange.get_historic_trades',
                 MagicMock(side_effect=ValueError))

    assert not _download_trades_history(datadir=testdatadir, exchange=exchange,
    assert not _download_trades_history(data_handler=data_handler, exchange=exchange,
                                        pair='ETH/BTC')
    assert log_has_re('Failed to download historic trades for pair: "ETH/BTC".*', caplog)
@ -692,3 +585,73 @@ def test_convert_trades_to_ohlcv(mocker, default_conf, testdatadir, caplog):

    _clean_test_file(file1)
    _clean_test_file(file5)


def test_jsondatahandler_ohlcv_get_pairs(testdatadir):
    pairs = JsonDataHandler.ohlcv_get_pairs(testdatadir, '5m')
    # Convert to set to avoid failures due to sorting
    assert set(pairs) == {'UNITTEST/BTC', 'XLM/BTC', 'ETH/BTC', 'TRX/BTC', 'LTC/BTC',
                          'XMR/BTC', 'ZEC/BTC', 'ADA/BTC', 'ETC/BTC', 'NXT/BTC',
                          'DASH/BTC', 'XRP/ETH'}

    pairs = JsonGzDataHandler.ohlcv_get_pairs(testdatadir, '8m')
    assert set(pairs) == {'UNITTEST/BTC'}


def test_jsondatahandler_trades_get_pairs(testdatadir):
    pairs = JsonGzDataHandler.trades_get_pairs(testdatadir)
    # Convert to set to avoid failures due to sorting
    assert set(pairs) == {'XRP/ETH'}


def test_jsondatahandler_ohlcv_purge(mocker, testdatadir):
    mocker.patch.object(Path, "exists", MagicMock(return_value=False))
    mocker.patch.object(Path, "unlink", MagicMock())
    dh = JsonGzDataHandler(testdatadir)
    assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m')

    mocker.patch.object(Path, "exists", MagicMock(return_value=True))
    assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m')


def test_jsondatahandler_trades_purge(mocker, testdatadir):
    mocker.patch.object(Path, "exists", MagicMock(return_value=False))
    mocker.patch.object(Path, "unlink", MagicMock())
    dh = JsonGzDataHandler(testdatadir)
    assert not dh.trades_purge('UNITTEST/NONEXIST')

    mocker.patch.object(Path, "exists", MagicMock(return_value=True))
    assert dh.trades_purge('UNITTEST/NONEXIST')


def test_jsondatahandler_ohlcv_append(testdatadir):
    dh = JsonGzDataHandler(testdatadir)
    with pytest.raises(NotImplementedError):
        dh.ohlcv_append('UNITTEST/ETH', '5m', DataFrame())


def test_jsondatahandler_trades_append(testdatadir):
    dh = JsonGzDataHandler(testdatadir)
    with pytest.raises(NotImplementedError):
        dh.trades_append('UNITTEST/ETH', [])


def test_gethandlerclass():
    cl = get_datahandlerclass('json')
    assert cl == JsonDataHandler
    assert issubclass(cl, IDataHandler)
    cl = get_datahandlerclass('jsongz')
    assert cl == JsonGzDataHandler
    assert issubclass(cl, IDataHandler)
    assert issubclass(cl, JsonDataHandler)
    with pytest.raises(ValueError, match=r"No datahandler for .*"):
        get_datahandlerclass('DeadBeef')


def test_get_datahandler(testdatadir):
    dh = get_datahandler(testdatadir, 'json')
    assert type(dh) == JsonDataHandler
    dh = get_datahandler(testdatadir, 'jsongz')
    assert type(dh) == JsonGzDataHandler
    dh1 = get_datahandler(testdatadir, 'jsongz', dh)
    assert id(dh1) == id(dh)
@ -1,6 +1,5 @@
# pragma pylint: disable=missing-docstring, W0212, line-too-long, C0103, unused-argument

import math
import random
from pathlib import Path
from unittest.mock import MagicMock
@ -15,7 +14,7 @@ from freqtrade.configuration import TimeRange
from freqtrade.commands.optimize_commands import setup_optimize_configuration, start_backtesting
from freqtrade.data import history
from freqtrade.data.btanalysis import evaluate_result_multi
from freqtrade.data.converter import parse_ticker_dataframe
from freqtrade.data.converter import clean_ohlcv_dataframe
from freqtrade.data.dataprovider import DataProvider
from freqtrade.data.history import get_timerange
from freqtrade.exceptions import DependencyException, OperationalException
@ -50,47 +49,33 @@ def trim_dictlist(dict_list, num):

def load_data_test(what, testdatadir):
    timerange = TimeRange.parse_timerange('1510694220-1510700340')
    pair = history.load_tickerdata_file(testdatadir, timeframe='1m',
                                        pair='UNITTEST/BTC', timerange=timerange)
    datalen = len(pair)
    data = history.load_pair_history(pair='UNITTEST/BTC', datadir=testdatadir,
                                     timeframe='1m', timerange=timerange,
                                     drop_incomplete=False,
                                     fill_up_missing=False)

    base = 0.001
    if what == 'raise':
        data = [
            [
                pair[x][0],  # Keep old dates
                x * base,  # But replace O,H,L,C
                x * base + 0.0001,
                x * base - 0.0001,
                x * base,
                pair[x][5],  # Keep old volume
            ] for x in range(0, datalen)
        ]
        data.loc[:, 'open'] = data.index * base
        data.loc[:, 'high'] = data.index * base + 0.0001
        data.loc[:, 'low'] = data.index * base - 0.0001
        data.loc[:, 'close'] = data.index * base

    if what == 'lower':
        data = [
            [
                pair[x][0],  # Keep old dates
                1 - x * base,  # But replace O,H,L,C
                1 - x * base + 0.0001,
                1 - x * base - 0.0001,
                1 - x * base,
                pair[x][5]  # Keep old volume
            ] for x in range(0, datalen)
        ]
        data.loc[:, 'open'] = 1 - data.index * base
        data.loc[:, 'high'] = 1 - data.index * base + 0.0001
        data.loc[:, 'low'] = 1 - data.index * base - 0.0001
        data.loc[:, 'close'] = 1 - data.index * base

    if what == 'sine':
        hz = 0.1  # frequency
        data = [
            [
                pair[x][0],  # Keep old dates
                math.sin(x * hz) / 1000 + base,  # But replace O,H,L,C
                math.sin(x * hz) / 1000 + base + 0.0001,
                math.sin(x * hz) / 1000 + base - 0.0001,
                math.sin(x * hz) / 1000 + base,
                pair[x][5]  # Keep old volume
            ] for x in range(0, datalen)
        ]
        return {'UNITTEST/BTC': parse_ticker_dataframe(data, '1m', pair="UNITTEST/BTC",
                                                       fill_missing=True)}
        data.loc[:, 'open'] = np.sin(data.index * hz) / 1000 + base
        data.loc[:, 'high'] = np.sin(data.index * hz) / 1000 + base + 0.0001
        data.loc[:, 'low'] = np.sin(data.index * hz) / 1000 + base - 0.0001
        data.loc[:, 'close'] = np.sin(data.index * hz) / 1000 + base

    return {'UNITTEST/BTC': clean_ohlcv_dataframe(data, timeframe='1m', pair='UNITTEST/BTC',
                                                  fill_missing=True)}


def simple_backtest(config, contour, num_results, mocker, testdatadir) -> None:
@ -114,21 +99,6 @@ def simple_backtest(config, contour, num_results, mocker, testdatadir) -> None:
    assert len(results) == num_results


def mocked_load_data(datadir, pairs=[], timeframe='0m',
                     timerange=None, *args, **kwargs):
    tickerdata = history.load_tickerdata_file(datadir, 'UNITTEST/BTC', '1m', timerange=timerange)
    pairdata = {'UNITTEST/BTC': parse_ticker_dataframe(tickerdata, '1m', pair="UNITTEST/BTC",
                                                       fill_missing=True)}
    return pairdata


# used to mock ccxt.fetch_ohlcv
def _load_pair_as_ticks(pair, tickfreq):
    ticks = history.load_tickerdata_file(None, timeframe=tickfreq, pair=pair)
    ticks = ticks[-201:]
    return ticks


# FIX: fixturize this?
def _make_backtest_conf(mocker, datadir, conf=None, pair='UNITTEST/BTC'):
    data = history.load_data(datadir=datadir, timeframe='1m', pairs=[pair])
@ -339,12 +309,9 @@ def test_tickerdata_with_fee(default_conf, mocker, testdatadir) -> None:

def test_tickerdata_to_dataframe_bt(default_conf, mocker, testdatadir) -> None:
    patch_exchange(mocker)
    # timerange = TimeRange(None, 'line', 0, -100)
    timerange = TimeRange.parse_timerange('1510694220-1510700340')
    tick = history.load_tickerdata_file(testdatadir, 'UNITTEST/BTC', '1m', timerange=timerange)
    tickerlist = {'UNITTEST/BTC': parse_ticker_dataframe(tick, '1m', pair="UNITTEST/BTC",
                                                         fill_missing=True)}

    tickerlist = history.load_data(testdatadir, '1m', ['UNITTEST/BTC'], timerange=timerange,
                                   fill_up_missing=True)
    backtesting = Backtesting(default_conf)
    data = backtesting.strategy.tickerdata_to_dataframe(tickerlist)
    assert len(data['UNITTEST/BTC']) == 102
@ -361,7 +328,6 @@ def test_backtesting_start(default_conf, mocker, testdatadir, caplog) -> None:
    def get_timerange(input1):
        return Arrow(2017, 11, 14, 21, 17), Arrow(2017, 11, 14, 22, 59)

    mocker.patch('freqtrade.data.history.load_data', mocked_load_data)
    mocker.patch('freqtrade.data.history.get_timerange', get_timerange)
    mocker.patch('freqtrade.exchange.Exchange.refresh_latest_ohlcv', MagicMock())
    patch_exchange(mocker)
@ -391,7 +357,8 @@ def test_backtesting_start_no_data(default_conf, mocker, caplog, testdatadir) -> None:
    def get_timerange(input1):
        return Arrow(2017, 11, 14, 21, 17), Arrow(2017, 11, 14, 22, 59)

    mocker.patch('freqtrade.data.history.load_pair_history', MagicMock(return_value=pd.DataFrame()))
    mocker.patch('freqtrade.data.history.history_utils.load_pair_history',
                 MagicMock(return_value=pd.DataFrame()))
    mocker.patch('freqtrade.data.history.get_timerange', get_timerange)
    mocker.patch('freqtrade.exchange.Exchange.refresh_latest_ohlcv', MagicMock())
    patch_exchange(mocker)
@ -695,13 +662,7 @@ def test_backtest_record(default_conf, fee, mocker):
def test_backtest_start_timerange(default_conf, mocker, caplog, testdatadir):
    default_conf['exchange']['pair_whitelist'] = ['UNITTEST/BTC']

    async def load_pairs(pair, timeframe, since):
        return _load_pair_as_ticks(pair, timeframe)

    api_mock = MagicMock()
    api_mock.fetch_ohlcv = load_pairs

    patch_exchange(mocker, api_mock)
    patch_exchange(mocker)
    mocker.patch('freqtrade.optimize.backtesting.Backtesting.backtest', MagicMock())
    mocker.patch('freqtrade.optimize.backtesting.generate_text_table', MagicMock())

@ -741,12 +702,7 @@ def test_backtest_start_timerange(default_conf, mocker, caplog, testdatadir):
def test_backtest_start_multi_strat(default_conf, mocker, caplog, testdatadir):
    default_conf['exchange']['pair_whitelist'] = ['UNITTEST/BTC']

    async def load_pairs(pair, timeframe, since):
        return _load_pair_as_ticks(pair, timeframe)
    api_mock = MagicMock()
    api_mock.fetch_ohlcv = load_pairs

    patch_exchange(mocker, api_mock)
    patch_exchange(mocker)
    backtestmock = MagicMock()
    mocker.patch('freqtrade.optimize.backtesting.Backtesting.backtest', backtestmock)
    gen_table_mock = MagicMock()
@ -12,8 +12,7 @@ from filelock import Timeout

from freqtrade.commands.optimize_commands import (setup_optimize_configuration,
                                                  start_hyperopt)
from freqtrade.data.converter import parse_ticker_dataframe
from freqtrade.data.history import load_tickerdata_file
from freqtrade.data.history import load_data
from freqtrade.exceptions import OperationalException
from freqtrade.optimize.default_hyperopt import DefaultHyperOpt
from freqtrade.optimize.default_hyperopt_loss import DefaultHyperOptLoss
@ -577,9 +576,7 @@ def test_has_space(hyperopt, spaces, expected_results):


def test_populate_indicators(hyperopt, testdatadir) -> None:
    tick = load_tickerdata_file(testdatadir, 'UNITTEST/BTC', '1m')
    tickerlist = {'UNITTEST/BTC': parse_ticker_dataframe(tick, '1m', pair="UNITTEST/BTC",
                                                         fill_missing=True)}
    tickerlist = load_data(testdatadir, '1m', ['UNITTEST/BTC'], fill_up_missing=True)
    dataframes = hyperopt.backtesting.strategy.tickerdata_to_dataframe(tickerlist)
    dataframe = hyperopt.custom_hyperopt.populate_indicators(dataframes['UNITTEST/BTC'],
                                                             {'pair': 'UNITTEST/BTC'})
@ -591,9 +588,7 @@ def test_populate_indicators(hyperopt, testdatadir) -> None:


def test_buy_strategy_generator(hyperopt, testdatadir) -> None:
    tick = load_tickerdata_file(testdatadir, 'UNITTEST/BTC', '1m')
    tickerlist = {'UNITTEST/BTC': parse_ticker_dataframe(tick, '1m', pair="UNITTEST/BTC",
                                                         fill_missing=True)}
    tickerlist = load_data(testdatadir, '1m', ['UNITTEST/BTC'], fill_up_missing=True)
    dataframes = hyperopt.backtesting.strategy.tickerdata_to_dataframe(tickerlist)
    dataframe = hyperopt.custom_hyperopt.populate_indicators(dataframes['UNITTEST/BTC'],
                                                             {'pair': 'UNITTEST/BTC'})
@ -7,8 +7,7 @@ import arrow
from pandas import DataFrame

from freqtrade.configuration import TimeRange
from freqtrade.data.converter import parse_ticker_dataframe
from freqtrade.data.history import load_tickerdata_file
from freqtrade.data.history import load_data
from freqtrade.persistence import Trade
from freqtrade.resolvers import StrategyResolver
from freqtrade.strategy.default_strategy import DefaultStrategy
@ -109,9 +108,8 @@ def test_tickerdata_to_dataframe(default_conf, testdatadir) -> None:
    strategy = StrategyResolver.load_strategy(default_conf)

    timerange = TimeRange.parse_timerange('1510694220-1510700340')
    tick = load_tickerdata_file(testdatadir, 'UNITTEST/BTC', '1m', timerange=timerange)
    tickerlist = {'UNITTEST/BTC': parse_ticker_dataframe(tick, '1m', pair="UNITTEST/BTC",
                                                         fill_missing=True)}
    tickerlist = load_data(testdatadir, '1m', ['UNITTEST/BTC'], timerange=timerange,
                           fill_up_missing=True)
    data = strategy.tickerdata_to_dataframe(tickerlist)
    assert len(data['UNITTEST/BTC']) == 102  # partial candle was removed
@ -4,10 +4,12 @@ import datetime
from pathlib import Path
from unittest.mock import MagicMock

import pytest

from freqtrade.data.converter import parse_ticker_dataframe
from freqtrade.data.history import pair_data_filename
from freqtrade.misc import (datesarray_to_datetimearray, file_dump_json,
                            file_load_json, format_ms_time, plural, shorten_date)
                            file_load_json, format_ms_time, pair_to_filename,
                            plural, shorten_date)


def test_shorten_date() -> None:
@ -48,16 +50,36 @@ def test_file_dump_json(mocker) -> None:
def test_file_load_json(mocker, testdatadir) -> None:

    # 7m .json does not exist
    ret = file_load_json(pair_data_filename(testdatadir, 'UNITTEST/BTC', '7m'))
    ret = file_load_json(testdatadir / 'UNITTEST_BTC-7m.json')
    assert not ret
    # 1m json exists (but no .gz exists)
    ret = file_load_json(pair_data_filename(testdatadir, 'UNITTEST/BTC', '1m'))
    ret = file_load_json(testdatadir / 'UNITTEST_BTC-1m.json')
    assert ret
    # 8 .json is empty and will fail if it's loaded. .json.gz is a copy of 1.json
    ret = file_load_json(pair_data_filename(testdatadir, 'UNITTEST/BTC', '8m'))
    ret = file_load_json(testdatadir / 'UNITTEST_BTC-8m.json')
    assert ret


@pytest.mark.parametrize("pair,expected_result", [
    ("ETH/BTC", 'ETH_BTC'),
    ("Fabric Token/ETH", 'Fabric_Token_ETH'),
    ("ETHH20", 'ETHH20'),
    (".XBTBON2H", '_XBTBON2H'),
    ("ETHUSD.d", 'ETHUSD_d'),
    ("ADA-0327", 'ADA_0327'),
    ("BTC-USD-200110", 'BTC_USD_200110'),
    ("F-AKRO/USDT", 'F_AKRO_USDT'),
    ("LC+/ETH", 'LC__ETH'),
    ("CMT@18/ETH", 'CMT_18_ETH'),
    ("LBTC:1022/SAI", 'LBTC_1022_SAI'),
    ("$PAC/BTC", '_PAC_BTC'),
    ("ACC_OLD/BTC", 'ACC_OLD_BTC'),
])
def test_pair_to_filename(pair, expected_result):
    pair_s = pair_to_filename(pair)
    assert pair_s == expected_result


def test_format_ms_time() -> None:
    # Date 2018-04-10 18:02:01
    date_in_epoch_ms = 1523383321000