Merge pull request #956 from freqtrade/fix/download_backtest

slight rework of download script
This commit is contained in:
Michael Egger 2018-06-24 21:44:09 +02:00 committed by GitHub
commit 375ea940f4
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 32 additions and 23 deletions

View File

@@ -334,3 +334,10 @@ class Arguments(object):
             nargs='+',
             dest='timeframes',
         )
+        self.parser.add_argument(
+            '--erase',
+            help='Clean all existing data for the selected exchange/pairs/timeframes',
+            dest='erase',
+            action='store_true'
+        )

View File

@@ -3,11 +3,14 @@
 """This script generate json data from bittrex"""
 import json
 import sys
-import os
+from pathlib import Path
 import arrow
-from freqtrade import (arguments, misc)
+from freqtrade import arguments
+from freqtrade.arguments import TimeRange
 from freqtrade.exchange import Exchange
+from freqtrade.optimize import download_backtesting_testdata

 DEFAULT_DL_PATH = 'user_data/data'
@@ -17,25 +20,27 @@ args = arguments.parse_args()
 timeframes = args.timeframes

-dl_path = os.path.join(DEFAULT_DL_PATH, args.exchange)
+dl_path = Path(DEFAULT_DL_PATH).joinpath(args.exchange)
 if args.export:
-    dl_path = args.export
-if not os.path.isdir(dl_path):
+    dl_path = Path(args.export)
+if not dl_path.is_dir():
     sys.exit(f'Directory {dl_path} does not exist.')

-pairs_file = args.pairs_file if args.pairs_file else os.path.join(dl_path, 'pairs.json')
-if not os.path.isfile(pairs_file):
+pairs_file = Path(args.pairs_file) if args.pairs_file else dl_path.joinpath('pairs.json')
+if not pairs_file.exists():
     sys.exit(f'No pairs file found with path {pairs_file}.')
-with open(pairs_file) as file:
+with pairs_file.open() as file:
     PAIRS = list(set(json.load(file)))
 PAIRS.sort()

-since_time = None
+timerange = TimeRange()
 if args.days:
-    since_time = arrow.utcnow().shift(days=-args.days).timestamp * 1000
+    time_since = arrow.utcnow().shift(days=-args.days).strftime("%Y%m%d")
+    timerange = arguments.parse_timerange(f'{time_since}-')

 print(f'About to download pairs: {PAIRS} to {dl_path}')
@@ -59,21 +64,18 @@ for pair in PAIRS:
         print(f"skipping pair {pair}")
         continue
     for tick_interval in timeframes:
-        print(f'downloading pair {pair}, interval {tick_interval}')
-        data = exchange.get_ticker_history(pair, tick_interval, since_ms=since_time)
-        if not data:
-            print('\tNo data was downloaded')
-            break
-        print('\tData was downloaded for period %s - %s' % (
-            arrow.get(data[0][0] / 1000).format(),
-            arrow.get(data[-1][0] / 1000).format()))
-        # save data
         pair_print = pair.replace('/', '_')
         filename = f'{pair_print}-{tick_interval}.json'
-        misc.file_dump_json(os.path.join(dl_path, filename), data)
+        dl_file = dl_path.joinpath(filename)
+        if args.erase and dl_file.exists():
+            print(f'Deleting existing data for pair {pair}, interval {tick_interval}')
+            dl_file.unlink()
+        print(f'downloading pair {pair}, interval {tick_interval}')
+        download_backtesting_testdata(str(dl_path), exchange=exchange,
+                                      pair=pair,
+                                      tick_interval=tick_interval,
+                                      timerange=timerange)

 if pairs_not_available: