#!/usr/bin/env python3
"""
Script to convert old backtest data files to the new (ccxt) format

Optional Cli parameters
-d / --datadir: path to the backtest data files
-n / --norename: don't rename files from BTC_<PAIR> to <PAIR>_BTC
                 (not renaming will overwrite the source files)
"""
import logging
import sys
from argparse import Namespace
from os import path
import glob
import json
import re
from typing import List, Dict, Tuple
import gzip

from freqtrade.arguments import Arguments
from freqtrade import misc
from pandas import DataFrame
from freqtrade.constants import Constants

import dateutil.parser


logger = logging.getLogger('freqtrade')


def load_old_file(filename: str) -> Tuple[List[Dict], bool]:
    if not path.isfile(filename):
        logger.warning("filename %s does not exist", filename)
        return (None, False)
    logger.debug('Loading ticker data from file %s', filename)

    pairdata = None

    if filename.endswith('.gz'):
        is_zip = True
        with gzip.open(filename) as tickerdata:
            pairdata = json.load(tickerdata)
    else:
        is_zip = False
        with open(filename) as tickerdata:
            pairdata = json.load(tickerdata)
    return (pairdata, is_zip)


def parse_old_backtest_data(ticker) -> DataFrame:
    """
    Reads old backtest data
    Format: "O": 8.794e-05,
            "H": 8.948e-05,
            "L": 8.794e-05,
            "C": 8.88e-05,
            "V": 991.09056638,
            "T": "2017-11-26T08:50:00",
            "BV": 0.0877869
    """

    columns = {'C': 'close', 'V': 'volume', 'O': 'open',
               'H': 'high', 'L': 'low', 'T': 'date'}

    frame = DataFrame(ticker) \
        .rename(columns=columns)
    if 'BV' in frame:
        frame.drop('BV', axis=1, inplace=True)
    if 'date' not in frame:
        logger.warning("Date not in frame - probably not a Ticker file")
        return None
    frame.sort_values('date', inplace=True)
    return frame
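# Illustrative mapping (values taken from the docstring above, not real market data):
# a record such as
#   {"O": 8.794e-05, "H": 8.948e-05, "L": 8.794e-05, "C": 8.88e-05,
#    "V": 991.09056638, "T": "2017-11-26T08:50:00", "BV": 0.0877869}
# becomes a DataFrame row with the columns open, high, low, close, volume and date
# (the "BV" column is dropped).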


def convert_dataframe(frame: DataFrame):
    """Convert dataframe to new format"""
    # reorder columns:
    cols = ['date', 'open', 'high', 'low', 'close', 'volume']
    frame = frame[cols]

    # Make sure parsing/printing data is assumed to be UTC
    frame['date'] = frame['date'].apply(
        lambda d: int(dateutil.parser.parse(d + '+00:00').timestamp()) * 1000)
    frame['date'] = frame['date'].astype('int64')
    # Convert columns one by one to preserve type.
    by_column = [frame[x].values.tolist() for x in frame.columns]
    return list(list(x) for x in zip(*by_column))
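# Illustrative result (hand-computed from the docstring example above, so treat the
# numbers as a sketch rather than verified output): each converted row is a plain list
# in ccxt OHLCV order, with the date as a millisecond UTC timestamp, e.g.
#   [1511686200000, 8.794e-05, 8.948e-05, 8.794e-05, 8.88e-05, 991.09056638]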


def convert_file(filename: str, filename_new: str) -> None:
    """Converts a file from old format to ccxt format"""
    (pairdata, is_zip) = load_old_file(filename)
    if pairdata and isinstance(pairdata, list):
        if isinstance(pairdata[0], list):
            logger.error("pairdata for %s already in new format", filename)
            return

    frame = parse_old_backtest_data(pairdata)
    # Convert frame to new format
    if frame is not None:
        frame1 = convert_dataframe(frame)
        misc.file_dump_json(filename_new, frame1, is_zip)


def convert_main(args: Namespace) -> None:
    """
    Converts a folder given in --datadir from the old to the new format to support ccxt
    """

    workdir = path.join(args.datadir, "")
    logger.info("Workdir: %s", workdir)
    for filename in glob.glob(workdir + "*.json"):
        # swap currency names
        ret = re.search(r'[A-Z_]{7,}', path.basename(filename))
        if args.norename:
            filename_new = filename
        else:
            if not ret:
                logger.warning("file %s could not be converted, could not extract currencies",
                               filename)
                continue
            pair = ret.group(0)
            currencies = pair.split("_")
            if len(currencies) != 2:
                logger.warning("file %s could not be converted, could not extract currencies",
                               filename)
                continue

            ret_integer = re.search(r'\d+(?=\.json)', path.basename(filename))
            ret_string = re.search(r'(\d+[mhdw])(?=\.json)', path.basename(filename))

            if ret_integer:
                minutes = int(ret_integer.group(0))
                # default to adding 'm' to the minutes for the new interval name,
                # but check if there is a mapping between int and string also
                interval = str(minutes) + 'm'
                for str_interval, minutes_interval in Constants.TICKER_INTERVAL_MINUTES.items():
                    if minutes_interval == minutes:
                        interval = str_interval
                        break
                # change order on pairs if old ticker interval found
                filename_new = path.join(path.dirname(filename),
                                         "{}_{}-{}.json".format(currencies[1],
                                                                currencies[0], interval))

            elif ret_string:
                interval = ret_string.group(0)
                filename_new = path.join(path.dirname(filename),
                                         "{}_{}-{}.json".format(currencies[0],
                                                                currencies[1], interval))

            else:
                logger.warning("file %s could not be converted, interval not found", filename)
                continue

        logger.debug("Converting and renaming %s to %s", filename, filename_new)
        convert_file(filename, filename_new)


def convert_parse_args(args: List[str]) -> Namespace:
    """
    Parse args passed to the script
    :param args: Cli arguments
    :return: Namespace with all parsed arguments
    """
    arguments = Arguments(args, 'Convert datafiles')
    arguments.parser.add_argument(
        '-d', '--datadir',
        help='path to backtest data (default: %(default)s)',
        dest='datadir',
        default=path.join('freqtrade', 'tests', 'testdata'),
        type=str,
        metavar='PATH',
    )
    arguments.parser.add_argument(
        '-n', '--norename',
        help="don't rename files from BTC_<PAIR> to <PAIR>_BTC - "
             "Note that not renaming will overwrite source files",
        dest='norename',
        default=False,
        action='store_true'
    )

    return arguments.parse_args()


def main(sysargv: List[str]) -> None:
    """
    This function parses the Cli arguments and starts the data conversion.
    :return: None
    """
    logger.info('Starting Dataframe conversion')
    convert_main(convert_parse_args(sysargv))


if __name__ == '__main__':
    main(sys.argv[1:])