merged remote

scripts/convert_backtestdata.py (new executable file, 201 lines)
@@ -0,0 +1,201 @@
#!/usr/bin/env python3
"""
Script to convert backtest data from the old format to the new format to support ccxt

Optional Cli parameters:
-d / --datadir: path to pair backtest data
-n / --norename: don't rename files from BTC_<PAIR> to <PAIR>_BTC
                 (note that not renaming will overwrite source files)
"""
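
# Illustrative usage (flags as defined in convert_parse_args below; the
# path shown is simply the script's own default datadir):
#   python3 scripts/convert_backtestdata.py --datadir freqtrade/tests/testdata
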
import logging
import sys
from argparse import Namespace
from os import path
import glob
import json
import re
from typing import List, Dict, Optional, Tuple
import gzip

import dateutil.parser
from pandas import DataFrame

from freqtrade.arguments import Arguments
from freqtrade import misc, constants

logger = logging.getLogger('freqtrade')


def load_old_file(filename: str) -> Tuple[Optional[List[Dict]], bool]:
    if not path.isfile(filename):
        logger.warning("filename %s does not exist", filename)
        return (None, False)
    logger.debug('Loading ticker data from file %s', filename)

    pairdata = None

    if filename.endswith('.gz'):
        is_zip = True
        with gzip.open(filename) as tickerdata:
            pairdata = json.load(tickerdata)
    else:
        is_zip = False
        with open(filename) as tickerdata:
            pairdata = json.load(tickerdata)
    return (pairdata, is_zip)
def parse_old_backtest_data(ticker) -> Optional[DataFrame]:
    """
    Reads old backtest data
    Format: "O": 8.794e-05,
            "H": 8.948e-05,
            "L": 8.794e-05,
            "C": 8.88e-05,
            "V": 991.09056638,
            "T": "2017-11-26T08:50:00",
            "BV": 0.0877869
    """

    columns = {'C': 'close', 'V': 'volume', 'O': 'open',
               'H': 'high', 'L': 'low', 'T': 'date'}

    frame = DataFrame(ticker) \
        .rename(columns=columns)
    if 'BV' in frame:
        frame.drop('BV', axis=1, inplace=True)
    if 'date' not in frame:
        logger.warning("Date not in frame - probably not a Ticker file")
        return None
    frame.sort_values('date', inplace=True)
    return frame


def convert_dataframe(frame: DataFrame):
    """Convert dataframe to new format"""
    # reorder columns:
    cols = ['date', 'open', 'high', 'low', 'close', 'volume']
    frame = frame[cols].copy()

    # Make sure parsing/printing data is assumed to be UTC
    frame['date'] = frame['date'].apply(
        lambda d: int(dateutil.parser.parse(d + '+00:00').timestamp()) * 1000)
    frame['date'] = frame['date'].astype('int64')
    # Convert columns one by one to preserve type.
    by_column = [frame[x].values.tolist() for x in frame.columns]
    return list(list(x) for x in zip(*by_column))
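
# Illustrative result for the sample candle shown in parse_old_backtest_data's
# docstring (date becomes epoch milliseconds; value computed by hand for
# 2017-11-26T08:50:00 UTC):
#   [[1511686200000, 8.794e-05, 8.948e-05, 8.794e-05, 8.88e-05, 991.09056638]]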


def convert_file(filename: str, filename_new: str) -> None:
    """Converts a file from old format to ccxt format"""
    (pairdata, is_zip) = load_old_file(filename)
    if not pairdata:
        return
    if isinstance(pairdata, list) and isinstance(pairdata[0], list):
        logger.error("pairdata for %s already in new format", filename)
        return

    frame = parse_old_backtest_data(pairdata)
    # Convert frame to new format
    if frame is not None:
        frame1 = convert_dataframe(frame)
        misc.file_dump_json(filename_new, frame1, is_zip)


def convert_main(args: Namespace) -> None:
    """
    Converts a folder given in --datadir from old to new format to support ccxt
    """

    workdir = path.join(args.datadir, "")
    logger.info("Workdir: %s", workdir)

    for filename in glob.glob(workdir + "*.json"):
        # swap currency names
        ret = re.search(r'[A-Z_]{7,}', path.basename(filename))
        if args.norename:
            filename_new = filename
        else:
            if not ret:
                logger.warning("file %s could not be converted, could not extract currencies",
                               filename)
                continue
            pair = ret.group(0)
            currencies = pair.split("_")
            if len(currencies) != 2:
                logger.warning("file %s could not be converted, could not extract currencies",
                               filename)
                continue

            ret_integer = re.search(r'\d+(?=\.json)', path.basename(filename))
            ret_string = re.search(r'(\d+[mhdw])(?=\.json)', path.basename(filename))

            if ret_integer:
                minutes = int(ret_integer.group(0))
                # default to adding 'm' to end of minutes for new interval name
                interval = str(minutes) + 'm'
                # but check if there is a mapping between int and string also
                for str_interval, minutes_interval in constants.TICKER_INTERVAL_MINUTES.items():
                    if minutes_interval == minutes:
                        interval = str_interval
                        break
                # change order on pairs if old ticker interval found
                filename_new = path.join(path.dirname(filename),
                                         "{}_{}-{}.json".format(currencies[1],
                                                                currencies[0], interval))
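                # e.g. an old-style 'BTC_ETH5.json' becomes 'ETH_BTC-5m.json'
                # (currencies swapped, integer interval mapped to a string name);
                # example derived from the regexes above, not from real data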

            elif ret_string:
                interval = ret_string.group(0)
                filename_new = path.join(path.dirname(filename),
                                         "{}_{}-{}.json".format(currencies[0],
                                                                currencies[1], interval))

            else:
                logger.warning("file %s could not be converted, interval not found", filename)
                continue

        logger.debug("Converting and renaming %s to %s", filename, filename_new)
        convert_file(filename, filename_new)


def convert_parse_args(args: List[str]) -> Namespace:
    """
    Parse args passed to the script
    :param args: Cli arguments
    :return: args: Array with all arguments
    """
    arguments = Arguments(args, 'Convert datafiles')
    arguments.parser.add_argument(
        '-d', '--datadir',
        help='path to backtest data (default: %(default)s)',
        dest='datadir',
        default=path.join('freqtrade', 'tests', 'testdata'),
        type=str,
        metavar='PATH',
    )
    arguments.parser.add_argument(
        '-n', '--norename',
        help="don't rename files from BTC_<PAIR> to <PAIR>_BTC - "
             "Note that not renaming will overwrite source files",
        dest='norename',
        default=False,
        action='store_true'
    )

    return arguments.parse_args()


def main(sysargv: List[str]) -> None:
    """
    Entry point: parse the Cli arguments and run the conversion.
    :return: None
    """
    logger.info('Starting Dataframe conversion')
    convert_main(convert_parse_args(sysargv))


if __name__ == '__main__':
    main(sys.argv[1:])
scripts/download_backtest_data.py (new executable file, 58 lines)
@@ -0,0 +1,58 @@
#!/usr/bin/env python3

"""This script generates json backtest data from an exchange"""
import json
import sys
import os
import arrow

from freqtrade import (exchange, arguments, misc)

DEFAULT_DL_PATH = 'freqtrade/tests/testdata'

arguments = arguments.Arguments(sys.argv[1:], 'download utility')
arguments.testdata_dl_options()
args = arguments.parse_args()

TICKER_INTERVALS = ['1m', '5m']
PAIRS = []

if args.pairs_file:
    with open(args.pairs_file) as file:
        PAIRS = json.load(file)
PAIRS = list(set(PAIRS))
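
# Illustrative pairs file content - a plain JSON list of ccxt-style symbols
# (the actual file is whatever the user passes via the pairs-file option):
#   ["ETH/BTC", "LTC/BTC"]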

dl_path = DEFAULT_DL_PATH
if args.export and os.path.exists(args.export):
    dl_path = args.export

since_time = None
if args.days:
    since_time = arrow.utcnow().shift(days=-args.days).timestamp * 1000
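    # e.g. args.days == 30 yields the epoch-millisecond timestamp of 30 days
    # ago, which is handed to get_ticker_history below as since_ms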


print(f'About to download pairs: {PAIRS} to {dl_path}')

# Init exchange
exchange._API = exchange.init_ccxt({'key': '',
                                    'secret': '',
                                    'name': args.exchange})


for pair in PAIRS:
    for tick_interval in TICKER_INTERVALS:
        print(f'downloading pair {pair}, interval {tick_interval}')

        data = exchange.get_ticker_history(pair, tick_interval, since_ms=since_time)
        if not data:
            print('\tNo data was downloaded')
            break

        print('\tData was downloaded for period %s - %s' % (
            arrow.get(data[0][0] / 1000).format(),
            arrow.get(data[-1][0] / 1000).format()))

        # save data
        pair_print = pair.replace('/', '_')
        filename = f'{pair_print}-{tick_interval}.json'
        misc.file_dump_json(os.path.join(dl_path, filename), data)
scripts/plot_dataframe.py
@@ -10,6 +10,7 @@ Optional Cli parameters
 -d / --datadir: path to pair backtest data
 --timerange: specify what timerange of data to use.
 -l / --live: Live, to download the latest ticker for the pair
+-db / --db-url: Show trades stored in database
 """
 import logging
 import sys
@@ -21,14 +22,18 @@ from plotly import tools
 from plotly.offline import plot
 import plotly.graph_objs as go
 
 from typing import Dict, List, Any
+from sqlalchemy import create_engine
 
 from freqtrade.arguments import Arguments
 from freqtrade.analyze import Analyze
 from freqtrade import exchange
 import freqtrade.optimize as optimize
 
+from freqtrade import persistence
+from freqtrade.persistence import Trade
 
 logger = logging.getLogger(__name__)
 
+_CONF: Dict[str, Any] = {}
 
 def plot_analyzed_dataframe(args: Namespace) -> None:
     """
@@ -54,7 +59,7 @@ def plot_analyzed_dataframe(args: Namespace) -> None:
     if args.live:
         logger.info('Downloading pair.')
         # Init Bittrex to use public API
-        exchange._API = exchange.Bittrex({'key': '', 'secret': ''})
+        exchange.init({'key': '', 'secret': ''})
         tickers[pair] = exchange.get_ticker_history(pair, tick_interval)
     else:
         tickers = optimize.load_data(
@@ -69,6 +74,12 @@ def plot_analyzed_dataframe(args: Namespace) -> None:
     dataframe = analyze.populate_buy_trend(dataframe)
     dataframe = analyze.populate_sell_trend(dataframe)
 
+    trades = []
+    if args.db_url:
+        engine = create_engine('sqlite:///' + args.db_url)
+        persistence.init(_CONF, engine)
+        trades = Trade.query.filter(Trade.pair.is_(pair)).all()
+
     if len(dataframe.index) > 750:
         logger.warning('Ticker contained more than 750 candles, clipping.')
         data = dataframe.tail(750)
@@ -109,6 +120,31 @@ def plot_analyzed_dataframe(args: Namespace) -> None:
         )
     )
 
+    trade_buys = go.Scattergl(
+        x=[t.open_date.isoformat() for t in trades],
+        y=[t.open_rate for t in trades],
+        mode='markers',
+        name='trade_buy',
+        marker=dict(
+            symbol='square-open',
+            size=11,
+            line=dict(width=2),
+            color='green'
+        )
+    )
+    trade_sells = go.Scattergl(
+        x=[t.close_date.isoformat() for t in trades],
+        y=[t.close_rate for t in trades],
+        mode='markers',
+        name='trade_sell',
+        marker=dict(
+            symbol='square-open',
+            size=11,
+            line=dict(width=2),
+            color='red'
+        )
+    )
+
     bb_lower = go.Scatter(
         x=data.date,
         y=data.bb_lowerband,
@@ -154,6 +190,8 @@ def plot_analyzed_dataframe(args: Namespace) -> None:
     fig.append_trace(volume, 2, 1)
     fig.append_trace(macd, 3, 1)
     fig.append_trace(macdsignal, 3, 1)
+    fig.append_trace(trade_buys, 1, 1)
+    fig.append_trace(trade_sells, 1, 1)
 
     fig['layout'].update(title=args.pair)
     fig['layout']['yaxis1'].update(title='Price')
scripts/plot_profit.py
@@ -24,6 +24,7 @@ import plotly.graph_objs as go
 from freqtrade.arguments import Arguments
 from freqtrade.configuration import Configuration
 from freqtrade.analyze import Analyze
+from freqtrade import constants
 
 import freqtrade.optimize as optimize
 import freqtrade.misc as misc
@@ -33,10 +34,10 @@ logger = logging.getLogger(__name__)
 
 
 # data:: [ pair, profit-%, enter, exit, time, duration]
-# data:: ["BTC_ETH", 0.0023975, "1515598200", "1515602100", "2018-01-10 07:30:00+00:00", 65]
-def make_profit_array(
-    data: List, px: int, min_date: int,
-        interval: int, filter_pairs: Optional[List] = None) -> np.ndarray:
+# data:: ["ETH/BTC", 0.0023975, "1515598200", "1515602100", "2018-01-10 07:30:00+00:00", 65]
+def make_profit_array(data: List, px: int, min_date: int,
+                      interval: int,
+                      filter_pairs: Optional[List] = None) -> np.ndarray:
     pg = np.zeros(px)
     filter_pairs = filter_pairs or []
     # Go through the trades
@@ -186,11 +187,12 @@ def plot_profit(args: Namespace) -> None:
     plot(fig, filename='freqtrade-profit-plot.html')
 
 
-def define_index(min_date: int, max_date: int, interval: int) -> int:
+def define_index(min_date: int, max_date: int, interval: str) -> int:
     """
     Return the index of a specific date
     """
-    return int((max_date - min_date) / (interval * 60))
+    interval_minutes = constants.TICKER_INTERVAL_MINUTES[interval]
+    return int((max_date - min_date) / (interval_minutes * 60))
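     # e.g. for interval '5m' and the sample trade timestamps above
     # (1515598200 .. 1515602100, a 3900s span): int(3900 / 300) == 13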
 
 
 def plot_parse_args(args: List[str]) -> Namespace: