"""
Various tool functions for Freqtrade and scripts
"""
import gzip
import logging
import re
from datetime import datetime
from typing import Dict

import numpy as np
from pandas import DataFrame
import rapidjson

logger = logging.getLogger(__name__)


def shorten_date(_date: str) -> str:
    """
    Trim the date so it fits on small screens
    """
    new_date = re.sub('seconds?', 'sec', _date)
    new_date = re.sub('minutes?', 'min', new_date)
    new_date = re.sub('hours?', 'h', new_date)
    new_date = re.sub('days?', 'd', new_date)
    new_date = re.sub('^an?', '1', new_date)
    return new_date
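# Illustrative examples (not part of the original module): shorten_date only
# rewrites the humanized duration words, so
#   shorten_date('an hour ago')     -> '1 h ago'
#   shorten_date('2 minutes ago')   -> '2 min ago'
#   shorten_date('30 seconds ago')  -> '30 sec ago'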


############################################
# Used by scripts                          #
# Matplotlib doesn't support ::datetime64, #
# so we need to convert it into ::datetime #
############################################
def datesarray_to_datetimearray(dates: np.ndarray) -> np.ndarray:
    """
    Convert a pandas array of timestamps into
    a numpy array of datetimes
    :return: numpy array of datetime
    """
    times = []
    dates = dates.astype(datetime)
    for index in range(0, dates.size):
        date = dates[index].to_pydatetime()
        times.append(date)
    return np.array(times)
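# Illustrative usage sketch (not part of the original module), assuming
# `import pandas as pd` and a dataframe with a 'date' column of timestamps:
#   df = DataFrame({'date': pd.to_datetime(['2018-01-01', '2018-01-02'])})
#   datesarray_to_datetimearray(df['date'])
#   # -> numpy array of datetime.datetime objects, plottable with Matplotlib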


def common_datearray(dfs: Dict[str, DataFrame]) -> np.ndarray:
    """
    Return all dates contained in the given dataframes
    :param dfs: Dict with format pair: pair_data
    :return: sorted numpy array of unique dates
    """
    alldates = {}
    for pair, pair_data in dfs.items():
        dates = datesarray_to_datetimearray(pair_data['date'])
        for date in dates:
            alldates[date] = 1
    lst = []
    for date, _ in alldates.items():
        lst.append(date)
    arr = np.array(lst)
    return np.sort(arr, axis=0)
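# Illustrative sketch (not part of the original module): given two pairs whose
# candles only partially overlap, e.g.
#   dfs = {'ETH/BTC': df_eth, 'LTC/BTC': df_ltc}
# common_datearray(dfs) contains every distinct 'date' value from both
# dataframes exactly once, sorted ascending (df_eth / df_ltc are hypothetical).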


def file_dump_json(filename, data, is_zip=False) -> None:
    """
    Dump JSON data into a file
    :param filename: file to create
    :param data: JSON Data to save
    :param is_zip: if True, compress the output with gzip and append '.gz' to filename
    :return:
    """
    print(f'dumping json to "{filename}"')

    if is_zip:
        if not filename.endswith('.gz'):
            filename = filename + '.gz'
        with gzip.open(filename, 'w') as fp:
            rapidjson.dump(data, fp, default=str, number_mode=rapidjson.NM_NATIVE)
    else:
        with open(filename, 'w') as fp:
            rapidjson.dump(data, fp, default=str, number_mode=rapidjson.NM_NATIVE)
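# Illustrative usage sketch (not part of the original module; filenames and
# the `candles` variable are hypothetical):
#   file_dump_json('backtest_result.json', {'pair': 'ETH/BTC', 'profit': 0.02})
#   file_dump_json('ticker_history.json', candles, is_zip=True)  # -> ticker_history.json.gz
# Values rapidjson cannot serialise natively (e.g. datetime) fall back to str()
# via default=str.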


def format_ms_time(date: int) -> str:
    """
    Convert MS date to readable format.
    :param date: timestamp in milliseconds since epoch
    """
    return datetime.fromtimestamp(date/1000.0).strftime('%Y-%m-%dT%H:%M:%S')
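# Illustrative sketch (not part of the original module): exchange APIs usually
# report times in milliseconds since epoch, e.g.
#   format_ms_time(1514764800000)  # -> '2018-01-01T00:00:00' when run in UTC
# Note that datetime.fromtimestamp() uses the local timezone, so the exact
# string depends on where the code runs.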