Merge pull request #2522 from freqtrade/replace_tickerinterval
Replace tickerinterval
@@ -39,12 +39,12 @@ class TimeRange:
         if self.startts:
             self.startts = self.startts - seconds

-    def adjust_start_if_necessary(self, ticker_interval_secs: int, startup_candles: int,
+    def adjust_start_if_necessary(self, timeframe_secs: int, startup_candles: int,
                                   min_date: arrow.Arrow) -> None:
         """
         Adjust startts by <startup_candles> candles.
         Applies only if no startup-candles have been available.
-        :param ticker_interval_secs: Ticker interval in seconds e.g. `timeframe_to_seconds('5m')`
+        :param timeframe_secs: Ticker timeframe in seconds e.g. `timeframe_to_seconds('5m')`
         :param startup_candles: Number of candles to move start-date forward
         :param min_date: Minimum data date loaded. Key kriterium to decide if start-time
                          has to be moved
@@ -55,7 +55,7 @@ class TimeRange:
         # If no startts was defined, or backtest-data starts at the defined backtest-date
         logger.warning("Moving start-date by %s candles to account for startup time.",
                        startup_candles)
-        self.startts = (min_date.timestamp + ticker_interval_secs * startup_candles)
+        self.startts = (min_date.timestamp + timeframe_secs * startup_candles)
         self.starttype = 'date'

     @staticmethod
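
Note: the adjustment above is plain arithmetic; a worked sketch with assumed values (not freqtrade code):

    timeframe_secs = 300           # timeframe_to_seconds('5m')
    startup_candles = 30
    min_date_ts = 1_572_480_000    # hypothetical min_date.timestamp
    startts = min_date_ts + timeframe_secs * startup_candles
    print(startts - min_date_ts)   # 9000s: start moves forward by 30 five-minute candles
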
@@ -24,7 +24,7 @@ AVAILABLE_PAIRLISTS = ['StaticPairList', 'VolumePairList']
 DRY_RUN_WALLET = 999.9
 MATH_CLOSE_PREC = 1e-14  # Precision used for float comparisons

-TICKER_INTERVALS = [
+TIMEFRAMES = [
     '1m', '3m', '5m', '15m', '30m',
     '1h', '2h', '4h', '6h', '8h', '12h',
     '1d', '3d', '1w',
@@ -57,7 +57,7 @@ CONF_SCHEMA = {
     'type': 'object',
     'properties': {
         'max_open_trades': {'type': 'integer', 'minimum': -1},
-        'ticker_interval': {'type': 'string', 'enum': TICKER_INTERVALS},
+        'ticker_interval': {'type': 'string', 'enum': TIMEFRAMES},
         'stake_currency': {'type': 'string', 'enum': ['BTC', 'XBT', 'ETH', 'USDT', 'EUR', 'USD']},
         'stake_amount': {
             "type": ["number", "string"],
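
Note: `CONF_SCHEMA` is consumed by jsonschema-style validation, so the renamed enum keeps rejecting unknown values. A minimal sketch with a toy schema and a hypothetical '7m' value:

    from jsonschema import ValidationError, validate

    TIMEFRAMES = ['1m', '3m', '5m', '15m', '30m',
                  '1h', '2h', '4h', '6h', '8h', '12h',
                  '1d', '3d', '1w']
    schema = {'type': 'object',
              'properties': {'ticker_interval': {'type': 'string', 'enum': TIMEFRAMES}}}

    validate({'ticker_interval': '5m'}, schema)       # passes
    try:
        validate({'ticker_interval': '7m'}, schema)   # '7m' is not in the enum
    except ValidationError as err:
        print(err.message)
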
@@ -178,9 +178,9 @@ def create_cum_profit(df: pd.DataFrame, trades: pd.DataFrame, col_name: str,
     :return: Returns df with one additional column, col_name, containing the cumulative profit.
     """
     from freqtrade.exchange import timeframe_to_minutes
-    ticker_minutes = timeframe_to_minutes(timeframe)
-    # Resample to ticker_interval to make sure trades match candles
-    _trades_sum = trades.resample(f'{ticker_minutes}min', on='close_time')[['profitperc']].sum()
+    timeframe_minutes = timeframe_to_minutes(timeframe)
+    # Resample to timeframe to make sure trades match candles
+    _trades_sum = trades.resample(f'{timeframe_minutes}min', on='close_time')[['profitperc']].sum()
     df.loc[:, col_name] = _trades_sum.cumsum()
     # Set first value to 0
     df.loc[df.iloc[0].name, col_name] = 0
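
Note: the renamed variable feeds a plain pandas resample; a self-contained sketch with made-up trades (not freqtrade data):

    import pandas as pd

    trades = pd.DataFrame({
        'close_time': pd.to_datetime(['2019-11-01 00:02', '2019-11-01 00:04',
                                      '2019-11-01 00:11']),
        'profitperc': [0.01, -0.005, 0.02],
    })
    timeframe_minutes = 5  # timeframe_to_minutes('5m')
    # Trades closing inside the same 5-minute candle are summed, so the
    # cumulative profit lines up with the OHLCV candle index.
    _trades_sum = trades.resample(f'{timeframe_minutes}min', on='close_time')[['profitperc']].sum()
    print(_trades_sum['profitperc'].cumsum())
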
@@ -10,13 +10,13 @@ from pandas import DataFrame, to_datetime
 logger = logging.getLogger(__name__)


-def parse_ticker_dataframe(ticker: list, ticker_interval: str, pair: str, *,
+def parse_ticker_dataframe(ticker: list, timeframe: str, pair: str, *,
                            fill_missing: bool = True,
                            drop_incomplete: bool = True) -> DataFrame:
     """
     Converts a ticker-list (format ccxt.fetch_ohlcv) to a Dataframe
     :param ticker: ticker list, as returned by exchange.async_get_candle_history
-    :param ticker_interval: ticker_interval (e.g. 5m). Used to fill up eventual missing data
+    :param timeframe: timeframe (e.g. 5m). Used to fill up eventual missing data
     :param pair: Pair this data is for (used to warn if fillup was necessary)
     :param fill_missing: fill up missing candles with 0 candles
                          (see ohlcv_fill_up_missing_data for details)
@@ -52,12 +52,12 @@ def parse_ticker_dataframe(ticker: list, ticker_interval: str, pair: str, *,
         logger.debug('Dropping last candle')

     if fill_missing:
-        return ohlcv_fill_up_missing_data(frame, ticker_interval, pair)
+        return ohlcv_fill_up_missing_data(frame, timeframe, pair)
     else:
         return frame


-def ohlcv_fill_up_missing_data(dataframe: DataFrame, ticker_interval: str, pair: str) -> DataFrame:
+def ohlcv_fill_up_missing_data(dataframe: DataFrame, timeframe: str, pair: str) -> DataFrame:
     """
     Fills up missing data with 0 volume rows,
     using the previous close as price for "open", "high" "low" and "close", volume is set to 0
@@ -72,7 +72,7 @@ def ohlcv_fill_up_missing_data(dataframe: DataFrame, ticker_interval: str, pair:
         'close': 'last',
         'volume': 'sum'
     }
-    ticker_minutes = timeframe_to_minutes(ticker_interval)
+    ticker_minutes = timeframe_to_minutes(timeframe)
     # Resample to create "NAN" values
     df = dataframe.resample(f'{ticker_minutes}min', on='date').agg(ohlc_dict)
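
Note: the `ticker` argument is a ccxt-style OHLCV list with rows of [timestamp_ms, open, high, low, close, volume]. A usage sketch with hypothetical values (import path assumed from this release's layout):

    from freqtrade.data.converter import parse_ticker_dataframe

    raw = [
        [1572480000000, 91.50, 92.00, 91.00, 91.80, 12.5],
        [1572480300000, 91.80, 92.10, 91.70, 92.00, 8.3],
    ]
    df = parse_ticker_dataframe(raw, '5m', pair='ETH/BTC',
                                fill_missing=True, drop_incomplete=False)
    print(list(df.columns))  # ['date', 'open', 'high', 'low', 'close', 'volume']
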
@@ -37,52 +37,53 @@ class DataProvider:
     @property
     def available_pairs(self) -> List[Tuple[str, str]]:
         """
-        Return a list of tuples containing pair, ticker_interval for which data is currently cached.
+        Return a list of tuples containing (pair, timeframe) for which data is currently cached.
         Should be whitelist + open trades.
         """
         return list(self._exchange._klines.keys())

-    def ohlcv(self, pair: str, ticker_interval: str = None, copy: bool = True) -> DataFrame:
+    def ohlcv(self, pair: str, timeframe: str = None, copy: bool = True) -> DataFrame:
         """
         Get ohlcv data for the given pair as DataFrame
         Please use the `available_pairs` method to verify which pairs are currently cached.
         :param pair: pair to get the data for
-        :param ticker_interval: ticker interval to get data for
+        :param timeframe: Ticker timeframe to get data for
         :param copy: copy dataframe before returning if True.
                      Use False only for read-only operations (where the dataframe is not modified)
         """
         if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE):
-            return self._exchange.klines((pair, ticker_interval or self._config['ticker_interval']),
+            return self._exchange.klines((pair, timeframe or self._config['ticker_interval']),
                                          copy=copy)
         else:
             return DataFrame()

-    def historic_ohlcv(self, pair: str, ticker_interval: str = None) -> DataFrame:
+    def historic_ohlcv(self, pair: str, timeframe: str = None) -> DataFrame:
         """
         Get stored historic ohlcv data
         :param pair: pair to get the data for
-        :param ticker_interval: ticker interval to get data for
+        :param timeframe: timeframe to get data for
         """
         return load_pair_history(pair=pair,
-                                 ticker_interval=ticker_interval or self._config['ticker_interval'],
+                                 timeframe=timeframe or self._config['ticker_interval'],
                                  datadir=Path(self._config['datadir'])
                                  )

-    def get_pair_dataframe(self, pair: str, ticker_interval: str = None) -> DataFrame:
+    def get_pair_dataframe(self, pair: str, timeframe: str = None) -> DataFrame:
         """
         Return pair ohlcv data, either live or cached historical -- depending
         on the runmode.
         :param pair: pair to get the data for
-        :param ticker_interval: ticker interval to get data for
+        :param timeframe: timeframe to get data for
         :return: Dataframe for this pair
         """
         if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE):
             # Get live ohlcv data.
-            data = self.ohlcv(pair=pair, ticker_interval=ticker_interval)
+            data = self.ohlcv(pair=pair, timeframe=timeframe)
         else:
             # Get historic ohlcv data (cached on disk).
-            data = self.historic_ohlcv(pair=pair, ticker_interval=ticker_interval)
+            data = self.historic_ohlcv(pair=pair, timeframe=timeframe)
         if len(data) == 0:
-            logger.warning(f"No data found for ({pair}, {ticker_interval}).")
+            logger.warning(f"No data found for ({pair}, {timeframe}).")
         return data

     def market(self, pair: str) -> Optional[Dict[str, Any]]:
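
Note: callers reach these methods through the bot's DataProvider instance; a usage sketch (pair and timeframe are hypothetical, `dp` is assumed to be an already-constructed DataProvider):

    candles = dp.get_pair_dataframe(pair='ETH/BTC', timeframe='1h')
    if len(candles) == 0:
        print('nothing cached or stored for this pair/timeframe')
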
@@ -63,13 +63,13 @@ def trim_dataframe(df: DataFrame, timerange: TimeRange) -> DataFrame:
     return df


-def load_tickerdata_file(datadir: Path, pair: str, ticker_interval: str,
+def load_tickerdata_file(datadir: Path, pair: str, timeframe: str,
                          timerange: Optional[TimeRange] = None) -> Optional[list]:
     """
     Load a pair from file, either .json.gz or .json
     :return: tickerlist or None if unsuccessful
     """
-    filename = pair_data_filename(datadir, pair, ticker_interval)
+    filename = pair_data_filename(datadir, pair, timeframe)
     pairdata = misc.file_load_json(filename)
     if not pairdata:
         return []
@@ -80,11 +80,11 @@ def load_tickerdata_file(datadir: Path, pair: str, ticker_interval: str,


 def store_tickerdata_file(datadir: Path, pair: str,
-                          ticker_interval: str, data: list, is_zip: bool = False):
+                          timeframe: str, data: list, is_zip: bool = False):
     """
     Stores tickerdata to file
     """
-    filename = pair_data_filename(datadir, pair, ticker_interval)
+    filename = pair_data_filename(datadir, pair, timeframe)
     misc.file_dump_json(filename, data, is_zip=is_zip)


@@ -121,7 +121,7 @@ def _validate_pairdata(pair, pairdata, timerange: TimeRange):


 def load_pair_history(pair: str,
-                      ticker_interval: str,
+                      timeframe: str,
                       datadir: Path,
                       timerange: Optional[TimeRange] = None,
                       refresh_pairs: bool = False,
@@ -133,7 +133,7 @@ def load_pair_history(pair: str,
     """
     Loads cached ticker history for the given pair.
     :param pair: Pair to load data for
-    :param ticker_interval: Ticker-interval (e.g. "5m")
+    :param timeframe: Ticker timeframe (e.g. "5m")
     :param datadir: Path to the data storage location.
     :param timerange: Limit data to be loaded to this timerange
     :param refresh_pairs: Refresh pairs from exchange.
@@ -147,34 +147,34 @@ def load_pair_history(pair: str,

     timerange_startup = deepcopy(timerange)
     if startup_candles > 0 and timerange_startup:
-        timerange_startup.subtract_start(timeframe_to_seconds(ticker_interval) * startup_candles)
+        timerange_startup.subtract_start(timeframe_to_seconds(timeframe) * startup_candles)

     # The user forced the refresh of pairs
     if refresh_pairs:
         download_pair_history(datadir=datadir,
                               exchange=exchange,
                               pair=pair,
-                              ticker_interval=ticker_interval,
+                              timeframe=timeframe,
                               timerange=timerange)

-    pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange_startup)
+    pairdata = load_tickerdata_file(datadir, pair, timeframe, timerange=timerange_startup)

     if pairdata:
         if timerange_startup:
             _validate_pairdata(pair, pairdata, timerange_startup)
-        return parse_ticker_dataframe(pairdata, ticker_interval, pair=pair,
+        return parse_ticker_dataframe(pairdata, timeframe, pair=pair,
                                       fill_missing=fill_up_missing,
                                       drop_incomplete=drop_incomplete)
     else:
         logger.warning(
-            f'No history data for pair: "{pair}", interval: {ticker_interval}. '
+            f'No history data for pair: "{pair}", timeframe: {timeframe}. '
             'Use `freqtrade download-data` to download the data'
         )
         return None


 def load_data(datadir: Path,
-              ticker_interval: str,
+              timeframe: str,
               pairs: List[str],
               refresh_pairs: bool = False,
               exchange: Optional[Exchange] = None,
@@ -186,7 +186,7 @@ def load_data(datadir: Path,
     """
     Loads ticker history data for a list of pairs
     :param datadir: Path to the data storage location.
-    :param ticker_interval: Ticker-interval (e.g. "5m")
+    :param timeframe: Ticker Timeframe (e.g. "5m")
     :param pairs: List of pairs to load
     :param refresh_pairs: Refresh pairs from exchange.
                           (Note: Requires exchange to be passed as well.)
@@ -206,7 +206,7 @@ def load_data(datadir: Path,
     logger.info(f'Using indicator startup period: {startup_candles} ...')

     for pair in pairs:
-        hist = load_pair_history(pair=pair, ticker_interval=ticker_interval,
+        hist = load_pair_history(pair=pair, timeframe=timeframe,
                                  datadir=datadir, timerange=timerange,
                                  refresh_pairs=refresh_pairs,
                                  exchange=exchange,
@@ -220,9 +220,9 @@ def load_data(datadir: Path,
     return result


-def pair_data_filename(datadir: Path, pair: str, ticker_interval: str) -> Path:
+def pair_data_filename(datadir: Path, pair: str, timeframe: str) -> Path:
     pair_s = pair.replace("/", "_")
-    filename = datadir.joinpath(f'{pair_s}-{ticker_interval}.json')
+    filename = datadir.joinpath(f'{pair_s}-{timeframe}.json')
     return filename
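
Note: the filename scheme follows directly from the hunk above; for a hypothetical pair and data directory:

    from pathlib import Path

    datadir = Path('user_data/data/binance')       # hypothetical location
    pair_s = 'ETH/BTC'.replace("/", "_")
    print(datadir.joinpath(f'{pair_s}-5m.json'))   # user_data/data/binance/ETH_BTC-5m.json
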
@@ -232,7 +232,7 @@ def pair_trades_filename(datadir: Path, pair: str) -> Path:
     return filename


-def _load_cached_data_for_updating(datadir: Path, pair: str, ticker_interval: str,
+def _load_cached_data_for_updating(datadir: Path, pair: str, timeframe: str,
                                    timerange: Optional[TimeRange]) -> Tuple[List[Any],
                                                                             Optional[int]]:
     """
@@ -250,12 +250,12 @@ def _load_cached_data_for_updating(datadir: Path, pair: str, ticker_interval: st
         if timerange.starttype == 'date':
             since_ms = timerange.startts * 1000
         elif timerange.stoptype == 'line':
-            num_minutes = timerange.stopts * timeframe_to_minutes(ticker_interval)
+            num_minutes = timerange.stopts * timeframe_to_minutes(timeframe)
             since_ms = arrow.utcnow().shift(minutes=num_minutes).timestamp * 1000

     # read the cached file
     # Intentionally don't pass timerange in - since we need to load the full dataset.
-    data = load_tickerdata_file(datadir, pair, ticker_interval)
+    data = load_tickerdata_file(datadir, pair, timeframe)
     # remove the last item, could be incomplete candle
     if data:
         data.pop()
@@ -276,18 +276,18 @@ def _load_cached_data_for_updating(datadir: Path, pair: str, ticker_interval: st
 def download_pair_history(datadir: Path,
                           exchange: Optional[Exchange],
                           pair: str,
-                          ticker_interval: str = '5m',
+                          timeframe: str = '5m',
                           timerange: Optional[TimeRange] = None) -> bool:
     """
-    Download the latest ticker intervals from the exchange for the pair passed in parameters
-    The data is downloaded starting from the last correct ticker interval data that
+    Download latest candles from the exchange for the pair and timeframe passed in parameters
+    The data is downloaded starting from the last correct data that
     exists in a cache. If timerange starts earlier than the data in the cache,
     the full data will be redownloaded

     Based on @Rybolov work: https://github.com/rybolov/freqtrade-data

     :param pair: pair to download
-    :param ticker_interval: ticker interval
+    :param timeframe: Ticker Timeframe (e.g 5m)
     :param timerange: range of time to download
     :return: bool with success state
     """
@@ -298,17 +298,17 @@ def download_pair_history(datadir: Path,

     try:
         logger.info(
-            f'Download history data for pair: "{pair}", interval: {ticker_interval} '
+            f'Download history data for pair: "{pair}", timeframe: {timeframe} '
             f'and store in {datadir}.'
         )

-        data, since_ms = _load_cached_data_for_updating(datadir, pair, ticker_interval, timerange)
+        data, since_ms = _load_cached_data_for_updating(datadir, pair, timeframe, timerange)

         logger.debug("Current Start: %s", misc.format_ms_time(data[1][0]) if data else 'None')
         logger.debug("Current End: %s", misc.format_ms_time(data[-1][0]) if data else 'None')

         # Default since_ms to 30 days if nothing is given
-        new_data = exchange.get_historic_ohlcv(pair=pair, ticker_interval=ticker_interval,
+        new_data = exchange.get_historic_ohlcv(pair=pair, timeframe=timeframe,
                                                since_ms=since_ms if since_ms
                                                else
                                                int(arrow.utcnow().shift(
@@ -318,12 +318,12 @@ def download_pair_history(datadir: Path,
         logger.debug("New Start: %s", misc.format_ms_time(data[0][0]))
         logger.debug("New End: %s", misc.format_ms_time(data[-1][0]))

-        store_tickerdata_file(datadir, pair, ticker_interval, data=data)
+        store_tickerdata_file(datadir, pair, timeframe, data=data)
         return True

     except Exception as e:
         logger.error(
-            f'Failed to download history data for pair: "{pair}", interval: {ticker_interval}. '
+            f'Failed to download history data for pair: "{pair}", timeframe: {timeframe}. '
            f'Error: {e}'
        )
        return False
@@ -343,17 +343,17 @@ def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes
             pairs_not_available.append(pair)
             logger.info(f"Skipping pair {pair}...")
             continue
-        for ticker_interval in timeframes:
+        for timeframe in timeframes:

-            dl_file = pair_data_filename(dl_path, pair, ticker_interval)
+            dl_file = pair_data_filename(dl_path, pair, timeframe)
             if erase and dl_file.exists():
                 logger.info(
-                    f'Deleting existing data for pair {pair}, interval {ticker_interval}.')
+                    f'Deleting existing data for pair {pair}, interval {timeframe}.')
                 dl_file.unlink()

-            logger.info(f'Downloading pair {pair}, interval {ticker_interval}.')
+            logger.info(f'Downloading pair {pair}, interval {timeframe}.')
             download_pair_history(datadir=dl_path, exchange=exchange,
-                                  pair=pair, ticker_interval=str(ticker_interval),
+                                  pair=pair, timeframe=str(timeframe),
                                   timerange=timerange)
     return pairs_not_available

@@ -459,7 +459,7 @@ def get_timeframe(data: Dict[str, DataFrame]) -> Tuple[arrow.Arrow, arrow.Arrow]


 def validate_backtest_data(data: DataFrame, pair: str, min_date: datetime,
-                           max_date: datetime, ticker_interval_mins: int) -> bool:
+                           max_date: datetime, timeframe_mins: int) -> bool:
     """
     Validates preprocessed backtesting data for missing values and shows warnings about it that.

@@ -467,10 +467,10 @@ def validate_backtest_data(data: DataFrame, pair: str, min_date: datetime,
     :param pair: pair used for log output.
     :param min_date: start-date of the data
     :param max_date: end-date of the data
-    :param ticker_interval_mins: ticker interval in minutes
+    :param timeframe_mins: ticker Timeframe in minutes
     """
-    # total difference in minutes / interval-minutes
-    expected_frames = int((max_date - min_date).total_seconds() // 60 // ticker_interval_mins)
+    # total difference in minutes / timeframe-minutes
+    expected_frames = int((max_date - min_date).total_seconds() // 60 // timeframe_mins)
     found_missing = False
     dflen = len(data)
     if dflen < expected_frames:
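
Note: the completeness check reduces to one line of arithmetic; a worked example with assumed dates:

    from datetime import datetime

    min_date = datetime(2019, 11, 1)
    max_date = datetime(2019, 11, 2)
    timeframe_mins = 5
    expected_frames = int((max_date - min_date).total_seconds() // 60 // timeframe_mins)
    print(expected_frames)  # 288 five-minute candles in one day
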
@@ -97,7 +97,7 @@ class Edge:
         data = history.load_data(
             datadir=Path(self.config['datadir']),
             pairs=pairs,
-            ticker_interval=self.strategy.ticker_interval,
+            timeframe=self.strategy.ticker_interval,
             refresh_pairs=self._refresh_pairs,
             exchange=self.exchange,
             timerange=self._timerange,
@@ -536,40 +536,40 @@ class Exchange:
             logger.info("returning cached ticker-data for %s", pair)
             return self._cached_ticker[pair]

-    def get_historic_ohlcv(self, pair: str, ticker_interval: str,
+    def get_historic_ohlcv(self, pair: str, timeframe: str,
                            since_ms: int) -> List:
         """
         Gets candle history using asyncio and returns the list of candles.
         Handles all async doing.
         Async over one pair, assuming we get `_ohlcv_candle_limit` candles per call.
         :param pair: Pair to download
-        :param ticker_interval: Interval to get
+        :param timeframe: Ticker Timeframe to get
         :param since_ms: Timestamp in milliseconds to get history from
         :returns List of tickers
         """
         return asyncio.get_event_loop().run_until_complete(
-            self._async_get_historic_ohlcv(pair=pair, ticker_interval=ticker_interval,
+            self._async_get_historic_ohlcv(pair=pair, timeframe=timeframe,
                                            since_ms=since_ms))

     async def _async_get_historic_ohlcv(self, pair: str,
-                                        ticker_interval: str,
+                                        timeframe: str,
                                         since_ms: int) -> List:

-        one_call = timeframe_to_msecs(ticker_interval) * self._ohlcv_candle_limit
+        one_call = timeframe_to_msecs(timeframe) * self._ohlcv_candle_limit
         logger.debug(
             "one_call: %s msecs (%s)",
             one_call,
             arrow.utcnow().shift(seconds=one_call // 1000).humanize(only_distance=True)
         )
         input_coroutines = [self._async_get_candle_history(
-            pair, ticker_interval, since) for since in
+            pair, timeframe, since) for since in
             range(since_ms, arrow.utcnow().timestamp * 1000, one_call)]

         tickers = await asyncio.gather(*input_coroutines, return_exceptions=True)

         # Combine tickers
         data: List = []
-        for p, ticker_interval, ticker in tickers:
+        for p, timeframe, ticker in tickers:
             if p == pair:
                 data.extend(ticker)
         # Sort data again after extending the result - above calls return in "async order"
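
Note: `one_call` sizes each paginated request; rough numbers, assuming a hypothetical 500-candle exchange limit:

    one_call = 300_000 * 500      # timeframe_to_msecs('5m') * _ohlcv_candle_limit
    print(one_call // 3_600_000)  # ~41 hours of 5m history per exchange call
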
@@ -589,14 +589,14 @@ class Exchange:
         input_coroutines = []

         # Gather coroutines to run
-        for pair, ticker_interval in set(pair_list):
-            if (not ((pair, ticker_interval) in self._klines)
-                    or self._now_is_time_to_refresh(pair, ticker_interval)):
-                input_coroutines.append(self._async_get_candle_history(pair, ticker_interval))
+        for pair, timeframe in set(pair_list):
+            if (not ((pair, timeframe) in self._klines)
+                    or self._now_is_time_to_refresh(pair, timeframe)):
+                input_coroutines.append(self._async_get_candle_history(pair, timeframe))
             else:
                 logger.debug(
-                    "Using cached ohlcv data for pair %s, interval %s ...",
-                    pair, ticker_interval
+                    "Using cached ohlcv data for pair %s, timeframe %s ...",
+                    pair, timeframe
                 )

         tickers = asyncio.get_event_loop().run_until_complete(
@@ -608,40 +608,40 @@ class Exchange:
                 logger.warning("Async code raised an exception: %s", res.__class__.__name__)
                 continue
             pair = res[0]
-            ticker_interval = res[1]
+            timeframe = res[1]
             ticks = res[2]
             # keeping last candle time as last refreshed time of the pair
             if ticks:
-                self._pairs_last_refresh_time[(pair, ticker_interval)] = ticks[-1][0] // 1000
+                self._pairs_last_refresh_time[(pair, timeframe)] = ticks[-1][0] // 1000
             # keeping parsed dataframe in cache
-            self._klines[(pair, ticker_interval)] = parse_ticker_dataframe(
-                ticks, ticker_interval, pair=pair, fill_missing=True,
+            self._klines[(pair, timeframe)] = parse_ticker_dataframe(
+                ticks, timeframe, pair=pair, fill_missing=True,
                 drop_incomplete=self._ohlcv_partial_candle)
         return tickers

-    def _now_is_time_to_refresh(self, pair: str, ticker_interval: str) -> bool:
+    def _now_is_time_to_refresh(self, pair: str, timeframe: str) -> bool:
         # Calculating ticker interval in seconds
-        interval_in_sec = timeframe_to_seconds(ticker_interval)
+        interval_in_sec = timeframe_to_seconds(timeframe)

-        return not ((self._pairs_last_refresh_time.get((pair, ticker_interval), 0)
+        return not ((self._pairs_last_refresh_time.get((pair, timeframe), 0)
                      + interval_in_sec) >= arrow.utcnow().timestamp)

     @retrier_async
-    async def _async_get_candle_history(self, pair: str, ticker_interval: str,
+    async def _async_get_candle_history(self, pair: str, timeframe: str,
                                         since_ms: Optional[int] = None) -> Tuple[str, str, List]:
         """
         Asynchronously gets candle histories using fetch_ohlcv
-        returns tuple: (pair, ticker_interval, ohlcv_list)
+        returns tuple: (pair, timeframe, ohlcv_list)
         """
         try:
             # fetch ohlcv asynchronously
             s = '(' + arrow.get(since_ms // 1000).isoformat() + ') ' if since_ms is not None else ''
             logger.debug(
                 "Fetching pair %s, interval %s, since %s %s...",
-                pair, ticker_interval, since_ms, s
+                pair, timeframe, since_ms, s
             )

-            data = await self._api_async.fetch_ohlcv(pair, timeframe=ticker_interval,
+            data = await self._api_async.fetch_ohlcv(pair, timeframe=timeframe,
                                                      since=since_ms)

             # Because some exchange sort Tickers ASC and other DESC.
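
Note: `_now_is_time_to_refresh` just compares the cached candle's age against one timeframe; a sketch with made-up numbers (`.timestamp` is used as a property here, matching the arrow version in the hunks above):

    import arrow

    last_refresh = 1_572_480_000   # hypothetical epoch of the last cached candle
    interval_in_sec = 300          # timeframe_to_seconds('5m')
    needs_refresh = not (last_refresh + interval_in_sec >= arrow.utcnow().timestamp)
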
@@ -653,9 +653,9 @@ class Exchange:
                 data = sorted(data, key=lambda x: x[0])
             except IndexError:
                 logger.exception("Error loading %s. Result was %s.", pair, data)
-                return pair, ticker_interval, []
-            logger.debug("Done fetching pair %s, interval %s ...", pair, ticker_interval)
-            return pair, ticker_interval, data
+                return pair, timeframe, []
+            logger.debug("Done fetching pair %s, interval %s ...", pair, timeframe)
+            return pair, timeframe, data

         except ccxt.NotSupported as e:
             raise OperationalException(
@@ -802,7 +802,6 @@ class Exchange:
         Handles all async doing.
         Async over one pair, assuming we get `_ohlcv_candle_limit` candles per call.
         :param pair: Pair to download
-        :param ticker_interval: Interval to get
         :param since: Timestamp in milliseconds to get history from
         :param until: Timestamp in milliseconds. Defaults to current timestamp if not defined.
         :param from_id: Download data starting with ID (if id is known)
@@ -958,27 +957,27 @@ def available_exchanges(ccxt_module=None) -> List[str]:
     return [x for x in exchanges if not is_exchange_bad(x)]


-def timeframe_to_seconds(ticker_interval: str) -> int:
+def timeframe_to_seconds(timeframe: str) -> int:
     """
     Translates the timeframe interval value written in the human readable
     form ('1m', '5m', '1h', '1d', '1w', etc.) to the number
     of seconds for one timeframe interval.
     """
-    return ccxt.Exchange.parse_timeframe(ticker_interval)
+    return ccxt.Exchange.parse_timeframe(timeframe)


-def timeframe_to_minutes(ticker_interval: str) -> int:
+def timeframe_to_minutes(timeframe: str) -> int:
     """
     Same as timeframe_to_seconds, but returns minutes.
     """
-    return ccxt.Exchange.parse_timeframe(ticker_interval) // 60
+    return ccxt.Exchange.parse_timeframe(timeframe) // 60


-def timeframe_to_msecs(ticker_interval: str) -> int:
+def timeframe_to_msecs(timeframe: str) -> int:
     """
     Same as timeframe_to_seconds, but returns milliseconds.
     """
-    return ccxt.Exchange.parse_timeframe(ticker_interval) * 1000
+    return ccxt.Exchange.parse_timeframe(timeframe) * 1000


 def timeframe_to_prev_date(timeframe: str, date: datetime = None) -> datetime:
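
Note: all three helpers delegate to ccxt's parser, so the rename is purely cosmetic; for example:

    import ccxt

    assert ccxt.Exchange.parse_timeframe('5m') == 300                 # seconds
    assert ccxt.Exchange.parse_timeframe('1h') // 60 == 60            # minutes
    assert ccxt.Exchange.parse_timeframe('1d') * 1000 == 86_400_000   # milliseconds
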
@@ -83,8 +83,8 @@ class Backtesting:
         if "ticker_interval" not in self.config:
             raise OperationalException("Ticker-interval needs to be set in either configuration "
                                        "or as cli argument `--ticker-interval 5m`")
-        self.ticker_interval = str(self.config.get('ticker_interval'))
-        self.ticker_interval_mins = timeframe_to_minutes(self.ticker_interval)
+        self.timeframe = str(self.config.get('ticker_interval'))
+        self.timeframe_mins = timeframe_to_minutes(self.timeframe)

         # Get maximum required startup period
         self.required_startup = max([strat.startup_candle_count for strat in self.strategylist])
@@ -108,7 +108,7 @@ class Backtesting:
         data = history.load_data(
             datadir=Path(self.config['datadir']),
             pairs=self.config['exchange']['pair_whitelist'],
-            ticker_interval=self.ticker_interval,
+            timeframe=self.timeframe,
             timerange=timerange,
             startup_candles=self.required_startup,
             fail_without_data=True,
@@ -121,7 +121,7 @@ class Backtesting:
             min_date.isoformat(), max_date.isoformat(), (max_date - min_date).days
         )
         # Adjust startts forward if not enough data is available
-        timerange.adjust_start_if_necessary(timeframe_to_seconds(self.ticker_interval),
+        timerange.adjust_start_if_necessary(timeframe_to_seconds(self.timeframe),
                                             self.required_startup, min_date)

         return data, timerange
@@ -375,7 +375,7 @@ class Backtesting:
         lock_pair_until: Dict = {}
         # Indexes per pair, so some pairs are allowed to have a missing start.
         indexes: Dict = {}
-        tmp = start_date + timedelta(minutes=self.ticker_interval_mins)
+        tmp = start_date + timedelta(minutes=self.timeframe_mins)

         # Loop timerange and get candle for each pair at that point in time
         while tmp < end_date:
@@ -427,7 +427,7 @@ class Backtesting:
                     lock_pair_until[pair] = end_date.datetime

             # Move time one configured time_interval ahead.
-            tmp += timedelta(minutes=self.ticker_interval_mins)
+            tmp += timedelta(minutes=self.timeframe_mins)
         return DataFrame.from_records(trades, columns=BacktestResult._fields)

     def start(self) -> None:
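
Note: the backtest clock advances one candle per iteration; a sketch of the stepping with assumed values:

    from datetime import datetime, timedelta

    timeframe_mins = 5
    tmp = datetime(2019, 11, 1) + timedelta(minutes=timeframe_mins)
    end_date = datetime(2019, 11, 1, 1)
    steps = 0
    while tmp < end_date:
        tmp += timedelta(minutes=timeframe_mins)   # one candle per iteration
        steps += 1
    print(steps)  # 11 further 5-minute steps inside the first hour
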
@@ -106,10 +106,10 @@ class IHyperOpt(ABC):
         roi_t_alpha = 1.0
         roi_p_alpha = 1.0

-        ticker_interval_mins = timeframe_to_minutes(IHyperOpt.ticker_interval)
+        timeframe_mins = timeframe_to_minutes(IHyperOpt.ticker_interval)

         # We define here limits for the ROI space parameters automagically adapted to the
-        # ticker_interval used by the bot:
+        # timeframe used by the bot:
         #
         # * 'roi_t' (limits for the time intervals in the ROI tables) components
         #   are scaled linearly.
@@ -117,8 +117,8 @@ class IHyperOpt(ABC):
         #
         # The scaling is designed so that it maps exactly to the legacy Freqtrade roi_space()
         # method for the 5m ticker interval.
-        roi_t_scale = ticker_interval_mins / 5
-        roi_p_scale = math.log1p(ticker_interval_mins) / math.log1p(5)
+        roi_t_scale = timeframe_mins / 5
+        roi_p_scale = math.log1p(timeframe_mins) / math.log1p(5)
         roi_limits = {
             'roi_t1_min': int(10 * roi_t_scale * roi_t_alpha),
             'roi_t1_max': int(120 * roi_t_scale * roi_t_alpha),
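
Note: a worked example of the two scale factors, assuming a 1h timeframe:

    import math

    timeframe_mins = 60                                        # timeframe_to_minutes('1h')
    roi_t_scale = timeframe_mins / 5                           # 12.0 - linear in time
    roi_p_scale = math.log1p(timeframe_mins) / math.log1p(5)   # ~2.29 - logarithmic in profit
    print(int(10 * roi_t_scale), int(120 * roi_t_scale))       # roi_t1 limits become 120 and 1440
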
@@ -39,7 +39,7 @@ def init_plotscript(config):
     tickers = history.load_data(
         datadir=Path(str(config.get("datadir"))),
         pairs=pairs,
-        ticker_interval=config.get('ticker_interval', '5m'),
+        timeframe=config.get('ticker_interval', '5m'),
         timerange=timerange,
     )

@@ -300,12 +300,12 @@ def generate_profit_graph(pairs: str, tickers: Dict[str, pd.DataFrame],
     return fig


-def generate_plot_filename(pair, ticker_interval) -> str:
+def generate_plot_filename(pair, timeframe) -> str:
     """
-    Generate filenames per pair/ticker_interval to be used for storing plots
+    Generate filenames per pair/timeframe to be used for storing plots
     """
     pair_name = pair.replace("/", "_")
-    file_name = 'freqtrade-plot-' + pair_name + '-' + ticker_interval + '.html'
+    file_name = 'freqtrade-plot-' + pair_name + '-' + timeframe + '.html'

     logger.info('Generate plot file for %s', pair)
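
Note: the resulting name for a hypothetical pair and timeframe:

    pair_name = 'ETH/BTC'.replace("/", "_")
    print('freqtrade-plot-' + pair_name + '-' + '5m' + '.html')  # freqtrade-plot-ETH_BTC-5m.html
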
@@ -316,8 +316,9 @@ def store_plot_file(fig, filename: str, directory: Path, auto_open: bool = False
     """
     Generate a plot html file from pre populated fig plotly object
     :param fig: Plotly Figure to plot
-    :param pair: Pair to plot (used as filename and Plot title)
-    :param ticker_interval: Used as part of the filename
+    :param filename: Name to store the file as
+    :param directory: Directory to store the file in
+    :param auto_open: Automatically open files saved
     :return: None
     """
     directory.mkdir(parents=True, exist_ok=True)