Move dataframe trim to within jsondatahandler

Matthias 2019-12-26 09:51:03 +01:00
parent 552c93abf0
commit e861f05b75
4 changed files with 25 additions and 13 deletions

@@ -55,11 +55,7 @@ class IDataHandler(ABC):
                 f'No history data for pair: "{pair}", timeframe: {timeframe}. '
                 'Use `freqtrade download-data` to download the data'
             )
-            return pairdf
-        else:
-            if timerange_startup:
-                self._validate_pairdata(pair, pairdf, timerange_startup)
-            return pairdf
+        return pairdf
 
     def _validate_pairdata(self, pair, pairdata: DataFrame, timerange: TimeRange):
         """

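After this hunk, the base class only warns about missing data and returns; validation and trimming are left to the concrete handler's _ohlcv_load(). A minimal sketch of that control flow, assuming a simplified signature and a stubbed _ohlcv_load (illustrative only, not freqtrade's actual class):

    import logging

    from pandas import DataFrame

    logger = logging.getLogger(__name__)


    class SketchDataHandler:
        """Illustrative stand-in for IDataHandler, not the real freqtrade class."""

        def _ohlcv_load(self, pair: str, timeframe: str, timerange=None) -> DataFrame:
            # Stub: a real handler would read the stored candles here.
            return DataFrame(columns=['date', 'open', 'high', 'low', 'close', 'volume'])

        def ohlcv_load(self, pair: str, timeframe: str, timerange_startup=None) -> DataFrame:
            pairdf = self._ohlcv_load(pair, timeframe, timerange=timerange_startup)
            if pairdf.empty:
                logger.warning(
                    f'No history data for pair: "{pair}", timeframe: {timeframe}. '
                    'Use `freqtrade download-data` to download the data')
            # Single exit point: validation and trimming now happen inside the
            # concrete handler's _ohlcv_load() (see the JsonDataHandler hunk below).
            return pairdf


    SketchDataHandler().ohlcv_load('UNITTEST/BTC', '5m')  # logs the warning
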
@@ -78,13 +78,18 @@ class JsonDataHandler(IDataHandler):
                                        utc=True,
                                        infer_datetime_format=True)
+        enddate = pairdata.iloc[-1]['date']
+        if timerange:
+            self._validate_pairdata(pair, pairdata, timerange)
+            pairdata = trim_dataframe(pairdata, timerange)
+        # incomplete candles should only be dropped if we didn't trim the end beforehand.
         return clean_ohlcv_dataframe(pairdata, timeframe,
                                      pair=pair,
                                      fill_missing=fill_missing,
-                                     drop_incomplete=drop_incomplete)
+                                     drop_incomplete=(drop_incomplete and
+                                                      enddate == pairdata.iloc[-1]['date']))
 
     def ohlcv_append(self, pair: str, timeframe: str, data: DataFrame) -> None:
         """
@@ -140,6 +145,18 @@ class JsonDataHandler(IDataHandler):
         return tradesdata
 
+    def trades_purge(self, pair: str) -> bool:
+        """
+        Remove data for this pair
+        :param pair: Delete data for this pair.
+        :return: True when deleted, false if file did not exist.
+        """
+        filename = self._pair_trades_filename(self._datadir, pair)
+        if filename.is_file():
+            filename.unlink()
+            return True
+        return False
+
     @classmethod
     def _pair_data_filename(cls, datadir: Path, pair: str, timeframe: str) -> Path:
         pair_s = pair.replace("/", "_")
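
The new trades_purge() is a plain delete-if-present helper: it removes the stored trades file for a pair and reports whether anything was actually deleted. The same pattern, extracted into a standalone sketch (the function name and path below are made up for illustration):

    from pathlib import Path

    def purge_file(filename: Path) -> bool:
        # Mirrors trades_purge() above: delete the file if it exists,
        # return True only when something was removed.
        if filename.is_file():
            filename.unlink()
            return True
        return False

    print(purge_file(Path('/tmp/does-not-exist.json')))  # False: nothing to delete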

@@ -369,11 +369,9 @@ def refresh_backtest_trades_data(exchange: Exchange, pairs: List[str], datadir:
             logger.info(f"Skipping pair {pair}...")
             continue
 
-        dl_file = pair_trades_filename(datadir, pair)
-        if erase and dl_file.exists():
-            logger.info(
-                f'Deleting existing data for pair {pair}.')
-            dl_file.unlink()
+        if erase:
+            if data_handler.trades_purge(pair):
+                logger.info(f'Deleting existing data for pair {pair}.')
 
         logger.info(f'Downloading trades for pair {pair}.')
         _download_trades_history(datadir=datadir, exchange=exchange,
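
With trades_purge() available, the caller no longer builds trade-file paths or unlinks them itself; it simply asks the data handler. A rough sketch of the resulting erase-then-download loop, with the two nested ifs collapsed into one condition and a dummy handler standing in for JsonDataHandler (all names other than trades_purge are assumptions):

    class DummyTradesHandler:
        """Stand-in for a real data handler; pretends no file existed."""
        def trades_purge(self, pair: str) -> bool:
            return False

    def refresh_trades(pairs, data_handler, erase=False):
        for pair in pairs:
            if erase and data_handler.trades_purge(pair):
                print(f'Deleting existing data for pair {pair}.')
            print(f'Downloading trades for pair {pair}.')
            # ... the actual trade download would be triggered here ...

    refresh_trades(['ETH/BTC'], DummyTradesHandler(), erase=True)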

@@ -369,8 +369,9 @@ def test_load_partial_missing(testdatadir, caplog) -> None:
     # timedifference in 5 minutes
     td = ((end - start).total_seconds() // 60 // 5) + 1
     assert td != len(tickerdata['UNITTEST/BTC'])
     # This validation happens now after parsing to pandas.
-    end_real = arrow.get(tickerdata['UNITTEST/BTC'].iloc[-1, 0])
+    # Shift endtime with +5 - as last candle is dropped (partial candle)
+    end_real = arrow.get(tickerdata['UNITTEST/BTC'].iloc[-1, 0]).shift(minutes=5)
     assert log_has(f'Missing data at end for pair '
                    f'UNITTEST/BTC, data ends at {end_real.strftime("%Y-%m-%d %H:%M:%S")}',
                    caplog)
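
The +5 minute shift compensates for the dropped partial candle: the 'data ends at' timestamp is logged during validation, before clean_ohlcv_dataframe removes the still-incomplete last candle, so the test rebuilds the logged value by shifting the last remaining candle forward by one 5m timeframe. A tiny arithmetic example of that reconstruction (timestamps are made up):

    import arrow

    last_kept_candle = arrow.get('2018-01-10T23:55:00')     # newest row after cleaning
    logged_data_end = last_kept_candle.shift(minutes=5)     # the dropped partial candle
    print(logged_data_end.strftime('%Y-%m-%d %H:%M:%S'))    # 2018-01-11 00:00:00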