cosmetic: rename interval, tick_interval, etc --> ticker_interval
commit ebf1126351
parent d6d16b4696
@@ -37,23 +37,23 @@ class DataProvider(object):
     @property
     def available_pairs(self) -> List[Tuple[str, str]]:
         """
-        Return a list of tuples containing pair, tick_interval for which data is currently cached.
+        Return a list of tuples containing pair, ticker_interval for which data is currently cached.
         Should be whitelist + open trades.
         """
         return list(self._exchange._klines.keys())

-    def ohlcv(self, pair: str, tick_interval: str = None, copy: bool = True) -> DataFrame:
+    def ohlcv(self, pair: str, ticker_interval: str = None, copy: bool = True) -> DataFrame:
         """
         get ohlcv data for the given pair as DataFrame
         Please check `available_pairs` to verify which pairs are currently cached.
         :param pair: pair to get the data for
-        :param tick_interval: ticker_interval to get pair for
+        :param ticker_interval: ticker_interval to get pair for
         :param copy: copy dataframe before returning.
                      Use false only for RO operations (where the dataframe is not modified)
         """
         if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE):
-            if tick_interval:
-                pairtick = (pair, tick_interval)
+            if ticker_interval:
+                pairtick = (pair, ticker_interval)
             else:
                 pairtick = (pair, self._config['ticker_interval'])

@@ -65,7 +65,7 @@ class DataProvider(object):
         """
         get stored historic ohlcv data
         :param pair: pair to get the data for
-        :param tick_interval: ticker_interval to get pair for
+        :param ticker_interval: ticker_interval to get pair for
         """
         return load_pair_history(pair=pair,
                                  ticker_interval=ticker_interval,
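The fallback in `DataProvider.ohlcv` is the part most callers notice: when no `ticker_interval` is passed, the configured default becomes the cache key. A minimal standalone sketch of that lookup, with plain dictionaries standing in for the exchange's kline cache (names and values here are illustrative, not freqtrade code):

```python
from typing import Dict, List, Optional, Tuple

_config = {"ticker_interval": "5m"}  # stands in for self._config['ticker_interval']
_klines: Dict[Tuple[str, str], List[list]] = {
    ("XRP/BTC", "5m"): [[1_546_300_800_000, 1.0, 2.0, 0.5, 1.5, 10.0]],
}

def cached_ohlcv(pair: str, ticker_interval: Optional[str] = None) -> List[list]:
    # Mirror of the pairtick logic above: fall back to the configured interval
    # when the caller passes no ticker_interval.
    pairtick = (pair, ticker_interval or _config["ticker_interval"])
    return _klines.get(pairtick, [])

print(cached_ohlcv("XRP/BTC"))        # cached under the default "5m"
print(cached_ohlcv("XRP/BTC", "1h"))  # [] -> not cached for that interval
```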
@@ -101,7 +101,7 @@ def load_pair_history(pair: str,
         download_pair_history(datadir=datadir,
                               exchange=exchange,
                               pair=pair,
-                              tick_interval=ticker_interval,
+                              ticker_interval=ticker_interval,
                               timerange=timerange)

     pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange)
@@ -151,7 +151,7 @@ def make_testdata_path(datadir: Optional[Path]) -> Path:
     return datadir or (Path(__file__).parent.parent / "tests" / "testdata").resolve()


-def load_cached_data_for_updating(filename: Path, tick_interval: str,
+def load_cached_data_for_updating(filename: Path, ticker_interval: str,
                                   timerange: Optional[TimeRange]) -> Tuple[List[Any],
                                                                            Optional[int]]:
     """
@@ -165,7 +165,7 @@ def load_cached_data_for_updating(filename: Path, tick_interval: str,
         if timerange.starttype == 'date':
             since_ms = timerange.startts * 1000
         elif timerange.stoptype == 'line':
-            num_minutes = timerange.stopts * timeframe_to_minutes(tick_interval)
+            num_minutes = timerange.stopts * timeframe_to_minutes(ticker_interval)
             since_ms = arrow.utcnow().shift(minutes=num_minutes).timestamp * 1000

     # read the cached file
@@ -192,7 +192,7 @@ def load_cached_data_for_updating(filename: Path, tick_interval: str,
 def download_pair_history(datadir: Optional[Path],
                           exchange: Exchange,
                           pair: str,
-                          tick_interval: str = '5m',
+                          ticker_interval: str = '5m',
                           timerange: Optional[TimeRange] = None) -> bool:
     """
     Download the latest ticker intervals from the exchange for the pair passed in parameters
@@ -202,7 +202,7 @@ def download_pair_history(datadir: Optional[Path],

     Based on @Rybolov work: https://github.com/rybolov/freqtrade-data
     :param pair: pair to download
-    :param tick_interval: ticker interval
+    :param ticker_interval: ticker interval
     :param timerange: range of time to download
     :return: bool with success state
     """
@@ -210,17 +210,17 @@ def download_pair_history(datadir: Optional[Path],
     try:
         path = make_testdata_path(datadir)
         filepair = pair.replace("/", "_")
-        filename = path.joinpath(f'{filepair}-{tick_interval}.json')
+        filename = path.joinpath(f'{filepair}-{ticker_interval}.json')

-        logger.info('Download the pair: "%s", Interval: %s', pair, tick_interval)
+        logger.info('Download the pair: "%s", Interval: %s', pair, ticker_interval)

-        data, since_ms = load_cached_data_for_updating(filename, tick_interval, timerange)
+        data, since_ms = load_cached_data_for_updating(filename, ticker_interval, timerange)

         logger.debug("Current Start: %s", misc.format_ms_time(data[1][0]) if data else 'None')
         logger.debug("Current End: %s", misc.format_ms_time(data[-1][0]) if data else 'None')

         # Default since_ms to 30 days if nothing is given
-        new_data = exchange.get_history(pair=pair, tick_interval=tick_interval,
+        new_data = exchange.get_history(pair=pair, ticker_interval=ticker_interval,
                                         since_ms=since_ms if since_ms
                                         else
                                         int(arrow.utcnow().shift(days=-30).float_timestamp) * 1000)
@@ -233,5 +233,5 @@ def download_pair_history(datadir: Optional[Path],
         return True
     except BaseException:
         logger.info('Failed to download the pair: "%s", Interval: %s',
-                    pair, tick_interval)
+                    pair, ticker_interval)
         return False
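Two details of `download_pair_history` above are easy to get wrong when calling it: the cache filename is built as `{pair}-{ticker_interval}.json` with `/` replaced by `_`, and `since_ms` falls back to 30 days ago when no cached data exists. A small standalone sketch, assuming only that `arrow` is installed (the helper name `cache_filename` is made up for illustration):

```python
from pathlib import Path
import arrow  # same library the function above uses

def cache_filename(datadir: Path, pair: str, ticker_interval: str) -> Path:
    # {pair with '/' replaced by '_'}-{ticker_interval}.json, as in the hunk above
    filepair = pair.replace("/", "_")
    return datadir.joinpath(f"{filepair}-{ticker_interval}.json")

print(cache_filename(Path("user_data/data"), "ETH/BTC", "5m"))
# -> user_data/data/ETH_BTC-5m.json

# When nothing is cached, since_ms defaults to 30 days ago, in milliseconds.
since_ms = int(arrow.utcnow().shift(days=-30).float_timestamp) * 1000
print(since_ms)
```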
@@ -489,26 +489,26 @@ class Exchange(object):
             logger.info("returning cached ticker-data for %s", pair)
             return self._cached_ticker[pair]

-    def get_history(self, pair: str, tick_interval: str,
+    def get_history(self, pair: str, ticker_interval: str,
                     since_ms: int) -> List:
         """
         Gets candle history using asyncio and returns the list of candles.
         Handles all async doing.
         """
         return asyncio.get_event_loop().run_until_complete(
-            self._async_get_history(pair=pair, tick_interval=tick_interval,
+            self._async_get_history(pair=pair, ticker_interval=ticker_interval,
                                     since_ms=since_ms))

     async def _async_get_history(self, pair: str,
-                                 tick_interval: str,
+                                 ticker_interval: str,
                                  since_ms: int) -> List:
         # Assume exchange returns 500 candles
         _LIMIT = 500

-        one_call = timeframe_to_msecs(tick_interval) * _LIMIT
+        one_call = timeframe_to_msecs(ticker_interval) * _LIMIT
         logger.debug("one_call: %s msecs", one_call)
         input_coroutines = [self._async_get_candle_history(
-            pair, tick_interval, since) for since in
+            pair, ticker_interval, since) for since in
             range(since_ms, arrow.utcnow().timestamp * 1000, one_call)]

         tickers = await asyncio.gather(*input_coroutines, return_exceptions=True)
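`_async_get_history` splits the requested range into windows of `one_call` milliseconds, assuming the exchange returns at most 500 candles per request, and schedules one `_async_get_candle_history` coroutine per window. A standalone sketch of that arithmetic; `timeframe_to_msecs` is re-implemented here in simplified form for illustration (freqtrade ships its own helper), and `arrow` is assumed installed:

```python
import arrow

_LIMIT = 500  # candles the exchange is assumed to return per call, as above

def timeframe_to_msecs(ticker_interval: str) -> int:
    # Simplified stand-in: handles only the m/h/d suffixes needed here.
    units = {"m": 60_000, "h": 3_600_000, "d": 86_400_000}
    return int(ticker_interval[:-1]) * units[ticker_interval[-1]]

ticker_interval = "5m"
one_call = timeframe_to_msecs(ticker_interval) * _LIMIT
since_ms = int(arrow.utcnow().shift(days=-30).float_timestamp * 1000)
now_ms = int(arrow.utcnow().float_timestamp * 1000)

# One coroutine per one_call window, exactly like the range() in the hunk above.
calls = len(range(since_ms, now_ms, one_call))
print(f"one_call spans {one_call} ms -> {calls} fetch_ohlcv calls for 30 days of 5m candles")
```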
@@ -548,14 +548,14 @@ class Exchange(object):
                 logger.warning("Async code raised an exception: %s", res.__class__.__name__)
                 continue
             pair = res[0]
-            tick_interval = res[1]
+            ticker_interval = res[1]
             ticks = res[2]
             # keeping last candle time as last refreshed time of the pair
             if ticks:
-                self._pairs_last_refresh_time[(pair, tick_interval)] = ticks[-1][0] // 1000
+                self._pairs_last_refresh_time[(pair, ticker_interval)] = ticks[-1][0] // 1000
             # keeping parsed dataframe in cache
-            self._klines[(pair, tick_interval)] = parse_ticker_dataframe(
-                ticks, tick_interval, fill_missing=True)
+            self._klines[(pair, ticker_interval)] = parse_ticker_dataframe(
+                ticks, ticker_interval, fill_missing=True)
         return tickers

     def _now_is_time_to_refresh(self, pair: str, ticker_interval: str) -> bool:
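The bookkeeping above hinges on candle timestamps being in milliseconds: the last candle's time, floor-divided by 1000, becomes the pair's last-refresh time in seconds. A standalone sketch with made-up OHLCV rows:

```python
# Illustrative OHLCV rows: [timestamp_ms, open, high, low, close, volume]
ticks = [
    [1_546_300_800_000, 0.0310, 0.0320, 0.0305, 0.0315, 12.0],
    [1_546_301_100_000, 0.0315, 0.0330, 0.0310, 0.0320, 9.5],
]
pair, ticker_interval = "ETH/BTC", "5m"

pairs_last_refresh_time = {}
if ticks:
    # Last candle time in seconds, as in the hunk above.
    pairs_last_refresh_time[(pair, ticker_interval)] = ticks[-1][0] // 1000

print(pairs_last_refresh_time)  # {('ETH/BTC', '5m'): 1546301100}
```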
@@ -566,17 +566,17 @@ class Exchange(object):
                 + interval_in_sec) >= arrow.utcnow().timestamp)

     @retrier_async
-    async def _async_get_candle_history(self, pair: str, tick_interval: str,
+    async def _async_get_candle_history(self, pair: str, ticker_interval: str,
                                         since_ms: Optional[int] = None) -> Tuple[str, str, List]:
         """
         Asyncronously gets candle histories using fetch_ohlcv
-        returns tuple: (pair, tick_interval, ohlcv_list)
+        returns tuple: (pair, ticker_interval, ohlcv_list)
         """
         try:
             # fetch ohlcv asynchronously
-            logger.debug("fetching %s, %s since %s ...", pair, tick_interval, since_ms)
+            logger.debug("fetching %s, %s since %s ...", pair, ticker_interval, since_ms)

-            data = await self._api_async.fetch_ohlcv(pair, timeframe=tick_interval,
+            data = await self._api_async.fetch_ohlcv(pair, timeframe=ticker_interval,
                                                      since=since_ms)

             # Because some exchange sort Tickers ASC and other DESC.
@@ -588,9 +588,9 @@ class Exchange(object):
                 data = sorted(data, key=lambda x: x[0])
             except IndexError:
                 logger.exception("Error loading %s. Result was %s.", pair, data)
-                return pair, tick_interval, []
-            logger.debug("done fetching %s, %s ...", pair, tick_interval)
-            return pair, tick_interval, data
+                return pair, ticker_interval, []
+            logger.debug("done fetching %s, %s ...", pair, ticker_interval)
+            return pair, ticker_interval, data

         except ccxt.NotSupported as e:
             raise OperationalException(
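The `sorted(data, key=lambda x: x[0])` above exists because some exchanges return candles newest-first; sorting on the timestamp column normalises the order before the data is parsed. A tiny standalone illustration with made-up rows:

```python
# Two illustrative candles in newest-first (DESC) order, as some exchanges return them.
data = [
    [1_546_301_100_000, 2.0, 3.0, 1.0, 2.5, 7.0],
    [1_546_300_800_000, 1.0, 2.0, 0.5, 1.5, 10.0],
]
data = sorted(data, key=lambda x: x[0])  # normalise to oldest-first, as in the hunk
print([row[0] for row in data])          # [1546300800000, 1546301100000]
```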
@@ -9,31 +9,31 @@ from freqtrade.tests.conftest import get_patched_exchange

 def test_ohlcv(mocker, default_conf, ticker_history):
     default_conf["runmode"] = RunMode.DRY_RUN
-    tick_interval = default_conf["ticker_interval"]
+    ticker_interval = default_conf["ticker_interval"]
     exchange = get_patched_exchange(mocker, default_conf)
-    exchange._klines[("XRP/BTC", tick_interval)] = ticker_history
-    exchange._klines[("UNITTEST/BTC", tick_interval)] = ticker_history
+    exchange._klines[("XRP/BTC", ticker_interval)] = ticker_history
+    exchange._klines[("UNITTEST/BTC", ticker_interval)] = ticker_history
     dp = DataProvider(default_conf, exchange)
     assert dp.runmode == RunMode.DRY_RUN
-    assert ticker_history.equals(dp.ohlcv("UNITTEST/BTC", tick_interval))
-    assert isinstance(dp.ohlcv("UNITTEST/BTC", tick_interval), DataFrame)
-    assert dp.ohlcv("UNITTEST/BTC", tick_interval) is not ticker_history
-    assert dp.ohlcv("UNITTEST/BTC", tick_interval, copy=False) is ticker_history
-    assert not dp.ohlcv("UNITTEST/BTC", tick_interval).empty
-    assert dp.ohlcv("NONESENSE/AAA", tick_interval).empty
+    assert ticker_history.equals(dp.ohlcv("UNITTEST/BTC", ticker_interval))
+    assert isinstance(dp.ohlcv("UNITTEST/BTC", ticker_interval), DataFrame)
+    assert dp.ohlcv("UNITTEST/BTC", ticker_interval) is not ticker_history
+    assert dp.ohlcv("UNITTEST/BTC", ticker_interval, copy=False) is ticker_history
+    assert not dp.ohlcv("UNITTEST/BTC", ticker_interval).empty
+    assert dp.ohlcv("NONESENSE/AAA", ticker_interval).empty

     # Test with and without parameter
-    assert dp.ohlcv("UNITTEST/BTC", tick_interval).equals(dp.ohlcv("UNITTEST/BTC"))
+    assert dp.ohlcv("UNITTEST/BTC", ticker_interval).equals(dp.ohlcv("UNITTEST/BTC"))

     default_conf["runmode"] = RunMode.LIVE
     dp = DataProvider(default_conf, exchange)
     assert dp.runmode == RunMode.LIVE
-    assert isinstance(dp.ohlcv("UNITTEST/BTC", tick_interval), DataFrame)
+    assert isinstance(dp.ohlcv("UNITTEST/BTC", ticker_interval), DataFrame)

     default_conf["runmode"] = RunMode.BACKTEST
     dp = DataProvider(default_conf, exchange)
     assert dp.runmode == RunMode.BACKTEST
-    assert dp.ohlcv("UNITTEST/BTC", tick_interval).empty
+    assert dp.ohlcv("UNITTEST/BTC", ticker_interval).empty


 def test_historic_ohlcv(mocker, default_conf, ticker_history):
@@ -54,15 +54,15 @@ def test_historic_ohlcv(mocker, default_conf, ticker_history):
 def test_available_pairs(mocker, default_conf, ticker_history):
     exchange = get_patched_exchange(mocker, default_conf)

-    tick_interval = default_conf["ticker_interval"]
-    exchange._klines[("XRP/BTC", tick_interval)] = ticker_history
-    exchange._klines[("UNITTEST/BTC", tick_interval)] = ticker_history
+    ticker_interval = default_conf["ticker_interval"]
+    exchange._klines[("XRP/BTC", ticker_interval)] = ticker_history
+    exchange._klines[("UNITTEST/BTC", ticker_interval)] = ticker_history
     dp = DataProvider(default_conf, exchange)

     assert len(dp.available_pairs) == 2
     assert dp.available_pairs == [
-        ("XRP/BTC", tick_interval),
-        ("UNITTEST/BTC", tick_interval),
+        ("XRP/BTC", ticker_interval),
+        ("UNITTEST/BTC", ticker_interval),
     ]

@@ -71,10 +71,10 @@ def test_refresh(mocker, default_conf, ticker_history):
     mocker.patch("freqtrade.exchange.Exchange.refresh_latest_ohlcv", refresh_mock)

     exchange = get_patched_exchange(mocker, default_conf, id="binance")
-    tick_interval = default_conf["ticker_interval"]
-    pairs = [("XRP/BTC", tick_interval), ("UNITTEST/BTC", tick_interval)]
+    ticker_interval = default_conf["ticker_interval"]
+    pairs = [("XRP/BTC", ticker_interval), ("UNITTEST/BTC", ticker_interval)]

-    pairs_non_trad = [("ETH/USDT", tick_interval), ("BTC/TUSD", "1h")]
+    pairs_non_trad = [("ETH/USDT", ticker_interval), ("BTC/TUSD", "1h")]

     dp = DataProvider(default_conf, exchange)
     dp.refresh(pairs)
@@ -242,10 +242,10 @@ def test_download_pair_history(ticker_history_list, mocker, default_conf) -> Non

     assert download_pair_history(datadir=None, exchange=exchange,
                                  pair='MEME/BTC',
-                                 tick_interval='1m')
+                                 ticker_interval='1m')
     assert download_pair_history(datadir=None, exchange=exchange,
                                  pair='CFI/BTC',
-                                 tick_interval='1m')
+                                 ticker_interval='1m')
     assert not exchange._pairs_last_refresh_time
     assert os.path.isfile(file1_1) is True
     assert os.path.isfile(file2_1) is True
@@ -259,10 +259,10 @@ def test_download_pair_history(ticker_history_list, mocker, default_conf) -> Non

     assert download_pair_history(datadir=None, exchange=exchange,
                                  pair='MEME/BTC',
-                                 tick_interval='5m')
+                                 ticker_interval='5m')
     assert download_pair_history(datadir=None, exchange=exchange,
                                  pair='CFI/BTC',
-                                 tick_interval='5m')
+                                 ticker_interval='5m')
     assert not exchange._pairs_last_refresh_time
     assert os.path.isfile(file1_5) is True
     assert os.path.isfile(file2_5) is True
@@ -280,8 +280,8 @@ def test_download_pair_history2(mocker, default_conf) -> None:
     json_dump_mock = mocker.patch('freqtrade.misc.file_dump_json', return_value=None)
     mocker.patch('freqtrade.exchange.Exchange.get_history', return_value=tick)
     exchange = get_patched_exchange(mocker, default_conf)
-    download_pair_history(None, exchange, pair="UNITTEST/BTC", tick_interval='1m')
-    download_pair_history(None, exchange, pair="UNITTEST/BTC", tick_interval='3m')
+    download_pair_history(None, exchange, pair="UNITTEST/BTC", ticker_interval='1m')
+    download_pair_history(None, exchange, pair="UNITTEST/BTC", ticker_interval='3m')
     assert json_dump_mock.call_count == 2

@@ -298,7 +298,7 @@ def test_download_backtesting_data_exception(ticker_history, mocker, caplog, def

     assert not download_pair_history(datadir=None, exchange=exchange,
                                      pair='MEME/BTC',
-                                     tick_interval='1m')
+                                     ticker_interval='1m')
     # clean files freshly downloaded
     _clean_test_file(file1_1)
     _clean_test_file(file1_5)
@@ -940,8 +940,8 @@ def test_get_history(default_conf, mocker, caplog, exchange_name):
     ]
     pair = 'ETH/BTC'

-    async def mock_candle_hist(pair, tick_interval, since_ms):
-        return pair, tick_interval, tick
+    async def mock_candle_hist(pair, ticker_interval, since_ms):
+        return pair, ticker_interval, tick

     exchange._async_get_candle_history = Mock(wraps=mock_candle_hist)
     # one_call calculation * 1.8 should do 2 calls
@@ -1037,7 +1037,7 @@ async def test__async_get_candle_history(default_conf, mocker, caplog, exchange_
     # exchange = Exchange(default_conf)
     await async_ccxt_exception(mocker, default_conf, MagicMock(),
                                "_async_get_candle_history", "fetch_ohlcv",
-                               pair='ABCD/BTC', tick_interval=default_conf['ticker_interval'])
+                               pair='ABCD/BTC', ticker_interval=default_conf['ticker_interval'])

     api_mock = MagicMock()
     with pytest.raises(OperationalException, match=r'Could not fetch ticker data*'):
@@ -92,18 +92,18 @@ for pair in PAIRS:
         pairs_not_available.append(pair)
         print(f"skipping pair {pair}")
         continue
-    for tick_interval in timeframes:
+    for ticker_interval in timeframes:
         pair_print = pair.replace('/', '_')
-        filename = f'{pair_print}-{tick_interval}.json'
+        filename = f'{pair_print}-{ticker_interval}.json'
         dl_file = dl_path.joinpath(filename)
         if args.erase and dl_file.exists():
-            print(f'Deleting existing data for pair {pair}, interval {tick_interval}')
+            print(f'Deleting existing data for pair {pair}, interval {ticker_interval}')
             dl_file.unlink()

-        print(f'downloading pair {pair}, interval {tick_interval}')
+        print(f'downloading pair {pair}, interval {ticker_interval}')
         download_pair_history(datadir=dl_path, exchange=exchange,
                               pair=pair,
-                              tick_interval=tick_interval,
+                              ticker_interval=ticker_interval,
                               timerange=timerange)

@@ -82,7 +82,7 @@ def load_trades(args: Namespace, pair: str, timerange: TimeRange) -> pd.DataFram
     return trades


-def generate_plot_file(fig, pair, tick_interval, is_last) -> None:
+def generate_plot_file(fig, pair, ticker_interval, is_last) -> None:
     """
     Generate a plot html file from pre populated fig plotly object
     :return: None
@@ -90,7 +90,7 @@ def generate_plot_file(fig, pair, tick_interval, is_last) -> None:
     logger.info('Generate plot file for %s', pair)

     pair_name = pair.replace("/", "_")
-    file_name = 'freqtrade-plot-' + pair_name + '-' + tick_interval + '.html'
+    file_name = 'freqtrade-plot-' + pair_name + '-' + ticker_interval + '.html'

     Path("user_data/plots").mkdir(parents=True, exist_ok=True)

@@ -135,20 +135,20 @@ def get_tickers_data(strategy, exchange, pairs: List[str], args):
     :return: dictinnary of tickers. output format: {'pair': tickersdata}
     """

-    tick_interval = strategy.ticker_interval
+    ticker_interval = strategy.ticker_interval
     timerange = Arguments.parse_timerange(args.timerange)

     tickers = {}
     if args.live:
         logger.info('Downloading pairs.')
-        exchange.refresh_latest_ohlcv([(pair, tick_interval) for pair in pairs])
+        exchange.refresh_latest_ohlcv([(pair, ticker_interval) for pair in pairs])
         for pair in pairs:
-            tickers[pair] = exchange.klines((pair, tick_interval))
+            tickers[pair] = exchange.klines((pair, ticker_interval))
     else:
         tickers = history.load_data(
             datadir=Path(str(_CONF.get("datadir"))),
             pairs=pairs,
-            ticker_interval=tick_interval,
+            ticker_interval=ticker_interval,
             refresh_pairs=_CONF.get('refresh_pairs', False),
             timerange=timerange,
             exchange=Exchange(_CONF)
@@ -399,7 +399,7 @@ def analyse_and_plot_pairs(args: Namespace):
     strategy, exchange, pairs = get_trading_env(args)
     # Set timerange to use
     timerange = Arguments.parse_timerange(args.timerange)
-    tick_interval = strategy.ticker_interval
+    ticker_interval = strategy.ticker_interval

     tickers = get_tickers_data(strategy, exchange, pairs, args)
     pair_counter = 0
@@ -422,7 +422,7 @@ def analyse_and_plot_pairs(args: Namespace):
         )

         is_last = (False, True)[pair_counter == len(tickers)]
-        generate_plot_file(fig, pair, tick_interval, is_last)
+        generate_plot_file(fig, pair, ticker_interval, is_last)

     logger.info('End of ploting process %s plots generated', pair_counter)

@@ -76,7 +76,7 @@ def plot_profit(args: Namespace) -> None:
     in helping out to find a good algorithm.
     """

-    # We need to use the same pairs, same tick_interval
+    # We need to use the same pairs, same ticker_interval
     # and same timeperiod as used in backtesting
     # to match the tickerdata against the profits-results
     timerange = Arguments.parse_timerange(args.timerange)
@@ -112,7 +112,7 @@ def plot_profit(args: Namespace) -> None:
     else:
         filter_pairs = config['exchange']['pair_whitelist']

-    tick_interval = strategy.ticker_interval
+    ticker_interval = strategy.ticker_interval
     pairs = config['exchange']['pair_whitelist']

     if filter_pairs:
@@ -122,7 +122,7 @@ def plot_profit(args: Namespace) -> None:
     tickers = history.load_data(
         datadir=Path(str(config.get('datadir'))),
         pairs=pairs,
-        ticker_interval=tick_interval,
+        ticker_interval=ticker_interval,
         refresh_pairs=False,
         timerange=timerange
     )
@@ -134,7 +134,7 @@ def plot_profit(args: Namespace) -> None:
     dates = common_datearray(dataframes)
     min_date = int(min(dates).timestamp())
     max_date = int(max(dates).timestamp())
-    num_iterations = define_index(min_date, max_date, tick_interval) + 1
+    num_iterations = define_index(min_date, max_date, ticker_interval) + 1

     # Make an average close price of all the pairs that was involved.
     # this could be useful to gauge the overall market trend
@@ -154,7 +154,7 @@ def plot_profit(args: Namespace) -> None:
     avgclose /= num

     # make an profits-growth array
-    pg = make_profit_array(data, num_iterations, min_date, tick_interval, filter_pairs)
+    pg = make_profit_array(data, num_iterations, min_date, ticker_interval, filter_pairs)

     #
     # Plot the pairs average close prices, and total profit growth
@@ -178,7 +178,7 @@ def plot_profit(args: Namespace) -> None:
     fig.append_trace(profit, 2, 1)

     for pair in pairs:
-        pg = make_profit_array(data, num_iterations, min_date, tick_interval, [pair])
+        pg = make_profit_array(data, num_iterations, min_date, ticker_interval, [pair])
         pair_profit = go.Scattergl(
             x=dates,
             y=pg,
@@ -189,11 +189,11 @@ def plot_profit(args: Namespace) -> None:
     plot(fig, filename=str(Path('user_data').joinpath('freqtrade-profit-plot.html')))


-def define_index(min_date: int, max_date: int, interval: str) -> int:
+def define_index(min_date: int, max_date: int, ticker_interval: str) -> int:
     """
     Return the index of a specific date
     """
-    interval_seconds = timeframe_to_seconds(interval)
+    interval_seconds = timeframe_to_seconds(ticker_interval)
     return int((max_date - min_date) / interval_seconds)

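`define_index` is plain arithmetic: the number of `ticker_interval` steps between two unix timestamps. A standalone sketch with a simplified stand-in for `timeframe_to_seconds` (illustrative only):

```python
def timeframe_to_seconds(ticker_interval: str) -> int:
    # Simplified stand-in for freqtrade's helper; handles only m/h/d suffixes.
    units = {"m": 60, "h": 3_600, "d": 86_400}
    return int(ticker_interval[:-1]) * units[ticker_interval[-1]]

def define_index(min_date: int, max_date: int, ticker_interval: str) -> int:
    # Same arithmetic as the function above.
    return int((max_date - min_date) / timeframe_to_seconds(ticker_interval))

# One day of 5m candles: 86400 s / 300 s = 288 index slots (the caller adds 1 afterwards).
print(define_index(1_546_300_800, 1_546_387_200, "5m"))  # 288
```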