Merge branch 'develop' into pr/froggleston/7861

Matthias
2023-03-19 15:00:20 +01:00
259 changed files with 23487 additions and 14834 deletions


@@ -12,9 +12,11 @@ from freqtrade.data.btanalysis import (BT_DATA_COLUMNS, analyze_trade_parallelis
get_latest_hyperopt_file, load_backtest_data,
load_backtest_metadata, load_trades, load_trades_from_db)
from freqtrade.data.history import load_data, load_pair_history
from freqtrade.data.metrics import (calculate_cagr, calculate_csum, calculate_market_change,
calculate_max_drawdown, calculate_underwater,
combine_dataframes_with_mean, create_cum_profit)
from freqtrade.data.metrics import (calculate_cagr, calculate_calmar, calculate_csum,
calculate_expectancy, calculate_market_change,
calculate_max_drawdown, calculate_sharpe, calculate_sortino,
calculate_underwater, combine_dataframes_with_mean,
create_cum_profit)
from freqtrade.exceptions import OperationalException
from tests.conftest import CURRENT_TEST_STRATEGY, create_mock_trades
from tests.conftest_trades import MOCK_TRADE_COUNT
@@ -30,10 +32,10 @@ def test_get_latest_backtest_filename(testdatadir, mocker):
testdir_bt = testdatadir / "backtest_results"
res = get_latest_backtest_filename(testdir_bt)
assert res == 'backtest-result_new.json'
assert res == 'backtest-result.json'
res = get_latest_backtest_filename(str(testdir_bt))
assert res == 'backtest-result_new.json'
assert res == 'backtest-result.json'
mocker.patch("freqtrade.data.btanalysis.json_load", return_value={})
@@ -81,7 +83,7 @@ def test_load_backtest_data_old_format(testdatadir, mocker):
def test_load_backtest_data_new_format(testdatadir):
filename = testdatadir / "backtest_results/backtest-result_new.json"
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename)
assert isinstance(bt_data, DataFrame)
assert set(bt_data.columns) == set(BT_DATA_COLUMNS)
@@ -182,7 +184,7 @@ def test_extract_trades_of_period(testdatadir):
def test_analyze_trade_parallelism(testdatadir):
filename = testdatadir / "backtest_results/backtest-result_new.json"
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename)
res = analyze_trade_parallelism(bt_data, "5m")
@@ -256,7 +258,7 @@ def test_combine_dataframes_with_mean_no_data(testdatadir):
def test_create_cum_profit(testdatadir):
filename = testdatadir / "backtest_results/backtest-result_new.json"
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename)
timerange = TimeRange.parse_timerange("20180110-20180112")
@@ -268,11 +270,11 @@ def test_create_cum_profit(testdatadir):
"cum_profits", timeframe="5m")
assert "cum_profits" in cum_profits.columns
assert cum_profits.iloc[0]['cum_profits'] == 0
assert pytest.approx(cum_profits.iloc[-1]['cum_profits']) == 8.723007518796964e-06
assert pytest.approx(cum_profits.iloc[-1]['cum_profits']) == 9.0225563e-05
def test_create_cum_profit1(testdatadir):
filename = testdatadir / "backtest_results/backtest-result_new.json"
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename)
# Move close-time to "off" the candle, to make sure the logic still works
bt_data['close_date'] = bt_data.loc[:, 'close_date'] + DateOffset(seconds=20)
@@ -286,7 +288,7 @@ def test_create_cum_profit1(testdatadir):
"cum_profits", timeframe="5m")
assert "cum_profits" in cum_profits.columns
assert cum_profits.iloc[0]['cum_profits'] == 0
assert pytest.approx(cum_profits.iloc[-1]['cum_profits']) == 8.723007518796964e-06
assert pytest.approx(cum_profits.iloc[-1]['cum_profits']) == 9.0225563e-05
with pytest.raises(ValueError, match='Trade dataframe empty.'):
create_cum_profit(df.set_index('date'), bt_data[bt_data["pair"] == 'NOTAPAIR'],
@@ -294,18 +296,18 @@ def test_create_cum_profit1(testdatadir):
def test_calculate_max_drawdown(testdatadir):
filename = testdatadir / "backtest_results/backtest-result_new.json"
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename)
_, hdate, lowdate, hval, lval, drawdown = calculate_max_drawdown(
bt_data, value_col="profit_abs")
assert isinstance(drawdown, float)
assert pytest.approx(drawdown) == 0.12071099
assert pytest.approx(drawdown) == 0.29753914
assert isinstance(hdate, Timestamp)
assert isinstance(lowdate, Timestamp)
assert isinstance(hval, float)
assert isinstance(lval, float)
assert hdate == Timestamp('2018-01-25 01:30:00', tz='UTC')
assert lowdate == Timestamp('2018-01-25 03:50:00', tz='UTC')
assert hdate == Timestamp('2018-01-16 19:30:00', tz='UTC')
assert lowdate == Timestamp('2018-01-16 22:25:00', tz='UTC')
underwater = calculate_underwater(bt_data)
assert isinstance(underwater, DataFrame)
@@ -318,14 +320,15 @@ def test_calculate_max_drawdown(testdatadir):
def test_calculate_csum(testdatadir):
filename = testdatadir / "backtest_results/backtest-result_new.json"
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename)
csum_min, csum_max = calculate_csum(bt_data)
assert isinstance(csum_min, float)
assert isinstance(csum_max, float)
assert csum_min < 0.01
assert csum_max > 0.02
assert csum_min < csum_max
assert csum_min < 0.0001
assert csum_max > 0.0002
csum_min1, csum_max1 = calculate_csum(bt_data, 5)
assert csum_min1 == csum_min + 5
@@ -335,6 +338,69 @@ def test_calculate_csum(testdatadir):
csum_min, csum_max = calculate_csum(DataFrame())
def test_calculate_expectancy(testdatadir):
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename)
expectancy = calculate_expectancy(DataFrame())
assert expectancy == 0.0
expectancy = calculate_expectancy(bt_data)
assert isinstance(expectancy, float)
assert pytest.approx(expectancy) == 0.07151374226574791
def test_calculate_sortino(testdatadir):
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename)
sortino = calculate_sortino(DataFrame(), None, None, 0)
assert sortino == 0.0
sortino = calculate_sortino(
bt_data,
bt_data['open_date'].min(),
bt_data['close_date'].max(),
0.01,
)
assert isinstance(sortino, float)
assert pytest.approx(sortino) == 35.17722
def test_calculate_sharpe(testdatadir):
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename)
sharpe = calculate_sharpe(DataFrame(), None, None, 0)
assert sharpe == 0.0
sharpe = calculate_sharpe(
bt_data,
bt_data['open_date'].min(),
bt_data['close_date'].max(),
0.01,
)
assert isinstance(sharpe, float)
assert pytest.approx(sharpe) == 44.5078669
def test_calculate_calmar(testdatadir):
filename = testdatadir / "backtest_results/backtest-result.json"
bt_data = load_backtest_data(filename)
calmar = calculate_calmar(DataFrame(), None, None, 0)
assert calmar == 0.0
calmar = calculate_calmar(
bt_data,
bt_data['open_date'].min(),
bt_data['close_date'].max(),
0.01,
)
assert isinstance(calmar, float)
assert pytest.approx(calmar) == 559.040508
@pytest.mark.parametrize('start,end,days, expected', [
(64900, 176000, 3 * 365, 0.3945),
(64900, 176000, 365, 1.7119),
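
The parametrized cases above are consistent with a plain annualized-growth (CAGR) calculation. A minimal sketch of that arithmetic, assuming the conventional formula (end / start) ** (365 / days) - 1; this is an illustration of the expected values, not the freqtrade implementation shown in this diff:

# Sketch of the CAGR arithmetic the parametrized cases appear to exercise.
# The formula here is an assumption for illustration, not freqtrade code.
def cagr(start: float, end: float, days: int) -> float:
    return (end / start) ** (365 / days) - 1

assert round(cagr(64900, 176000, 3 * 365), 4) == 0.3945
assert round(cagr(64900, 176000, 365), 4) == 1.7119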


@@ -294,8 +294,8 @@ def test_convert_trades_format(default_conf, testdatadir, tmpdir):
@pytest.mark.parametrize('file_base,candletype', [
(['XRP_ETH-5m', 'XRP_ETH-1m'], CandleType.SPOT),
(['UNITTEST_USDT-1h-mark', 'XRP_USDT-1h-mark'], CandleType.MARK),
(['XRP_USDT-1h-futures'], CandleType.FUTURES),
(['UNITTEST_USDT_USDT-1h-mark', 'XRP_USDT_USDT-1h-mark'], CandleType.MARK),
(['XRP_USDT_USDT-1h-futures'], CandleType.FUTURES),
])
def test_convert_ohlcv_format(default_conf, testdatadir, tmpdir, file_base, candletype):
tmpdir1 = Path(tmpdir)
@@ -315,7 +315,10 @@ def test_convert_ohlcv_format(default_conf, testdatadir, tmpdir, file_base, cand
files_new.append(file_new)
default_conf['datadir'] = tmpdir1
default_conf['pairs'] = ['XRP_ETH', 'XRP_USDT', 'UNITTEST_USDT']
if candletype == CandleType.SPOT:
default_conf['pairs'] = ['XRP/ETH', 'XRP/USDT', 'UNITTEST/USDT']
else:
default_conf['pairs'] = ['XRP/ETH:ETH', 'XRP/USDT:USDT', 'UNITTEST/USDT:USDT']
default_conf['timeframes'] = ['1m', '5m', '1h']
assert not file_new.exists()


@@ -33,10 +33,10 @@ def test_datahandler_ohlcv_get_pairs(testdatadir):
assert set(pairs) == {'UNITTEST/BTC'}
pairs = JsonDataHandler.ohlcv_get_pairs(testdatadir, '1h', candle_type=CandleType.MARK)
assert set(pairs) == {'UNITTEST/USDT', 'XRP/USDT'}
assert set(pairs) == {'UNITTEST/USDT:USDT', 'XRP/USDT:USDT'}
pairs = JsonGzDataHandler.ohlcv_get_pairs(testdatadir, '1h', candle_type=CandleType.FUTURES)
assert set(pairs) == {'XRP/USDT'}
assert set(pairs) == {'XRP/USDT:USDT'}
pairs = HDF5DataHandler.ohlcv_get_pairs(testdatadir, '1h', candle_type=CandleType.MARK)
assert set(pairs) == {'UNITTEST/USDT:USDT'}
@@ -104,11 +104,12 @@ def test_datahandler_ohlcv_get_available_data(testdatadir):
paircombs = JsonDataHandler.ohlcv_get_available_data(testdatadir, TradingMode.FUTURES)
# Convert to set to avoid failures due to sorting
assert set(paircombs) == {
('UNITTEST/USDT', '1h', 'mark'),
('XRP/USDT', '1h', 'futures'),
('XRP/USDT', '1h', 'mark'),
('XRP/USDT', '8h', 'mark'),
('XRP/USDT', '8h', 'funding_rate'),
('UNITTEST/USDT:USDT', '1h', 'mark'),
('XRP/USDT:USDT', '5m', 'futures'),
('XRP/USDT:USDT', '1h', 'futures'),
('XRP/USDT:USDT', '1h', 'mark'),
('XRP/USDT:USDT', '8h', 'mark'),
('XRP/USDT:USDT', '8h', 'funding_rate'),
}
paircombs = JsonGzDataHandler.ohlcv_get_available_data(testdatadir, TradingMode.SPOT)
@@ -142,7 +143,7 @@ def test_jsondatahandler_ohlcv_load(testdatadir, caplog):
df = dh.ohlcv_load('XRP/ETH', '5m', 'spot')
assert len(df) == 712
df_mark = dh.ohlcv_load('UNITTEST/USDT', '1h', candle_type="mark")
df_mark = dh.ohlcv_load('UNITTEST/USDT:USDT', '1h', candle_type="mark")
assert len(df_mark) == 100
df_no_mark = dh.ohlcv_load('UNITTEST/USDT', '1h', 'spot')
@@ -424,7 +425,7 @@ def test_hdf5datahandler_ohlcv_load_and_resave(
# Data goes from 2018-01-10 - 2018-01-30
('UNITTEST/BTC', '5m', 'spot', '', '2018-01-15', '2018-01-19'),
# Mark data goes from to 2021-11-15 2021-11-19
('UNITTEST/USDT', '1h', 'mark', '-mark', '2021-11-16', '2021-11-18'),
('UNITTEST/USDT:USDT', '1h', 'mark', '-mark', '2021-11-16', '2021-11-18'),
])
@pytest.mark.parametrize('datahandler', ['hdf5', 'feather', 'parquet'])
def test_generic_datahandler_ohlcv_load_and_resave(


@@ -2,13 +2,13 @@ from datetime import datetime, timezone
from unittest.mock import MagicMock
import pytest
from pandas import DataFrame
from pandas import DataFrame, Timestamp
from freqtrade.data.dataprovider import DataProvider
from freqtrade.enums import CandleType, RunMode
from freqtrade.exceptions import ExchangeError, OperationalException
from freqtrade.plugins.pairlistmanager import PairListManager
from tests.conftest import get_patched_exchange
from tests.conftest import EXMS, generate_test_data, get_patched_exchange
@pytest.mark.parametrize('candle_type', [
@@ -144,7 +144,7 @@ def test_available_pairs(mocker, default_conf, ohlcv_history):
assert dp.available_pairs == [("XRP/BTC", timeframe), ("UNITTEST/BTC", timeframe), ]
def test_producer_pairs(mocker, default_conf, ohlcv_history):
def test_producer_pairs(default_conf):
dataprovider = DataProvider(default_conf, None)
producer = "default"
@@ -161,9 +161,9 @@ def test_producer_pairs(mocker, default_conf, ohlcv_history):
assert dataprovider.get_producer_pairs("bad") == []
def test_get_producer_df(mocker, default_conf, ohlcv_history):
def test_get_producer_df(default_conf):
dataprovider = DataProvider(default_conf, None)
ohlcv_history = generate_test_data('5m', 150)
pair = 'BTC/USDT'
timeframe = default_conf['timeframe']
candle_type = CandleType.SPOT
@@ -221,9 +221,9 @@ def test_emit_df(mocker, default_conf, ohlcv_history):
assert send_mock.call_count == 0
def test_refresh(mocker, default_conf, ohlcv_history):
def test_refresh(mocker, default_conf):
refresh_mock = MagicMock()
mocker.patch("freqtrade.exchange.Exchange.refresh_latest_ohlcv", refresh_mock)
mocker.patch(f"{EXMS}.refresh_latest_ohlcv", refresh_mock)
exchange = get_patched_exchange(mocker, default_conf, id="binance")
timeframe = default_conf["timeframe"]
@@ -281,7 +281,7 @@ def test_market(mocker, default_conf, markets):
def test_ticker(mocker, default_conf, tickers):
ticker_mock = MagicMock(return_value=tickers()['ETH/BTC'])
mocker.patch("freqtrade.exchange.Exchange.fetch_ticker", ticker_mock)
mocker.patch(f"{EXMS}.fetch_ticker", ticker_mock)
exchange = get_patched_exchange(mocker, default_conf)
dp = DataProvider(default_conf, exchange)
res = dp.ticker('ETH/BTC')
@@ -290,7 +290,7 @@ def test_ticker(mocker, default_conf, tickers):
assert res['symbol'] == 'ETH/BTC'
ticker_mock = MagicMock(side_effect=ExchangeError('Pair not found'))
mocker.patch("freqtrade.exchange.Exchange.fetch_ticker", ticker_mock)
mocker.patch(f"{EXMS}.fetch_ticker", ticker_mock)
exchange = get_patched_exchange(mocker, default_conf)
dp = DataProvider(default_conf, exchange)
res = dp.ticker('UNITTEST/BTC')
@@ -301,7 +301,7 @@ def test_current_whitelist(mocker, default_conf, tickers):
# patch default conf to volumepairlist
default_conf['pairlists'][0] = {'method': 'VolumePairList', "number_assets": 5}
mocker.patch.multiple('freqtrade.exchange.Exchange',
mocker.patch.multiple(EXMS,
exchange_has=MagicMock(return_value=True),
get_tickers=tickers)
exchange = get_patched_exchange(mocker, default_conf)
@@ -412,3 +412,85 @@ def test_dp_send_msg(default_conf):
dp = DataProvider(default_conf, None)
dp.send_msg(msg, always_send=True)
assert msg not in dp._msg_queue
def test_dp__add_external_df(default_conf_usdt):
timeframe = '1h'
default_conf_usdt["timeframe"] = timeframe
dp = DataProvider(default_conf_usdt, None)
df = generate_test_data(timeframe, 24, '2022-01-01 00:00:00+00:00')
last_analyzed = datetime.now(timezone.utc)
res = dp._add_external_df('ETH/USDT', df, last_analyzed, timeframe, CandleType.SPOT)
assert res[0] is False
# Why 1000 ??
assert res[1] == 1000
# Hard add dataframe
dp._replace_external_df('ETH/USDT', df, last_analyzed, timeframe, CandleType.SPOT)
# BTC is not stored yet
res = dp._add_external_df('BTC/USDT', df, last_analyzed, timeframe, CandleType.SPOT)
assert res[0] is False
df_res, _ = dp.get_producer_df('ETH/USDT', timeframe, CandleType.SPOT)
assert len(df_res) == 24
# Add the same dataframe again - dataframe size shall not change.
res = dp._add_external_df('ETH/USDT', df, last_analyzed, timeframe, CandleType.SPOT)
assert res[0] is True
assert isinstance(res[1], int)
assert res[1] == 0
df, _ = dp.get_producer_df('ETH/USDT', timeframe, CandleType.SPOT)
assert len(df) == 24
# Add a new day.
df2 = generate_test_data(timeframe, 24, '2022-01-02 00:00:00+00:00')
res = dp._add_external_df('ETH/USDT', df2, last_analyzed, timeframe, CandleType.SPOT)
assert res[0] is True
assert isinstance(res[1], int)
assert res[1] == 0
df, _ = dp.get_producer_df('ETH/USDT', timeframe, CandleType.SPOT)
assert len(df) == 48
# Add a dataframe with a 12 hour offset - so 12 candles are overlapping, and 12 valid.
df3 = generate_test_data(timeframe, 24, '2022-01-02 12:00:00+00:00')
res = dp._add_external_df('ETH/USDT', df3, last_analyzed, timeframe, CandleType.SPOT)
assert res[0] is True
assert isinstance(res[1], int)
assert res[1] == 0
df, _ = dp.get_producer_df('ETH/USDT', timeframe, CandleType.SPOT)
# New length = 48 + 12 (since we have a 12 hour offset).
assert len(df) == 60
assert df.iloc[-1]['date'] == df3.iloc[-1]['date']
assert df.iloc[-1]['date'] == Timestamp('2022-01-03 11:00:00+00:00')
# Generate 1 new candle
df4 = generate_test_data(timeframe, 1, '2022-01-03 12:00:00+00:00')
res = dp._add_external_df('ETH/USDT', df4, last_analyzed, timeframe, CandleType.SPOT)
# assert res[0] is True
# assert res[1] == 0
df, _ = dp.get_producer_df('ETH/USDT', timeframe, CandleType.SPOT)
# New length = 60 + 1
assert len(df) == 61
assert df.iloc[-2]['date'] == Timestamp('2022-01-03 11:00:00+00:00')
assert df.iloc[-1]['date'] == Timestamp('2022-01-03 12:00:00+00:00')
# Gap in the data ...
df4 = generate_test_data(timeframe, 1, '2022-01-05 00:00:00+00:00')
res = dp._add_external_df('ETH/USDT', df4, last_analyzed, timeframe, CandleType.SPOT)
assert res[0] is False
# 36 hours - from 2022-01-03 12:00:00+00:00 to 2022-01-05 00:00:00+00:00
assert isinstance(res[1], int)
assert res[1] == 36
df, _ = dp.get_producer_df('ETH/USDT', timeframe, CandleType.SPOT)
# Length unchanged at 61 - the gapped candle is not appended
assert len(df) == 61
# Empty dataframe
df4 = generate_test_data(timeframe, 0, '2022-01-05 00:00:00+00:00')
res = dp._add_external_df('ETH/USDT', df4, last_analyzed, timeframe, CandleType.SPOT)
assert res[0] is False
# Empty dataframe - nothing to append, so no candle gap is reported
assert isinstance(res[1], int)
assert res[1] == 0
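
The integer returned alongside the boolean in these assertions reads as a count of missing candles between the stored dataframe and the incoming one. A minimal sketch of that gap arithmetic for a 1h timeframe, reproducing the 36-candle case above (an assumption about the return value, not the DataProvider implementation):

from datetime import datetime, timedelta, timezone

# Sketch: number of 1h candles between the last stored candle and the first new one.
def candle_gap(last_stored: datetime, first_new: datetime, timeframe_minutes: int = 60) -> int:
    return int((first_new - last_stored) / timedelta(minutes=timeframe_minutes))

assert candle_gap(
    datetime(2022, 1, 3, 12, tzinfo=timezone.utc),
    datetime(2022, 1, 5, 0, tzinfo=timezone.utc),
) == 36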

tests/data/test_entryexitanalysis.py (Executable file → Normal file)

@@ -190,6 +190,15 @@ def test_backtest_analysis_nomock(default_conf, mocker, caplog, testdatadir, tmp
assert '1' in captured.out
assert '2.5' in captured.out
# test group 5
args = get_args(base_args + ['--analysis-groups', "5"])
start_analysis_entries_exits(args)
captured = capsys.readouterr()
assert 'exit_signal' in captured.out
assert 'roi' in captured.out
assert 'stop_loss' in captured.out
assert 'trailing_stop_loss' in captured.out
# test date filtering
args = get_args(base_args +
['--analysis-groups', "0", "1", "2",


@@ -26,7 +26,7 @@ from freqtrade.enums import CandleType
from freqtrade.exchange import timeframe_to_minutes
from freqtrade.misc import file_dump_json
from freqtrade.resolvers import StrategyResolver
from tests.conftest import (CURRENT_TEST_STRATEGY, get_patched_exchange, log_has, log_has_re,
from tests.conftest import (CURRENT_TEST_STRATEGY, EXMS, get_patched_exchange, log_has, log_has_re,
patch_exchange)
@@ -66,7 +66,7 @@ def test_load_data_7min_timeframe(caplog, testdatadir) -> None:
def test_load_data_1min_timeframe(ohlcv_history, mocker, caplog, testdatadir) -> None:
mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv', return_value=ohlcv_history)
mocker.patch(f'{EXMS}.get_historic_ohlcv', return_value=ohlcv_history)
file = testdatadir / 'UNITTEST_BTC-1m.json'
load_data(datadir=testdatadir, timeframe='1m', pairs=['UNITTEST/BTC'])
assert file.is_file()
@@ -77,12 +77,12 @@ def test_load_data_1min_timeframe(ohlcv_history, mocker, caplog, testdatadir) ->
def test_load_data_mark(ohlcv_history, mocker, caplog, testdatadir) -> None:
mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv', return_value=ohlcv_history)
file = testdatadir / 'futures/UNITTEST_USDT-1h-mark.json'
mocker.patch(f'{EXMS}.get_historic_ohlcv', return_value=ohlcv_history)
file = testdatadir / 'futures/UNITTEST_USDT_USDT-1h-mark.json'
load_data(datadir=testdatadir, timeframe='1h', pairs=['UNITTEST/BTC'], candle_type='mark')
assert file.is_file()
assert not log_has(
'Download history data for pair: "UNITTEST/USDT", interval: 1m '
'Download history data for pair: "UNITTEST/USDT:USDT", interval: 1m '
'and store in None.', caplog
)
@@ -109,7 +109,7 @@ def test_load_data_with_new_pair_1min(ohlcv_history_list, mocker, caplog,
Test load_pair_history() with 1 min timeframe
"""
tmpdir1 = Path(tmpdir)
mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv', return_value=ohlcv_history_list)
mocker.patch(f'{EXMS}.get_historic_ohlcv', return_value=ohlcv_history_list)
exchange = get_patched_exchange(mocker, default_conf)
file = tmpdir1 / 'MEME_BTC-1m.json'
@@ -191,7 +191,7 @@ def test_load_cached_data_for_updating(mocker, testdatadir) -> None:
test_data = None
test_filename = testdatadir.joinpath('UNITTEST_BTC-1m.json')
with open(test_filename, "rt") as file:
with test_filename.open("rt") as file:
test_data = json.load(file)
test_data_df = ohlcv_to_dataframe(test_data, '1m', 'UNITTEST/BTC',
@@ -277,7 +277,7 @@ def test_download_pair_history(
subdir,
file_tail
) -> None:
mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv', return_value=ohlcv_history_list)
mocker.patch(f'{EXMS}.get_historic_ohlcv', return_value=ohlcv_history_list)
exchange = get_patched_exchange(mocker, default_conf)
tmpdir1 = Path(tmpdir)
file1_1 = tmpdir1 / f'{subdir}MEME_BTC-1m{file_tail}.json'
@@ -328,7 +328,7 @@ def test_download_pair_history2(mocker, default_conf, testdatadir) -> None:
json_dump_mock = mocker.patch(
'freqtrade.data.history.jsondatahandler.JsonDataHandler.ohlcv_store',
return_value=None)
mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv', return_value=tick)
mocker.patch(f'{EXMS}.get_historic_ohlcv', return_value=tick)
exchange = get_patched_exchange(mocker, default_conf)
_download_pair_history(datadir=testdatadir, exchange=exchange, pair="UNITTEST/BTC",
timeframe='1m', candle_type='spot')
@@ -340,7 +340,7 @@ def test_download_pair_history2(mocker, default_conf, testdatadir) -> None:
def test_download_backtesting_data_exception(mocker, caplog, default_conf, tmpdir) -> None:
mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv',
mocker.patch(f'{EXMS}.get_historic_ohlcv',
side_effect=Exception('File Error'))
tmpdir1 = Path(tmpdir)
exchange = get_patched_exchange(mocker, default_conf)
@@ -506,9 +506,7 @@ def test_refresh_backtest_ohlcv_data(
mocker, default_conf, markets, caplog, testdatadir, trademode, callcount):
dl_mock = mocker.patch('freqtrade.data.history.history_utils._download_pair_history',
MagicMock())
mocker.patch(
'freqtrade.exchange.Exchange.markets', PropertyMock(return_value=markets)
)
mocker.patch(f'{EXMS}.markets', PropertyMock(return_value=markets))
mocker.patch.object(Path, "exists", MagicMock(return_value=True))
mocker.patch.object(Path, "unlink", MagicMock())
@@ -531,9 +529,7 @@ def test_download_data_no_markets(mocker, default_conf, caplog, testdatadir):
MagicMock())
ex = get_patched_exchange(mocker, default_conf)
mocker.patch(
'freqtrade.exchange.Exchange.markets', PropertyMock(return_value={})
)
mocker.patch(f'{EXMS}.markets', PropertyMock(return_value={}))
timerange = TimeRange.parse_timerange("20190101-20190102")
unav_pairs = refresh_backtest_ohlcv_data(exchange=ex, pairs=["BTT/BTC", "LTC/USDT"],
timeframes=["1m", "5m"],
@@ -551,9 +547,7 @@ def test_download_data_no_markets(mocker, default_conf, caplog, testdatadir):
def test_refresh_backtest_trades_data(mocker, default_conf, markets, caplog, testdatadir):
dl_mock = mocker.patch('freqtrade.data.history.history_utils._download_trades_history',
MagicMock())
mocker.patch(
'freqtrade.exchange.Exchange.markets', PropertyMock(return_value=markets)
)
mocker.patch(f'{EXMS}.markets', PropertyMock(return_value=markets))
mocker.patch.object(Path, "exists", MagicMock(return_value=True))
mocker.patch.object(Path, "unlink", MagicMock())
@@ -577,8 +571,7 @@ def test_download_trades_history(trades_history, mocker, default_conf, testdatad
tmpdir) -> None:
tmpdir1 = Path(tmpdir)
ght_mock = MagicMock(side_effect=lambda pair, *args, **kwargs: (pair, trades_history))
mocker.patch('freqtrade.exchange.Exchange.get_historic_trades',
ght_mock)
mocker.patch(f'{EXMS}.get_historic_trades', ght_mock)
exchange = get_patched_exchange(mocker, default_conf)
file1 = tmpdir1 / 'ETH_BTC-trades.json.gz'
data_handler = get_datahandler(tmpdir1, data_format='jsongz')
@@ -604,8 +597,7 @@ def test_download_trades_history(trades_history, mocker, default_conf, testdatad
file1.unlink()
mocker.patch('freqtrade.exchange.Exchange.get_historic_trades',
MagicMock(side_effect=ValueError))
mocker.patch(f'{EXMS}.get_historic_trades', MagicMock(side_effect=ValueError))
assert not _download_trades_history(data_handler=data_handler, exchange=exchange,
pair='ETH/BTC')
@@ -615,8 +607,7 @@ def test_download_trades_history(trades_history, mocker, default_conf, testdatad
copyfile(testdatadir / file2.name, file2)
ght_mock.reset_mock()
mocker.patch('freqtrade.exchange.Exchange.get_historic_trades',
ght_mock)
mocker.patch(f'{EXMS}.get_historic_trades', ght_mock)
# Since before first start date
since_time = int(trades_history[0][0] // 1000) - 500
timerange = TimeRange('date', None, since_time, 0)
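
A recurring change throughout these hunks is replacing hard-coded 'freqtrade.exchange.Exchange' patch targets with the EXMS constant imported from tests.conftest. A minimal sketch of the pattern; the exact value assigned to EXMS below is an assumption for illustration, not taken from this diff:

from unittest.mock import MagicMock, PropertyMock

# EXMS normally comes from tests.conftest; the value here is assumed for illustration.
EXMS = 'freqtrade.exchange.exchange.Exchange'

def patch_exchange_data(mocker, markets, ohlcv):
    # Same patching pattern used by the tests above, via pytest-mock's mocker fixture.
    mocker.patch(f'{EXMS}.markets', PropertyMock(return_value=markets))
    mocker.patch(f'{EXMS}.get_historic_ohlcv', MagicMock(return_value=ohlcv))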