Merge branch 'develop' into feat/flask_rest_retry
commit 6adc8f7ea7
@@ -103,7 +103,7 @@ If the bot does not find your strategy file, it will display in an error
 message the reason (File not found, or errors in your code).

 Learn more about strategy file in
-[optimize your bot](bot-optimization.md).
+[Strategy Customization](strategy-customization.md).

 ### How to use **--strategy-path**?

@@ -296,4 +296,4 @@ in [misc.py](https://github.com/freqtrade/freqtrade/blob/develop/freqtrade/misc.
 ## Next step

 The optimal strategy of the bot will change with time depending of the market trends. The next step is to
-[optimize your bot](bot-optimization.md).
+[Strategy Customization](strategy-customization.md).
@@ -122,6 +122,7 @@ So let's write the buy strategy using these values:
 dataframe['macd'], dataframe['macdsignal']
 ))

+if conditions:
 dataframe.loc[
 reduce(lambda x, y: x & y, conditions),
 'buy'] = 1
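The `if conditions:` guard added here (and in the matching hyperopt hunks further down) matters because `reduce()` raises `TypeError: reduce() of empty sequence with no initial value` when hyperopt selects no guards or triggers at all. A minimal, self-contained sketch of the failure mode it prevents (the empty `conditions` list and the tiny dataframe are illustrative assumptions):

```python
from functools import reduce

import pandas as pd

dataframe = pd.DataFrame({'close': [1.0, 2.0, 3.0], 'buy': 0})
conditions = []  # hyperopt may select no guards and no trigger for a given epoch

# Without the guard, reduce() on an empty list raises:
#   TypeError: reduce() of empty sequence with no initial value
if conditions:
    dataframe.loc[
        reduce(lambda x, y: x & y, conditions),
        'buy'] = 1
```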
@@ -53,6 +53,12 @@ file as reference.**
 It is therefore best to use vectorized operations (across the whole dataframe, not loops) and
 avoid index referencing (`df.iloc[-1]`), but instead use `df.shift()` to get to the previous candle.

+!!! Warning Using future data
+    Since backtesting passes the full time interval to the `populate_*()` methods, the strategy author
+    needs to take care to avoid having the strategy utilize data from the future.
+    Samples for usage of future data are `dataframe.shift(-1)`, `dataframe.resample("1h")` (this uses the left border of the interval, so moves data from an hour to the start of the hour).
+    They all use data which is not available during regular operations, so these strategies will perform well during backtesting, but will fail / perform badly in dry-runs.
+
 ### Customize Indicators

 Buy and sell strategies need indicators. You can add more indicators by extending the list contained in the method `populate_indicators()` from your strategy file.
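To make the lookahead warning above concrete, here is a short sketch contrasting the safe `shift()` usage with the `shift(-1)` anti-pattern (the candle values are illustrative only):

```python
import pandas as pd

df = pd.DataFrame({'close': [100.0, 101.0, 102.0, 103.0]})

# Safe: the previous candle's close (NaN on the first row).
df['prev_close'] = df['close'].shift(1)

# Lookahead: the *next* candle's close. Backtesting sees it because the whole
# history is handed to populate_*(), but it is unknowable in dry-run/live.
df['next_close'] = df['close'].shift(-1)

print(df)
```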
@@ -405,7 +405,7 @@ class Arguments(object):
 raise Exception('Incorrect syntax for timerange "%s"' % text)

 @staticmethod
-def check_int_positive(value) -> int:
+def check_int_positive(value: str) -> int:
 try:
 uint = int(value)
 if uint <= 0:
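The annotated signature (`value: str`) reflects how the validator is used: argparse hands the raw command-line string to the `type=` callable. A hedged sketch of that wiring, with the body completed from this hunk and the `argparse.ArgumentTypeError` behaviour shown in the new test near the end of this diff (the `--days` option name is only an illustration):

```python
import argparse


def check_int_positive(value: str) -> int:
    # Reject anything that is not a strictly positive integer.
    try:
        uint = int(value)
        if uint <= 0:
            raise ValueError
    except ValueError:
        raise argparse.ArgumentTypeError(f"{value} is not a positive integer")
    return uint


parser = argparse.ArgumentParser()
parser.add_argument('--days', type=check_int_positive, default=30)
print(parser.parse_args(['--days', '7']).days)  # 7
# parser.parse_args(['--days', '0']) would exit with an error instead
```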
@@ -63,12 +63,8 @@ def load_tickerdata_file(
 Load a pair from file, either .json.gz or .json
 :return tickerlist or None if unsuccesful
 """
-path = make_testdata_path(datadir)
-pair_s = pair.replace('/', '_')
-file = path.joinpath(f'{pair_s}-{ticker_interval}.json')
-
-pairdata = misc.file_load_json(file)
-
+filename = pair_data_filename(datadir, pair, ticker_interval)
+pairdata = misc.file_load_json(filename)
 if not pairdata:
 return None

@@ -90,13 +86,8 @@ def load_pair_history(pair: str,
 :return: DataFrame with ohlcv data
 """

-# If the user force the refresh of pairs
+# The user forced the refresh of pairs
 if refresh_pairs:
-if not exchange:
-raise OperationalException("Exchange needs to be initialized when "
-"calling load_data with refresh_pairs=True")
-
-logger.info('Download data for pair and store them in %s', datadir)
 download_pair_history(datadir=datadir,
 exchange=exchange,
 pair=pair,
@@ -115,10 +106,11 @@ def load_pair_history(pair: str,
 arrow.get(pairdata[-1][0] // 1000).strftime('%Y-%m-%d %H:%M:%S'))
 return parse_ticker_dataframe(pairdata, ticker_interval, fill_up_missing)
 else:
-logger.warning('No data for pair: "%s", Interval: %s. '
+logger.warning(
+f'No history data for pair: "{pair}", interval: {ticker_interval}. '
 'Use --refresh-pairs-cached option or download_backtest_data.py '
-'script to download the data',
-pair, ticker_interval)
+'script to download the data'
+)
 return None


@@ -151,6 +143,13 @@ def make_testdata_path(datadir: Optional[Path]) -> Path:
 return datadir or (Path(__file__).parent.parent / "tests" / "testdata").resolve()


+def pair_data_filename(datadir: Optional[Path], pair: str, ticker_interval: str) -> Path:
+path = make_testdata_path(datadir)
+pair_s = pair.replace("/", "_")
+filename = path.joinpath(f'{pair_s}-{ticker_interval}.json')
+return filename
+
+
 def load_cached_data_for_updating(filename: Path, ticker_interval: str,
 timerange: Optional[TimeRange]) -> Tuple[List[Any],
 Optional[int]]:
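The new `pair_data_filename()` helper centralizes the `<PAIR>-<interval>.json` naming that `load_tickerdata_file()` and `download_pair_history()` previously rebuilt by hand. A quick usage sketch (the directory is illustrative; the real default comes from `make_testdata_path()`):

```python
from pathlib import Path
from typing import Optional


def pair_data_filename(datadir: Optional[Path], pair: str, ticker_interval: str) -> Path:
    # Same construction as the hunk above, with a stand-in for make_testdata_path(datadir).
    path = datadir or Path("user_data/data")
    pair_s = pair.replace("/", "_")
    return path.joinpath(f'{pair_s}-{ticker_interval}.json')


print(pair_data_filename(Path("/tmp/testdata"), "UNITTEST/BTC", "1m"))
# -> /tmp/testdata/UNITTEST_BTC-1m.json
```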
@@ -190,7 +189,7 @@ def load_cached_data_for_updating(filename: Path, ticker_interval: str,


 def download_pair_history(datadir: Optional[Path],
-exchange: Exchange,
+exchange: Optional[Exchange],
 pair: str,
 ticker_interval: str = '5m',
 timerange: Optional[TimeRange] = None) -> bool:
@@ -201,18 +200,24 @@ def download_pair_history(datadir: Optional[Path],
 the full data will be redownloaded

 Based on @Rybolov work: https://github.com/rybolov/freqtrade-data

 :param pair: pair to download
 :param ticker_interval: ticker interval
 :param timerange: range of time to download
 :return: bool with success state

 """
-try:
-path = make_testdata_path(datadir)
-filepair = pair.replace("/", "_")
-filename = path.joinpath(f'{filepair}-{ticker_interval}.json')
+if not exchange:
+raise OperationalException(
+"Exchange needs to be initialized when downloading pair history data"
+)

-logger.info('Download the pair: "%s", Interval: %s', pair, ticker_interval)
+try:
+filename = pair_data_filename(datadir, pair, ticker_interval)
+
+logger.info(
+f'Download history data for pair: "{pair}", interval: {ticker_interval} '
+f'and store in {datadir}.'
+)

 data, since_ms = load_cached_data_for_updating(filename, ticker_interval, timerange)

@@ -231,7 +236,10 @@ def download_pair_history(datadir: Optional[Path],

 misc.file_dump_json(filename, data)
 return True
-except BaseException:
-logger.info('Failed to download the pair: "%s", Interval: %s',
-pair, ticker_interval)
+except Exception as e:
+logger.error(
+f'Failed to download history data for pair: "{pair}", interval: {ticker_interval}. '
+f'Error: {e}'
+)
 return False
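Narrowing the handler from `BaseException` to `Exception` is the behavioural point of this hunk: `KeyboardInterrupt` and `SystemExit` derive from `BaseException` but not from `Exception`, so with the old handler a Ctrl-C during a download would be logged as a failed pair rather than propagating. A one-line illustration of the class hierarchy involved:

```python
# KeyboardInterrupt is a BaseException but not an Exception, so
# `except Exception` lets it propagate while `except BaseException` swallows it.
print(issubclass(KeyboardInterrupt, Exception))      # False
print(issubclass(KeyboardInterrupt, BaseException))  # True
```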
@@ -139,6 +139,7 @@ class Edge():

 # If no trade found then exit
 if len(trades) == 0:
+logger.info("No trades found.")
 return False

 # Fill missing, calculable columns, profit, duration , abs etc.
@@ -70,6 +70,7 @@ class DefaultHyperOpts(IHyperOpt):
 dataframe['close'], dataframe['sar']
 ))

+if conditions:
 dataframe.loc[
 reduce(lambda x, y: x & y, conditions),
 'buy'] = 1
@@ -129,6 +130,7 @@ class DefaultHyperOpts(IHyperOpt):
 dataframe['sar'], dataframe['close']
 ))

+if conditions:
 dataframe.loc[
 reduce(lambda x, y: x & y, conditions),
 'sell'] = 1
@@ -73,8 +73,9 @@ class EdgeCli(object):
 floatfmt=floatfmt, tablefmt="pipe")

 def start(self) -> None:
-self.edge.calculate()
-print('')  # blank like for readability
+result = self.edge.calculate()
+if result:
+print('')  # blank line for readability
 print(self._generate_edge_table(self.edge._cached_pairs))


@@ -238,7 +238,6 @@ class Trade(_DECL_BASE):
 """
 Adjust the max_rate and min_rate.
 """
-logger.debug("Adjusting min/max rates")
 self.max_rate = max(current_price, self.max_rate or self.open_rate)
 self.min_rate = min(current_price, self.min_rate or self.open_rate)

@@ -59,7 +59,11 @@ def _clean_test_file(file: str) -> None:
 def test_load_data_30min_ticker(mocker, caplog, default_conf) -> None:
 ld = history.load_pair_history(pair='UNITTEST/BTC', ticker_interval='30m', datadir=None)
 assert isinstance(ld, DataFrame)
-assert not log_has('Download the pair: "UNITTEST/BTC", Interval: 30m', caplog.record_tuples)
+assert not log_has(
+'Download history data for pair: "UNITTEST/BTC", interval: 30m '
+'and store in None.',
+caplog.record_tuples
+)


 def test_load_data_7min_ticker(mocker, caplog, default_conf) -> None:
|
|||||||
assert not isinstance(ld, DataFrame)
|
assert not isinstance(ld, DataFrame)
|
||||||
assert ld is None
|
assert ld is None
|
||||||
assert log_has(
|
assert log_has(
|
||||||
'No data for pair: "UNITTEST/BTC", Interval: 7m. '
|
'No history data for pair: "UNITTEST/BTC", interval: 7m. '
|
||||||
'Use --refresh-pairs-cached option or download_backtest_data.py '
|
'Use --refresh-pairs-cached option or download_backtest_data.py '
|
||||||
'script to download the data',
|
'script to download the data',
|
||||||
caplog.record_tuples
|
caplog.record_tuples
|
||||||
@@ -80,7 +84,11 @@ def test_load_data_1min_ticker(ticker_history, mocker, caplog) -> None:
 _backup_file(file, copy_file=True)
 history.load_data(datadir=None, ticker_interval='1m', pairs=['UNITTEST/BTC'])
 assert os.path.isfile(file) is True
-assert not log_has('Download the pair: "UNITTEST/BTC", Interval: 1m', caplog.record_tuples)
+assert not log_has(
+'Download history data for pair: "UNITTEST/BTC", interval: 1m '
+'and store in None.',
+caplog.record_tuples
+)
 _clean_test_file(file)


@@ -100,7 +108,7 @@ def test_load_data_with_new_pair_1min(ticker_history_list, mocker, caplog, defau
 pair='MEME/BTC')
 assert os.path.isfile(file) is False
 assert log_has(
-'No data for pair: "MEME/BTC", Interval: 1m. '
+'No history data for pair: "MEME/BTC", interval: 1m. '
 'Use --refresh-pairs-cached option or download_backtest_data.py '
 'script to download the data',
 caplog.record_tuples
@@ -113,7 +121,11 @@ def test_load_data_with_new_pair_1min(ticker_history_list, mocker, caplog, defau
 exchange=exchange,
 pair='MEME/BTC')
 assert os.path.isfile(file) is True
-assert log_has('Download the pair: "MEME/BTC", Interval: 1m', caplog.record_tuples)
+assert log_has(
+'Download history data for pair: "MEME/BTC", interval: 1m '
+'and store in None.',
+caplog.record_tuples
+)
 with pytest.raises(OperationalException, match=r'Exchange needs to be initialized when.*'):
 history.load_pair_history(datadir=None,
 ticker_interval='1m',
@@ -293,7 +305,7 @@ def test_download_pair_history2(mocker, default_conf) -> None:

 def test_download_backtesting_data_exception(ticker_history, mocker, caplog, default_conf) -> None:
 mocker.patch('freqtrade.exchange.Exchange.get_history',
-side_effect=BaseException('File Error'))
+side_effect=Exception('File Error'))

 exchange = get_patched_exchange(mocker, default_conf)

@@ -308,7 +320,11 @@ def test_download_backtesting_data_exception(ticker_history, mocker, caplog, def
 # clean files freshly downloaded
 _clean_test_file(file1_1)
 _clean_test_file(file1_5)
-assert log_has('Failed to download the pair: "MEME/BTC", Interval: 1m', caplog.record_tuples)
+assert log_has(
+'Failed to download history data for pair: "MEME/BTC", interval: 1m. '
+'Error: File Error',
+caplog.record_tuples
+)


 def test_load_tickerdata_file() -> None:
@@ -10,10 +10,11 @@ import numpy as np
 import pytest
 from pandas import DataFrame, to_datetime

+from freqtrade import OperationalException
 from freqtrade.data.converter import parse_ticker_dataframe
 from freqtrade.edge import Edge, PairInfo
 from freqtrade.strategy.interface import SellType
-from freqtrade.tests.conftest import get_patched_freqtradebot
+from freqtrade.tests.conftest import get_patched_freqtradebot, log_has
 from freqtrade.tests.optimize import (BTContainer, BTrade,
 _build_backtest_dataframe,
 _get_frame_time_from_offset)
@@ -30,7 +31,50 @@ ticker_start_time = arrow.get(2018, 10, 3)
 ticker_interval_in_minute = 60
 _ohlc = {'date': 0, 'buy': 1, 'open': 2, 'high': 3, 'low': 4, 'close': 5, 'sell': 6, 'volume': 7}

+# Helpers for this test file
+
+
+def _validate_ohlc(buy_ohlc_sell_matrice):
+for index, ohlc in enumerate(buy_ohlc_sell_matrice):
+# if not high < open < low or not high < close < low
+if not ohlc[3] >= ohlc[2] >= ohlc[4] or not ohlc[3] >= ohlc[5] >= ohlc[4]:
+raise Exception('Line ' + str(index + 1) + ' of ohlc has invalid values!')
+return True
+
+
+def _build_dataframe(buy_ohlc_sell_matrice):
+_validate_ohlc(buy_ohlc_sell_matrice)
+tickers = []
+for ohlc in buy_ohlc_sell_matrice:
+ticker = {
+'date': ticker_start_time.shift(
+minutes=(
+ohlc[0] *
+ticker_interval_in_minute)).timestamp *
+1000,
+'buy': ohlc[1],
+'open': ohlc[2],
+'high': ohlc[3],
+'low': ohlc[4],
+'close': ohlc[5],
+'sell': ohlc[6]}
+tickers.append(ticker)
+
+frame = DataFrame(tickers)
+frame['date'] = to_datetime(frame['date'],
+unit='ms',
+utc=True,
+infer_datetime_format=True)
+
+return frame
+
+
+def _time_on_candle(number):
+return np.datetime64(ticker_start_time.shift(
+minutes=(number * ticker_interval_in_minute)).timestamp * 1000, 'ms')
+
+
+# End helper functions
 # Open trade should be removed from the end
 tc0 = BTContainer(data=[
 # D O H L C V B S
@@ -203,46 +247,6 @@ def test_nonexisting_stake_amount(mocker, edge_conf):
 assert edge.stake_amount('N/O', 1, 2, 1) == 0.15


-def _validate_ohlc(buy_ohlc_sell_matrice):
-for index, ohlc in enumerate(buy_ohlc_sell_matrice):
-# if not high < open < low or not high < close < low
-if not ohlc[3] >= ohlc[2] >= ohlc[4] or not ohlc[3] >= ohlc[5] >= ohlc[4]:
-raise Exception('Line ' + str(index + 1) + ' of ohlc has invalid values!')
-return True
-
-
-def _build_dataframe(buy_ohlc_sell_matrice):
-_validate_ohlc(buy_ohlc_sell_matrice)
-tickers = []
-for ohlc in buy_ohlc_sell_matrice:
-ticker = {
-'date': ticker_start_time.shift(
-minutes=(
-ohlc[0] *
-ticker_interval_in_minute)).timestamp *
-1000,
-'buy': ohlc[1],
-'open': ohlc[2],
-'high': ohlc[3],
-'low': ohlc[4],
-'close': ohlc[5],
-'sell': ohlc[6]}
-tickers.append(ticker)
-
-frame = DataFrame(tickers)
-frame['date'] = to_datetime(frame['date'],
-unit='ms',
-utc=True,
-infer_datetime_format=True)
-
-return frame
-
-
-def _time_on_candle(number):
-return np.datetime64(ticker_start_time.shift(
-minutes=(number * ticker_interval_in_minute)).timestamp * 1000, 'ms')
-
-
 def test_edge_heartbeat_calculate(mocker, edge_conf):
 freqtrade = get_patched_freqtradebot(mocker, edge_conf)
 edge = Edge(edge_conf, freqtrade.exchange, freqtrade.strategy)
@@ -298,6 +302,40 @@ def test_edge_process_downloaded_data(mocker, edge_conf):
 assert edge._last_updated <= arrow.utcnow().timestamp + 2


+def test_edge_process_no_data(mocker, edge_conf, caplog):
+edge_conf['datadir'] = None
+freqtrade = get_patched_freqtradebot(mocker, edge_conf)
+mocker.patch('freqtrade.exchange.Exchange.get_fee', MagicMock(return_value=0.001))
+mocker.patch('freqtrade.data.history.load_data', MagicMock(return_value={}))
+edge = Edge(edge_conf, freqtrade.exchange, freqtrade.strategy)
+
+assert not edge.calculate()
+assert len(edge._cached_pairs) == 0
+assert log_has("No data found. Edge is stopped ...", caplog.record_tuples)
+assert edge._last_updated == 0
+
+
+def test_edge_process_no_trades(mocker, edge_conf, caplog):
+edge_conf['datadir'] = None
+freqtrade = get_patched_freqtradebot(mocker, edge_conf)
+mocker.patch('freqtrade.exchange.Exchange.get_fee', MagicMock(return_value=0.001))
+mocker.patch('freqtrade.data.history.load_data', mocked_load_data)
+# Return empty
+mocker.patch('freqtrade.edge.Edge._find_trades_for_stoploss_range', MagicMock(return_value=[]))
+edge = Edge(edge_conf, freqtrade.exchange, freqtrade.strategy)
+
+assert not edge.calculate()
+assert len(edge._cached_pairs) == 0
+assert log_has("No trades found.", caplog.record_tuples)
+
+
+def test_edge_init_error(mocker, edge_conf,):
+edge_conf['stake_amount'] = 0.5
+mocker.patch('freqtrade.exchange.Exchange.get_fee', MagicMock(return_value=0.001))
+with pytest.raises(OperationalException, match='Edge works only with unlimited stake amount'):
+get_patched_freqtradebot(mocker, edge_conf)
+
+
 def test_process_expectancy(mocker, edge_conf):
 edge_conf['edge']['min_trade_number'] = 2
 freqtrade = get_patched_freqtradebot(mocker, edge_conf)
@@ -360,3 +398,11 @@ def test_process_expectancy(mocker, edge_conf):
 assert round(final['TEST/BTC'].risk_reward_ratio, 10) == 306.5384615384
 assert round(final['TEST/BTC'].required_risk_reward, 10) == 2.0
 assert round(final['TEST/BTC'].expectancy, 10) == 101.5128205128
+
+# Pop last item so no trade is profitable
+trades.pop()
+trades_df = DataFrame(trades)
+trades_df = edge._fill_calculable_fields(trades_df)
+final = edge._process_expectancy(trades_df)
+assert len(final) == 0
+assert isinstance(final, dict)
@@ -118,7 +118,7 @@ def test_rpc_status_table(default_conf, ticker, fee, markets, mocker) -> None:

 freqtradebot.create_trade()
 result = rpc._rpc_status_table()
-assert 'just now' in result['Since'].all()
+assert 'instantly' in result['Since'].all()
 assert 'ETH/BTC' in result['Pair'].all()
 assert '-0.59%' in result['Profit'].all()

@@ -127,7 +127,7 @@ def test_rpc_status_table(default_conf, ticker, fee, markets, mocker) -> None:
 # invalidate ticker cache
 rpc._freqtrade.exchange._cached_ticker = {}
 result = rpc._rpc_status_table()
-assert 'just now' in result['Since'].all()
+assert 'instantly' in result['Since'].all()
 assert 'ETH/BTC' in result['Pair'].all()
 assert 'nan%' in result['Profit'].all()

@@ -1,5 +1,4 @@
 # pragma pylint: disable=missing-docstring, C0103
-
 import argparse

 import pytest
@@ -185,3 +184,22 @@ def test_testdata_dl_options() -> None:
 assert args.export == 'export/folder'
 assert args.days == 30
 assert args.exchange == 'binance'
+
+
+def test_check_int_positive() -> None:
+
+assert Arguments.check_int_positive("3") == 3
+assert Arguments.check_int_positive("1") == 1
+assert Arguments.check_int_positive("100") == 100
+
+with pytest.raises(argparse.ArgumentTypeError):
+Arguments.check_int_positive("-2")
+
+with pytest.raises(argparse.ArgumentTypeError):
+Arguments.check_int_positive("0")
+
+with pytest.raises(argparse.ArgumentTypeError):
+Arguments.check_int_positive("3.5")
+
+with pytest.raises(argparse.ArgumentTypeError):
+Arguments.check_int_positive("DeadBeef")
@@ -6,7 +6,7 @@ from unittest.mock import MagicMock
 from freqtrade.data.converter import parse_ticker_dataframe
 from freqtrade.misc import (common_datearray, datesarray_to_datetimearray,
 file_dump_json, file_load_json, format_ms_time, shorten_date)
-from freqtrade.data.history import load_tickerdata_file, make_testdata_path
+from freqtrade.data.history import load_tickerdata_file, pair_data_filename
 from freqtrade.strategy.default_strategy import DefaultStrategy


@@ -60,13 +60,13 @@ def test_file_dump_json(mocker) -> None:
 def test_file_load_json(mocker) -> None:

 # 7m .json does not exist
-ret = file_load_json(make_testdata_path(None).joinpath('UNITTEST_BTC-7m.json'))
+ret = file_load_json(pair_data_filename(None, 'UNITTEST/BTC', '7m'))
 assert not ret
 # 1m json exists (but no .gz exists)
-ret = file_load_json(make_testdata_path(None).joinpath('UNITTEST_BTC-1m.json'))
+ret = file_load_json(pair_data_filename(None, 'UNITTEST/BTC', '1m'))
 assert ret
 # 8 .json is empty and will fail if it's loaded. .json.gz is a copy of 1.json
-ret = file_load_json(make_testdata_path(None).joinpath('UNITTEST_BTC-8m.json'))
+ret = file_load_json(pair_data_filename(None, 'UNITTEST/BTC', '8m'))
 assert ret


@@ -3,7 +3,7 @@ nav:
 - About: index.md
 - Installation: installation.md
 - Configuration: configuration.md
-- Custom Strategy: bot-optimization.md
+- Strategy Customization: strategy-customization.md
 - Stoploss: stoploss.md
 - Start the bot: bot-usage.md
 - Control the bot:
@@ -1,14 +1,14 @@
 # requirements without requirements installable via conda
 # mainly used for Raspberry pi installs
-ccxt==1.18.523
+ccxt==1.18.551
 SQLAlchemy==1.3.3
 python-telegram-bot==11.1.0
-arrow==0.13.1
+arrow==0.13.2
 cachetools==3.1.0
-requests==2.21.0
+requests==2.22.0
 urllib3==1.24.2 # pyup: ignore
 wrapt==1.11.1
-scikit-learn==0.21.0
+scikit-learn==0.21.1
 joblib==0.13.2
 jsonschema==3.0.1
 TA-Lib==0.4.17
@@ -17,7 +17,7 @@ coinmarketcap==5.0.3

 # Required for hyperopt
 scikit-optimize==0.5.2
-filelock==3.0.10
+filelock==3.0.12

 # find first, C search in arrays
 py_find_1st==1.1.3
@@ -3,4 +3,4 @@

 numpy==1.16.3
 pandas==0.24.2
-scipy==1.2.1
+scipy==1.3.0
@@ -79,6 +79,7 @@ class SampleHyperOpts(IHyperOpt):
 dataframe['close'], dataframe['sar']
 ))

+if conditions:
 dataframe.loc[
 reduce(lambda x, y: x & y, conditions),
 'buy'] = 1
@@ -138,6 +139,7 @@ class SampleHyperOpts(IHyperOpt):
 dataframe['sar'], dataframe['close']
 ))

+if conditions:
 dataframe.loc[
 reduce(lambda x, y: x & y, conditions),
 'sell'] = 1