Merge branch 'develop' of https://github.com/Bovhasselt/Tradingbotv1.1 into develop

commit 929b42446b

@@ -1,4 +1,4 @@
-FROM python:3.9.1-slim-buster as base
+FROM python:3.9.2-slim-buster as base
 
 # Setup env
 ENV LANG C.UTF-8

README.md
@@ -22,12 +22,21 @@ expect.
 We strongly recommend you to have coding and Python knowledge. Do not
 hesitate to read the source code and understand the mechanism of this bot.
 
-## Exchange marketplaces supported
+## Supported Exchange marketplaces
 
+Please read the [exchange specific notes](docs/exchanges.md) to learn about eventual, special configurations needed for each exchange.
+
 - [X] [Bittrex](https://bittrex.com/)
 - [X] [Binance](https://www.binance.com/) ([*Note for binance users](docs/exchanges.md#blacklists))
 - [X] [Kraken](https://kraken.com/)
-- [ ] [113 others to tests](https://github.com/ccxt/ccxt/). _(We cannot guarantee they will work)_
+- [X] [FTX](https://ftx.com)
+- [ ] [potentially many others](https://github.com/ccxt/ccxt/). _(We cannot guarantee they will work)_
+
+### Community tested
+
+Exchanges confirmed working by the community:
+
+- [X] [Bitvavo](https://bitvavo.com/)
 
 ## Documentation
 
@@ -51,6 +51,8 @@ fi
 docker images
 
 docker push ${IMAGE_NAME}
+docker push ${IMAGE_NAME}:$TAG_PLOT
+docker push ${IMAGE_NAME}:$TAG
 if [ $? -ne 0 ]; then
     echo "failed pushing repo"
     return 1
@@ -40,6 +40,11 @@ For the sample below, you then need to add the command line parameter `--hyperop
 A sample of this can be found below, which is identical to the Default Hyperopt loss implementation. A full sample can be found in [userdata/hyperopts](https://github.com/freqtrade/freqtrade/blob/develop/freqtrade/templates/sample_hyperopt_loss.py).
 
 ``` python
+from datetime import datetime
+from typing import Dict
+
+from pandas import DataFrame
+
 from freqtrade.optimize.hyperopt import IHyperOptLoss
 
 TARGET_TRADES = 600
@@ -54,6 +59,7 @@ class SuperDuperHyperOptLoss(IHyperOptLoss):
     @staticmethod
     def hyperopt_loss_function(results: DataFrame, trade_count: int,
                                min_date: datetime, max_date: datetime,
+                               config: Dict, processed: Dict[str, DataFrame],
                                *args, **kwargs) -> float:
         """
         Objective function, returns smaller number for better results
@@ -81,6 +87,8 @@ Currently, the arguments are:
 * `trade_count`: Amount of trades (identical to `len(results)`)
 * `min_date`: Start date of the timerange used
 * `min_date`: End date of the timerange used
+* `config`: Config object used (Note: Not all strategy-related parameters will be updated here if they are part of a hyperopt space).
+* `processed`: Dict of Dataframes with the pair as keys containing the data used for backtesting.
 
 This function needs to return a floating point number (`float`). Smaller numbers will be interpreted as better results. The parameters and balancing for this is up to you.
 
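Editorial aside (not part of the commit): the two new arguments let a loss function weight its result by configuration values or by the candle data itself. A minimal sketch of a loss class using the extended signature follows; the class name is made up, and the result columns (`profit_percent`, `trade_duration`) are assumed from the default loss implementation.

``` python
from datetime import datetime
from typing import Dict

from pandas import DataFrame

from freqtrade.optimize.hyperopt import IHyperOptLoss


class ProfitAndDurationHyperOptLoss(IHyperOptLoss):
    """Hypothetical loss: reward total profit, lightly penalise long trade durations."""

    @staticmethod
    def hyperopt_loss_function(results: DataFrame, trade_count: int,
                               min_date: datetime, max_date: datetime,
                               config: Dict, processed: Dict[str, DataFrame],
                               *args, **kwargs) -> float:
        total_profit = results['profit_percent'].sum()
        avg_duration = results['trade_duration'].mean()
        # Smaller is better: profit dominates, duration adds a small penalty.
        return -total_profit + avg_duration / 1000
```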
@@ -40,7 +40,9 @@ All protection end times are rounded up to the next candle to avoid sudden, unex
 
 #### Stoploss Guard
 
-`StoplossGuard` selects all trades within `lookback_period` in minutes (or in candles when using `lookback_period_candles`), and determines if the amount of trades that resulted in stoploss are above `trade_limit` - in which case trading will stop for `stop_duration` in minutes (or in candles when using `stop_duration_candles`).
+`StoplossGuard` selects all trades within `lookback_period` in minutes (or in candles when using `lookback_period_candles`).
+If `trade_limit` or more trades resulted in stoploss, trading will stop for `stop_duration` in minutes (or in candles when using `stop_duration_candles`).
 
 This applies across all pairs, unless `only_per_pair` is set to true, which will then only look at one pair at a time.
 
 The below example stops trading for all pairs for 4 candles after the last trade if the bot hit stoploss 4 times within the last 24 candles.
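Editorial aside (not part of the commit): the example sentence above corresponds roughly to a `protections` configuration entry like the sketch below, written here as a Python literal (in the bot this lives in the JSON configuration); only parameter names mentioned in the text are used.

``` python
# Sketch of the matching "protections" configuration entry
protections = [
    {
        "method": "StoplossGuard",
        "lookback_period_candles": 24,
        "trade_limit": 4,
        "stop_duration_candles": 4,
        "only_per_pair": False,
    }
]
```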
@@ -35,6 +35,22 @@ Freqtrade is a crypto-currency algorithmic trading software developed in python
 - Control/Monitor: Use Telegram or a REST API (start/stop the bot, show profit/loss, daily summary, current open trades results, etc.).
 - Analyse: Further analysis can be performed on either Backtesting data or Freqtrade trading history (SQL database), including automated standard plots, and methods to load the data into [interactive environments](data-analysis.md).
 
+## Supported exchange marketplaces
+
+Please read the [exchange specific notes](exchanges.md) to learn about eventual, special configurations needed for each exchange.
+
+- [X] [Binance](https://www.binance.com/) ([*Note for binance users](exchanges.md#blacklists))
+- [X] [Bittrex](https://bittrex.com/)
+- [X] [FTX](https://ftx.com)
+- [X] [Kraken](https://kraken.com/)
+- [ ] [potentially many others](https://github.com/ccxt/ccxt/). _(We cannot guarantee they will work)_
+
+### Community tested
+
+Exchanges confirmed working by the community:
+
+- [X] [Bitvavo](https://bitvavo.com/)
+
 ## Requirements
 
 ### Hardware requirements
@@ -188,7 +188,7 @@ Sample configuration with inline comments explaining the process:
     'senkou_a': {
         'color': 'green', #optional
         'fill_to': 'senkou_b',
-        'fill_label': 'Ichimoku Cloud' #optional,
+        'fill_label': 'Ichimoku Cloud', #optional
         'fill_color': 'rgba(255,76,46,0.2)', #optional
     },
     # plot senkou_b, too. Not only the area to it.
@@ -1,3 +1,3 @@
-mkdocs-material==6.2.7
+mkdocs-material==6.2.8
 mdx_truly_sane_lists==1.2
 pymdown-extensions==8.1.1
@@ -315,11 +315,11 @@ class AwesomeStrategy(IStrategy):
 
     def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
         # Check if the entry already exists
-        if not metadata["pair"] in self._cust_info:
+        if not metadata["pair"] in self.cust_info:
             # Create empty entry for this pair
-            self._cust_info[metadata["pair"]] = {}
+            self.cust_info[metadata["pair"]] = {}
 
-        if "crosstime" in self.cust_info[metadata["pair"]:
+        if "crosstime" in self.cust_info[metadata["pair"]]:
             self.cust_info[metadata["pair"]]["crosstime"] += 1
         else:
             self.cust_info[metadata["pair"]]["crosstime"] = 1
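Editorial aside (not part of the commit): the snippet above assumes the strategy declares the per-pair storage dict up front. A minimal sketch, using the renamed attribute:

``` python
class AwesomeStrategy(IStrategy):
    # Assumed class-level storage for the per-pair values used in populate_indicators() above
    cust_info: dict = {}
```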
@@ -444,14 +444,19 @@ It can also be used in specific callbacks to get the signal that caused the acti
 ``` python
 # fetch current dataframe
 if self.dp:
-    dataframe, last_updated = self.dp.get_analyzed_dataframe(pair=metadata['pair'],
-                                                             timeframe=self.timeframe)
+    if self.dp.runmode.value in ('live', 'dry_run'):
+        dataframe, last_updated = self.dp.get_analyzed_dataframe(pair=metadata['pair'],
+                                                                 timeframe=self.timeframe)
 ```
 
 !!! Note "No data available"
     Returns an empty dataframe if the requested pair was not cached.
     This should not happen when using whitelisted pairs.
 
+!!! Warning "Warning about backtesting"
+    This method will return an empty dataframe during backtesting.
+
 ### *orderbook(pair, maximum)*
 
 ``` python
@@ -462,8 +467,8 @@ if self.dp:
     dataframe['best_ask'] = ob['asks'][0][0]
 ```
 
-!!! Warning
-    The order book is not part of the historic data which means backtesting and hyperopt will not work correctly if this method is used.
+!!! Warning "Warning about backtesting"
+    The order book is not part of the historic data which means backtesting and hyperopt will not work correctly if this method is used, as the method will return uptodate values.
 
 ### *ticker(pair)*
 
@@ -24,7 +24,7 @@ config["strategy"] = "SampleStrategy"
 # Location of the data
 data_location = Path(config['user_data_dir'], 'data', 'binance')
 # Pair to analyze - Only use one pair here
-pair = "BTC_USDT"
+pair = "BTC/USDT"
 ```
 
 
@@ -34,7 +34,9 @@ from freqtrade.data.history import load_pair_history
 
 candles = load_pair_history(datadir=data_location,
                             timeframe=config["timeframe"],
-                            pair=pair)
+                            pair=pair,
+                            data_format = "hdf5",
+                            )
 
 # Confirm success
 print("Loaded " + str(len(candles)) + f" rows of data for {pair} from {data_location}")
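As an illustration (not from the commit): a quick way to sanity-check the result of `load_pair_history` in the notebook; the column names are assumed from freqtrade's standard OHLCV candle format.

``` python
# Inspect the loaded candles
print(candles.columns)  # expected: date, open, high, low, close, volume
candles.head()
```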
@@ -12,7 +12,7 @@ from freqtrade.commands.data_commands import (start_convert_data, start_download
                                               start_list_data)
 from freqtrade.commands.deploy_commands import (start_create_userdir, start_install_ui,
                                                 start_new_hyperopt, start_new_strategy)
-from freqtrade.commands.automation_commands import start_build_hyperopt
+from freqtrade.commands.automation_commands import (start_build_hyperopt, start_custom_hyperopt, start_extract_strategy)
 from freqtrade.commands.hyperopt_commands import start_hyperopt_list, start_hyperopt_show
 from freqtrade.commands.list_commands import (start_list_exchanges, start_list_hyperopts,
                                               start_list_markets, start_list_strategies,
@@ -55,8 +55,13 @@ ARGS_BUILD_STRATEGY = ["user_data_dir", "strategy", "template"]
 
 ARGS_BUILD_HYPEROPT = ["user_data_dir", "hyperopt", "template"]
 
+# Automation
 ARGS_BUILD_CUSTOM_HYPEROPT = ["buy_indicators", "sell_indicators", "hyperopt"]
 
+ARGS_EXTRACT_STRATEGY = ["strategy", "extract_name"]
+
+ARGS_BUILD_BUILD_HYPEROPT = ["strategy", "hyperopt"]
+
 ARGS_CONVERT_DATA = ["pairs", "format_from", "format_to", "erase"]
 ARGS_CONVERT_DATA_OHLCV = ARGS_CONVERT_DATA + ["timeframes"]
 
@@ -175,7 +180,7 @@ class Arguments:
                             start_list_data, start_list_exchanges, start_list_hyperopts,
                             start_list_markets, start_list_strategies,
                             start_list_timeframes, start_new_config, start_new_hyperopt,
-                            start_build_hyperopt,
+                            start_build_hyperopt, start_custom_hyperopt, start_extract_strategy,
                             start_new_strategy, start_plot_dataframe, start_plot_profit,
                             start_show_trades, start_test_pairlist, start_trading)
 
@@ -210,12 +215,24 @@ class Arguments:
         build_hyperopt_cmd.set_defaults(func=start_new_hyperopt)
         self._build_args(optionlist=ARGS_BUILD_HYPEROPT, parser=build_hyperopt_cmd)
 
-        # add build-hyperopt subcommand
-        build_custom_hyperopt_cmd = subparsers.add_parser('build-hyperopt',
+        # add custom-hyperopt subcommand
+        build_custom_hyperopt_cmd = subparsers.add_parser('custom-hyperopt',
                                                           help="Build a custom hyperopt")
-        build_custom_hyperopt_cmd.set_defaults(func=start_build_hyperopt)
+        build_custom_hyperopt_cmd.set_defaults(func=start_custom_hyperopt)
         self._build_args(optionlist=ARGS_BUILD_CUSTOM_HYPEROPT, parser=build_custom_hyperopt_cmd)
 
+        # add extract-strategy subcommand
+        extract_strategy_cmd = subparsers.add_parser('extract-strategy',
+                                                     help="Extract data dictionaries for custom-hyperopt from strategy")
+        extract_strategy_cmd.set_defaults(func=start_extract_strategy)
+        self._build_args(optionlist=ARGS_EXTRACT_STRATEGY, parser=extract_strategy_cmd)
+
+        # add build-hyperopt subcommand
+        build_extracted_hyperopt_cmd = subparsers.add_parser('build-hyperopt',
+                                                             help="Create a hyperopt for a strategy")
+        build_extracted_hyperopt_cmd.set_defaults(func=start_build_hyperopt)
+        self._build_args(optionlist=ARGS_BUILD_BUILD_HYPEROPT, parser=build_extracted_hyperopt_cmd)
+
         # add new-strategy subcommand
         build_strategy_cmd = subparsers.add_parser('new-strategy',
                                                    help="Create new strategy")
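As an illustration (not from the commit): hypothetical invocations of the three sub-commands wired up above, using only options defined in this commit (`--strategy`, `--hyperopt`, `--extract-name`, `-b`/`--buy-indicators`, `-s`/`--sell-indicators`); the strategy and hyperopt names are made up.

``` bash
# Extract buy/sell indicator data from an existing strategy
freqtrade extract-strategy --strategy MyStrategy --extract-name MyStrategy_extract

# Build a hyperopt directly from a strategy
freqtrade build-hyperopt --strategy MyStrategy --hyperopt MyStrategyOpt

# Build a hyperopt from explicit indicator lists
freqtrade custom-hyperopt --hyperopt MyOpt \
    -b '[["rsi","<","guard",30.0]]' \
    -s '[["rsi",">","guard",70.0]]'
```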
@@ -1,9 +1,10 @@
 import ast
 import logging
 from pathlib import Path
-from typing import Any, Dict
+from typing import Any, Dict, List
 
-from freqtrade.constants import USERPATH_HYPEROPTS
+from freqtrade.constants import (USERPATH_HYPEROPTS,
+                                 USERPATH_STRATEGIES)
 from freqtrade.exceptions import OperationalException
 from freqtrade.state import RunMode
 from freqtrade.configuration import setup_utils_configuration
@@ -11,20 +12,139 @@ from freqtrade.misc import render_template
 
 logger = logging.getLogger(__name__)
 
-'''
-TODO
--make the code below more dynamic with a large list of indicators and aims
--buy_space integer values variation based on aim(later deep learning)
--add --mode , see notes
--when making the strategy reading tool, make sure that the populate indicators gets copied to here
-'''
-
-POSSIBLE_GUARDS = ["rsi", "mfi", "fastd"]
-POSSIBLE_TRIGGERS = ["bb_lowerband", "bb_upperband"]
-POSSIBLE_VALUES = {"above": ">", "below": "<"}
+# ---------------------------------------------------extract-strategy------------------------------------------------------
+
+def get_indicator_info(file: List, indicators: Dict) -> None:
+    """
+    Get all necessary information to build a custom hyperopt space using
+    the file and a dictionary filled with the indicators and their corropsonding line numbers.
+    """
+    info_list = []
+    for indicator in indicators:
+        indicator_info = []
+
+        # find the corrosponding aim
+        for position, line in enumerate(file):
+            if position == indicators[indicator]:
+                # use split twice to remove the context around the indicator
+                back_of_line = line.split(f"(dataframe['{indicator}'] ", 1)[1]
+                aim = back_of_line.split()[0]
+
+                # add the indicator and aim to the info
+                indicator_info.append(indicator)
+                indicator_info.append(aim)
+
+                # check if first character after aim is a d in which case the indicator is a trigger
+                if back_of_line.split()[1][0] == "d":
+                    indicator_info.append("trigger")
+
+                    # add the second indicator of the guard to the info list
+                    back_of_line = back_of_line.split("dataframe['")[1]
+                    second_indicator = back_of_line.split("'])")[0]
+                    indicator_info.append(second_indicator)
+
+                # elif indicator[0:3] == "CDL":
+                #     indicator_info.append("guard")
+
+                # else it is a regular guard
+                else:
+                    indicator_info.append("guard")
+
+                    value = back_of_line.split()[1]
+                    value = value[:-1]
+                    value = float(value)
+
+                    indicator_info.append(value)
+        info_list.append(indicator_info)
+
+    return info_list
 
 
-def build_hyperopt_buyelements(buy_indicators: Dict[str, str]):
+def extract_lists(strategypath: Path) -> None:
+    """
+    Get the indicators, their aims and the stoploss and format them into lists
+    """
+    # store the file in a list for reference
+    stored_file = []
+    with open(strategypath) as file:
+        for line in file:
+            stored_file.append(line)
+
+    # find the start and end of buy trend
+    for position, line in enumerate(stored_file):
+        if "populate_buy_trend(" in line:
+            start_buy_number = position
+        elif "populate_sell_trend(" in line:
+            end_buy_number = position
+
+    # list the numbers between the start and end of buy trend
+    buy_lines = []
+    for i in range(start_buy_number, end_buy_number):
+        buy_lines.append(i)
+
+    # populate the indicators dictionaries with indicators attached to the line they are on
+    buyindicators = {}
+    sellindicators = {}
+
+    for position, line in enumerate(stored_file):
+        # check the lines in buy trend for indicator and add them
+        if position in buy_lines and "(dataframe['" in line:
+            # use split twice to remove the context around the indicator
+            back_of_line = line.split("(dataframe['", 1)[1]
+            buyindicator = back_of_line.split("'] ", 1)[0]
+            buyindicators[buyindicator] = position
+
+        # check the lines in sell trend for indicator and add them
+        elif position > end_buy_number and "(dataframe['" in line:
+            # use split twice to remove the context around the indicator
+            back_of_line = line.split("(dataframe['", 1)[1]
+            sellindicator = back_of_line.split("'] ", 1)[0]
+            sellindicators[sellindicator] = position
+
+    # build the final lists
+    buy_info_list = get_indicator_info(stored_file, buyindicators)
+    sell_info_list = get_indicator_info(stored_file, sellindicators)
+
+    # put the final lists into a tuple
+    final_lists = (buy_info_list, sell_info_list)
+
+    return final_lists
+
+
+def start_extract_strategy(args: Dict) -> None:
+    """
+    Check if the right subcommands where passed and start extracting the strategy data
+    """
+    config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
+
+    # check if all required options are filled in
+    if not 'strategy' in args or not args['strategy']:
+        raise OperationalException("`extract-strategy` requires --strategy to be set.")
+    else:
+        # if the name is not specified use (strategy)_extract
+        if not 'extract_name' in args or not args['extract_name']:
+            args['extract_name'] = args['strategy'] + "_extract"
+
+        new_path = config['user_data_dir'] / USERPATH_STRATEGIES / (args['extract_name'] + '.txt')
+        if new_path.exists():
+            raise OperationalException(f"`{new_path}` already exists. "
+                                       "Please choose another name.")
+        # the path of the chosen strategy
+        strategy_path = config['user_data_dir'] / USERPATH_STRATEGIES / (args['strategy'] + '.py')
+
+        # extract the buy and sell indicators as dicts
+        extracted_lists = str(extract_lists(strategy_path))
+
+        # save the dicts in a file
+        logger.info(f"Writing custom hyperopt to `{new_path}`.")
+        new_path.write_text(extracted_lists)
+
+
+# --------------------------------------------------custom-hyperopt------------------------------------------------------
+
+def custom_hyperopt_buyelements(buy_indicators: List):
     """
     Build the arguments with the placefillers for the buygenerator
     """
@@ -32,39 +152,44 @@ def build_hyperopt_buyelements(buy_indicators: Dict[str, str]):
     buy_triggers = ""
     buy_space = ""
 
-    for indicator in buy_indicators:
-        # Error handling
-        if not indicator in POSSIBLE_GUARDS and not indicator in POSSIBLE_TRIGGERS:
-            raise OperationalException(
-                f"`{indicator}` is not part of the available indicators. The current options are {POSSIBLE_GUARDS + POSSIBLE_TRIGGERS}.")
-        elif not buy_indicators[indicator] in POSSIBLE_VALUES:
-            raise OperationalException(
-                f"`{buy_indicators[indicator]}` is not part of the available indicator options. The current options are {POSSIBLE_VALUES}.")
+    for indicator_info in buy_indicators:
+        indicator = indicator_info[0]
+        aim = indicator_info[1]
+        usage = indicator_info[2]
+
         # If the indicator is a guard
-        elif indicator in POSSIBLE_GUARDS:
-            # get the symbol corrosponding to the value
-            aim = POSSIBLE_VALUES[buy_indicators[indicator]]
+        if usage == "guard":
+            value = indicator_info[3]
+
+            if value >= -1.0 and value <= 1.0:
+                lower_bound = value - 0.3
+                upper_bound = value + 0.3
+            else:
+                lower_bound = value - 30.0
+                upper_bound = value + 30.0
 
             # add the guard to its argument
-            buy_guards += f"if '{indicator}-enabled' in params and params['{indicator}-enabled']: conditions.append(dataframe['{indicator}'] {aim} params['{indicator}-value'])"
+            buy_guards += f"if params.get('{indicator}-enabled'):\n conditions.append(dataframe['{indicator}'] {aim} params['{indicator}-value'])\n"
 
             # add the space to its argument
-            buy_space += f"Integer(10, 90, name='{indicator}-value'), Categorical([True, False], name='{indicator}-enabled'),"
-        # If the indicator is a trigger
-        elif indicator in POSSIBLE_TRIGGERS:
-            # get the symbol corrosponding to the value
-            aim = POSSIBLE_VALUES[buy_indicators[indicator]]
+            buy_space += f"Integer({lower_bound}, {upper_bound}, name='{indicator}-value'),\nCategorical([True, False], name='{indicator}-enabled'),\n"
+
+        # If the indicator is a trigger
+        elif usage == "trigger":
+            secondindicator = indicator_info[3]
             # add the trigger to its argument
-            buy_triggers += f"if params['trigger'] == '{indicator}': conditions.append(dataframe['{indicator}'] {aim} dataframe['close'])"
+            buy_triggers += f"if params['trigger'] == '{indicator}':\n conditions.append(dataframe['{indicator}'] {aim} dataframe['{secondindicator}'])\n"
 
     # Final line of indicator space makes all triggers
 
     buy_space += "Categorical(["
 
     # adding all triggers to the list
-    for indicator in buy_indicators:
-        if indicator in POSSIBLE_TRIGGERS:
+    for indicator_info in buy_indicators:
+        indicator = indicator_info[0]
+        usage = indicator_info[2]
+
+        if usage == "trigger":
             buy_space += f"'{indicator}', "
 
     # Deleting the last ", "
@@ -74,7 +199,7 @@ def build_hyperopt_buyelements(buy_indicators: Dict[str, str]):
     return {"buy_guards": buy_guards, "buy_triggers": buy_triggers, "buy_space": buy_space}
 
 
-def build_hyperopt_sellelements(sell_indicators: Dict[str, str]):
+def custom_hyperopt_sellelements(sell_indicators: Dict[str, str]):
     """
     Build the arguments with the placefillers for the sellgenerator
     """
@@ -82,44 +207,50 @@ def build_hyperopt_sellelements(sell_indicators: Dict[str, str]):
     sell_triggers = ""
     sell_space = ""
 
-    for indicator in sell_indicators:
-        # Error handling
-        if not indicator in POSSIBLE_GUARDS and not indicator in POSSIBLE_TRIGGERS:
-            raise OperationalException(
-                f"`{indicator}` is not part of the available indicators. The current options are {POSSIBLE_GUARDS + POSSIBLE_TRIGGERS}.")
-        elif not sell_indicators[indicator] in POSSIBLE_VALUES:
-            raise OperationalException(
-                f"`{sell_indicators[indicator]}` is not part of the available indicator options. The current options are {POSSIBLE_VALUES}.")
-        # If indicator is a guard
-        elif indicator in POSSIBLE_GUARDS:
-            # get the symbol corrosponding to the value
-            aim = POSSIBLE_VALUES[sell_indicators[indicator]]
+    for indicator_info in sell_indicators:
+        indicator = indicator_info[0]
+        aim = indicator_info[1]
+        usage = indicator_info[2]
+
+        # If the indicator is a guard
+        if usage == "guard":
+            value = indicator_info[3]
+
+            if value >= -1 and value <= 1:
+                lower_bound = value - 0.3
+                upper_bound = value + 0.3
+            else:
+                lower_bound = value - 30
+                upper_bound = value + 30
 
             # add the guard to its argument
-            sell_guards += f"if '{indicator}-enabled' in params and params['sell-{indicator}-enabled']: conditions.append(dataframe['{indicator}'] {aim} params['sell-{indicator}-value'])"
+            sell_guards += f"if params.get('sell-{indicator}-enabled'):\n conditions.append(dataframe['{indicator}'] {aim} params['sell-{indicator}-value'])\n"
 
             # add the space to its argument
-            sell_space += f"Integer(10, 90, name='sell-{indicator}-value'), Categorical([True, False], name='sell-{indicator}-enabled'),"
+            sell_space += f"Integer({lower_bound}, {upper_bound}, name='sell-{indicator}-value'),\nCategorical([True, False], name='sell-{indicator}-enabled'),\n"
 
         # If the indicator is a trigger
-        elif indicator in POSSIBLE_TRIGGERS:
-            # get the symbol corrosponding to the value
-            aim = POSSIBLE_VALUES[sell_indicators[indicator]]
+        elif usage == "trigger":
+            secondindicator = indicator_info[3]
 
             # add the trigger to its argument
-            sell_triggers += f"if params['sell-trigger'] == 'sell-{indicator}': conditions.append(dataframe['{indicator}'] {aim} dataframe['close'])"
+            sell_triggers += f"if params['sell-trigger'] == 'sell-{indicator}':\n conditions.append(dataframe['{indicator}'] {aim} dataframe['{secondindicator}'])\n"
 
     # Final line of indicator space makes all triggers
 
     sell_space += "Categorical(["
 
-    # Adding all triggers to the list
-    for indicator in sell_indicators:
-        if indicator in POSSIBLE_TRIGGERS:
+    # adding all triggers to the list
+    for indicator_info in sell_indicators:
+        indicator = indicator_info[0]
+        usage = indicator_info[2]
+
+        if usage == "trigger":
            sell_space += f"'sell-{indicator}', "
 
     # Deleting the last ", "
     sell_space = sell_space[:-2]
-    sell_space += "], name='trigger')"
+    sell_space += "], name='sell-trigger')"
 
     return {"sell_guards": sell_guards, "sell_triggers": sell_triggers, "sell_space": sell_space}
 
@@ -130,11 +261,11 @@ def deploy_custom_hyperopt(hyperopt_name: str, hyperopt_path: Path, buy_indicato
     """
 
     # Build the arguments for the buy and sell generators
-    buy_args = build_hyperopt_buyelements(buy_indicators)
-    sell_args = build_hyperopt_sellelements(sell_indicators)
+    buy_args = custom_hyperopt_buyelements(buy_indicators)
+    sell_args = custom_hyperopt_sellelements(sell_indicators)
 
     # Build the final template
-    strategy_text = render_template(templatefile='base_hyperopt.py.j2',
+    strategy_text = render_template(templatefile='base_custom_hyperopt.py.j2',
                                     arguments={"hyperopt": hyperopt_name,
                                                "buy_guards": buy_args["buy_guards"],
                                                "buy_triggers": buy_args["buy_triggers"],
@@ -148,19 +279,20 @@ def deploy_custom_hyperopt(hyperopt_name: str, hyperopt_path: Path, buy_indicato
     hyperopt_path.write_text(strategy_text)
 
 
-def start_build_hyperopt(args: Dict[str, Any]) -> None:
+def start_custom_hyperopt(args: Dict[str, Any]) -> None:
     """
     Check if the right subcommands where passed and start building the hyperopt
     """
     config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
 
     # check what the name of the hyperopt should be
     if not 'hyperopt' in args or not args['hyperopt']:
-        raise OperationalException("`build-hyperopt` requires --hyperopt to be set.")
+        raise OperationalException("`custom-hyperopt` requires --hyperopt to be set.")
     elif not 'buy_indicators' in args or not args['buy_indicators']:
-        raise OperationalException("`build-hyperopt` requires --buy-indicators to be set.")
+        raise OperationalException("`custom-hyperopt` requires --buy-indicators to be set.")
     elif not 'sell_indicators' in args or not args['sell_indicators']:
-        raise OperationalException("`build-hyperopt` requires --sell-indicators to be set.")
+        raise OperationalException("`custom-hyperopt` requires --sell-indicators to be set.")
     else:
         if args['hyperopt'] == 'DefaultHyperopt':
             raise OperationalException("DefaultHyperopt is not allowed as name.")
@@ -175,3 +307,40 @@ def start_build_hyperopt(args: Dict[str, Any]) -> None:
 
         deploy_custom_hyperopt(args['hyperopt'], new_path,
                                buy_indicators, sell_indicators)
+
+
+# --------------------------------------------------build-hyperopt------------------------------------------------------
+
+def start_build_hyperopt(args: Dict[str, Any]) -> None:
+    """
+    Check if the right subcommands where passed and start building the hyperopt
+    """
+    config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
+
+    # strategy and hyperopt need to be defined
+    if not 'strategy' in args or not args['strategy']:
+        raise OperationalException("`build-hyperopt` requires --strategy to be set.")
+    if not 'hyperopt' in args or not args['hyperopt']:
+        args['hyperopt'] = args['strategy'] + "opt"
+    else:
+        if args['hyperopt'] == 'DefaultHyperopt':
+            raise OperationalException("DefaultHyperopt is not allowed as name.")
+
+    # the path of the chosen strategy
+    strategy_path = config['user_data_dir'] / USERPATH_STRATEGIES / (args['strategy'] + '.py')
+
+    # the path where the hyperopt should be written
+    new_path = config['user_data_dir'] / USERPATH_HYPEROPTS / (args['hyperopt'] + '.py')
+    if new_path.exists():
+        raise OperationalException(f"`{new_path}` already exists. "
+                                   "Please choose another Hyperopt Name.")
+
+    # extract the buy and sell indicators as dicts
+    extracted_lists = extract_lists(strategy_path)
+
+    buy_indicators = extracted_lists[0]
+    sell_indicators = extracted_lists[1]
+
+    # use the dicts to write the hyperopt
+    deploy_custom_hyperopt(args['hyperopt'], new_path,
+                           buy_indicators, sell_indicators)
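As an illustration (not from the commit): the list structure that `extract_lists()` returns and `custom_hyperopt_buyelements()` / `custom_hyperopt_sellelements()` consume. Each entry is `[indicator, comparison operator, usage, numeric value or second indicator]`; the values below are hypothetical.

``` python
# Hypothetical extraction result for a small strategy
buy_indicators = [
    ["rsi", "<", "guard", 30.0],                # guard: compared against a number
    ["bb_lowerband", "<", "trigger", "close"],  # trigger: compared against another dataframe column
]
sell_indicators = [
    ["rsi", ">", "guard", 70.0],
]
```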
@@ -279,14 +279,21 @@ AVAILABLE_CLI_OPTIONS = {
     "buy_indicators": Arg(
         '-b', '--buy-indicators',
         help='Specify the buy indicators the hyperopt should build. '
-        'Example: --buy-indicators `{"rsi":"above","bb_lowerband":"below"}`',
-        metavar='DICT',
+        'Example: --buy-indicators `[["rsi","<","trigger",30.0],["bb_lowerband",">","guard","close"]]`'
+        'Check the documentation for specific requirements for the lists.',
+        metavar='LIST',
     ),
     "sell_indicators": Arg(
         '-s', '--sell-indicators',
         help='Specify the sell indicators the hyperopt should build. '
-        'Example: --sell-indicators `{"rsi":"above","bb_lowerband":"below"}`',
-        metavar='DICT',
+        'Example: --sell-indicators [["rsi",">","trigger",70.0],["bb_lowerband","<","guard","close"]]'
+        'Check the documentation for specific requirements for the lists.',
+        metavar='LIST',
+    ),
+    "extract_name": Arg(
+        '--extract-name',
+        help='Specify the name of the file to which the data should be extracted. ',
+        metavar='FILENAME',
     ),
     # List exchanges
     "print_one_column": Arg(
@@ -45,6 +45,16 @@ USERPATH_NOTEBOOKS = 'notebooks'
 
 TELEGRAM_SETTING_OPTIONS = ['on', 'off', 'silent']
 
+
+# Define decimals per coin for outputs
+# Only used for outputs.
+DECIMAL_PER_COIN_FALLBACK = 3  # Should be low to avoid listing all possible FIAT's
+DECIMALS_PER_COIN = {
+    'BTC': 8,
+    'ETH': 5,
+}
+
+
 # Soure files with destination directories within user-directory
 USER_DATA_FILES = {
     'sample_strategy.py': USERPATH_STRATEGIES,
@@ -383,3 +383,21 @@ def calculate_max_drawdown(trades: pd.DataFrame, *, date_col: str = 'close_date'
     high_date = profit_results.loc[max_drawdown_df.iloc[:idxmin]['high_value'].idxmax(), date_col]
     low_date = profit_results.loc[idxmin, date_col]
     return abs(min(max_drawdown_df['drawdown'])), high_date, low_date
+
+
+def calculate_csum(trades: pd.DataFrame) -> Tuple[float, float]:
+    """
+    Calculate min/max cumsum of trades, to show if the wallet/stake amount ratio is sane
+    :param trades: DataFrame containing trades (requires columns close_date and profit_percent)
+    :return: Tuple (float, float) with cumsum of profit_abs
+    :raise: ValueError if trade-dataframe was found empty.
+    """
+    if len(trades) == 0:
+        raise ValueError("Trade dataframe empty.")
+
+    csum_df = pd.DataFrame()
+    csum_df['sum'] = trades['profit_abs'].cumsum()
+    csum_min = csum_df['sum'].min()
+    csum_max = csum_df['sum'].max()
+
+    return csum_min, csum_max
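A minimal usage sketch (editorial, not part of the commit), using the module path that the `optimize_reports` import later in this commit points at:

``` python
import pandas as pd

from freqtrade.data.btanalysis import calculate_csum

trades = pd.DataFrame({'profit_abs': [0.5, -1.2, 2.0, -0.3]})
# cumulative sums are 0.5, -0.7, 1.3, 1.0
csum_min, csum_max = calculate_csum(trades)  # -> (-0.7, 1.3)
```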
@@ -104,6 +104,7 @@ class Edge:
             exchange=self.exchange,
             timeframe=self.strategy.timeframe,
             timerange=self._timerange,
+            data_format=self.config.get('dataformat_ohlcv', 'json'),
         )
 
         data = load_data(
@@ -159,7 +160,8 @@ class Edge:
         available_capital = (total_capital + capital_in_trade) * self._capital_ratio
         allowed_capital_at_risk = available_capital * self._allowed_risk
         max_position_size = abs(allowed_capital_at_risk / stoploss)
-        position_size = min(max_position_size, free_capital)
+        # Position size must be below available capital.
+        position_size = min(min(max_position_size, free_capital), available_capital)
         if pair in self._cached_pairs:
             logger.info(
                 'winrate: %s, expectancy: %s, position size: %s, pair: %s,'
@@ -19,5 +19,11 @@ class Bittrex(Exchange):
     """
 
     _ft_has: Dict = {
+        "ohlcv_candle_limit_per_timeframe": {
+            '1m': 1440,
+            '5m': 288,
+            '1h': 744,
+            '1d': 365,
+        },
         "l2_limit_range": [1, 25, 500],
     }
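A worked aside (not part of the commit): each per-timeframe limit above covers a fixed window of history, which is why a single `ohlcv_candle_limit` value is no longer enough for Bittrex.

``` python
# Window of history covered by each Bittrex limit (minutes -> days)
1440 * 1 / (60 * 24)       # '1m'  -> 1.0 day
288 * 5 / (60 * 24)        # '5m'  -> 1.0 day
744 * 60 / (60 * 24)       # '1h'  -> 31.0 days
365 * 1440 / (60 * 24)     # '1d'  -> 365.0 days
```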
@@ -3,6 +3,7 @@
 Cryptocurrency Exchanges support
 """
 import asyncio
+import http
 import inspect
 import logging
 from copy import deepcopy
@@ -34,6 +35,12 @@ CcxtModuleType = Any
 logger = logging.getLogger(__name__)
 
 
+# Workaround for adding samesite support to pre 3.8 python
+# Only applies to python3.7, and only on certain exchanges (kraken)
+# Replicates the fix from starlette (which is actually causing this problem)
+http.cookies.Morsel._reserved["samesite"] = "SameSite"  # type: ignore
+
+
 class Exchange:
 
     _config: Dict = {}
@@ -94,7 +101,6 @@ class Exchange:
             logger.info("Overriding exchange._ft_has with config params, result: %s", self._ft_has)
 
         # Assign this directly for easy access
-        self._ohlcv_candle_limit = self._ft_has['ohlcv_candle_limit']
         self._ohlcv_partial_candle = self._ft_has['ohlcv_partial_candle']
 
         self._trades_pagination = self._ft_has['trades_pagination']
@@ -130,7 +136,8 @@ class Exchange:
         self.validate_pairs(config['exchange']['pair_whitelist'])
         self.validate_ordertypes(config.get('order_types', {}))
         self.validate_order_time_in_force(config.get('order_time_in_force', {}))
-        self.validate_required_startup_candles(config.get('startup_candle_count', 0))
+        self.validate_required_startup_candles(config.get('startup_candle_count', 0),
+                                               config.get('timeframe', ''))
 
         # Converts the interval provided in minutes in config to seconds
         self.markets_refresh_interval: int = exchange_config.get(
@@ -191,11 +198,6 @@ class Exchange:
     def timeframes(self) -> List[str]:
         return list((self._api.timeframes or {}).keys())
 
-    @property
-    def ohlcv_candle_limit(self) -> int:
-        """exchange ohlcv candle limit"""
-        return int(self._ohlcv_candle_limit)
-
     @property
     def markets(self) -> Dict:
         """exchange ccxt markets"""
@@ -209,6 +211,17 @@ class Exchange:
         """exchange ccxt precisionMode"""
         return self._api.precisionMode
 
+    def ohlcv_candle_limit(self, timeframe: str) -> int:
+        """
+        Exchange ohlcv candle limit
+        Uses ohlcv_candle_limit_per_timeframe if the exchange has different limts
+        per timeframe (e.g. bittrex), otherwise falls back to ohlcv_candle_limit
+        :param timeframe: Timeframe to check
+        :return: Candle limit as integer
+        """
+        return int(self._ft_has.get('ohlcv_candle_limit_per_timeframe', {}).get(
+            timeframe, self._ft_has.get('ohlcv_candle_limit')))
+
     def get_markets(self, base_currencies: List[str] = None, quote_currencies: List[str] = None,
                     pairs_only: bool = False, active_only: bool = False) -> Dict[str, Any]:
         """
@@ -421,15 +434,16 @@ class Exchange:
             raise OperationalException(
                 f'Time in force policies are not supported for {self.name} yet.')
 
-    def validate_required_startup_candles(self, startup_candles: int) -> None:
+    def validate_required_startup_candles(self, startup_candles: int, timeframe: str) -> None:
         """
-        Checks if required startup_candles is more than ohlcv_candle_limit.
+        Checks if required startup_candles is more than ohlcv_candle_limit().
         Requires a grace-period of 5 candles - so a startup-period up to 494 is allowed by default.
         """
-        if startup_candles + 5 > self._ft_has['ohlcv_candle_limit']:
+        candle_limit = self.ohlcv_candle_limit(timeframe)
+        if startup_candles + 5 > candle_limit:
             raise OperationalException(
                 f"This strategy requires {startup_candles} candles to start. "
-                f"{self.name} only provides {self._ft_has['ohlcv_candle_limit']}.")
+                f"{self.name} only provides {candle_limit} for {timeframe}.")
 
     def exchange_has(self, endpoint: str) -> bool:
         """
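A worked aside (not part of the commit): with the Bittrex limits added earlier in this commit, the check now depends on the configured timeframe. On a 5m timeframe the limit is 288 candles, so the largest `startup_candle_count` that still passes is 283.

``` python
# candle_limit = 288 for Bittrex '5m' (see ohlcv_candle_limit_per_timeframe above)
283 + 5 > 288   # False -> accepted
284 + 5 > 288   # True  -> OperationalException is raised
```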
@@ -714,7 +728,7 @@ class Exchange:
         """
         Get candle history using asyncio and returns the list of candles.
         Handles all async work for this.
-        Async over one pair, assuming we get `self._ohlcv_candle_limit` candles per call.
+        Async over one pair, assuming we get `self.ohlcv_candle_limit()` candles per call.
         :param pair: Pair to download
         :param timeframe: Timeframe to get data for
         :param since_ms: Timestamp in milliseconds to get history from
@@ -744,7 +758,7 @@ class Exchange:
         Download historic ohlcv
         """
 
-        one_call = timeframe_to_msecs(timeframe) * self._ohlcv_candle_limit
+        one_call = timeframe_to_msecs(timeframe) * self.ohlcv_candle_limit(timeframe)
         logger.debug(
             "one_call: %s msecs (%s)",
             one_call,
@@ -846,7 +860,7 @@ class Exchange:
 
             data = await self._api_async.fetch_ohlcv(pair, timeframe=timeframe,
                                                      since=since_ms,
-                                                     limit=self._ohlcv_candle_limit)
+                                                     limit=self.ohlcv_candle_limit(timeframe))
 
             # Some exchanges sort OHLCV in ASC order and others in DESC.
             # Ex: Bittrex returns the list of OHLCV in ASC order (oldest first, newest last)
@@ -1019,7 +1033,7 @@ class Exchange:
         """
         Get trade history data using asyncio.
         Handles all async work and returns the list of candles.
-        Async over one pair, assuming we get `self._ohlcv_candle_limit` candles per call.
+        Async over one pair, assuming we get `self.ohlcv_candle_limit()` candles per call.
         :param pair: Pair to download
         :param since: Timestamp in milliseconds to get history from
         :param until: Timestamp in milliseconds. Defaults to current timestamp if not defined.
@@ -179,6 +179,7 @@ class FreqtradeBot(LoggingMixin):
         # Without this, freqtrade my try to recreate stoploss_on_exchange orders
         # while selling is in process, since telegram messages arrive in an different thread.
         with self._sell_lock:
+            trades = Trade.get_open_trades()
             # First process current opened trades (positions)
             self.exit_positions(trades)
 
@@ -1183,6 +1184,7 @@ class FreqtradeBot(LoggingMixin):
         trade.orders.append(order_obj)
 
         trade.open_order_id = order['id']
+        trade.sell_order_status = ''
         trade.close_rate_requested = limit
         trade.sell_reason = sell_reason.value
         # In case of market sell orders the order can be closed immediately
@@ -11,10 +11,35 @@ from typing.io import IO
 
 import rapidjson
 
+from freqtrade.constants import DECIMAL_PER_COIN_FALLBACK, DECIMALS_PER_COIN
+
 
 logger = logging.getLogger(__name__)
 
 
+def decimals_per_coin(coin: str):
+    """
+    Helper method getting decimal amount for this coin
+    example usage: f".{decimals_per_coin('USD')}f"
+    :param coin: Which coin are we printing the price / value for
+    """
+    return DECIMALS_PER_COIN.get(coin, DECIMAL_PER_COIN_FALLBACK)
+
+
+def round_coin_value(value: float, coin: str, show_coin_name=True) -> str:
+    """
+    Get price value for this coin
+    :param value: Value to be printed
+    :param coin: Which coin are we printing the price / value for
+    :param show_coin_name: Return string in format: "222.22 USDT" or "222.22"
+    :return: Formatted / rounded value (with or without coin name)
+    """
+    if show_coin_name:
+        return f"{value:.{decimals_per_coin(coin)}f} {coin}"
+    else:
+        return f"{value:.{decimals_per_coin(coin)}f}"
+
+
 def shorten_date(_date: str) -> str:
     """
     Trim the date so it fits on small screens
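Editorial aside (not part of the commit): expected behaviour of the new helpers, following the `DECIMALS_PER_COIN` table ('BTC': 8, 'ETH': 5) and the fallback of 3 decimals added to the constants in this commit.

``` python
round_coin_value(0.00123456, 'BTC')                        # '0.00123456 BTC'
round_coin_value(12.3456789, 'ETH', show_coin_name=False)  # '12.34568'
round_coin_value(1234.56789, 'USDT')                       # '1234.568 USDT' (fallback: 3 decimals)
```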
@@ -546,10 +546,11 @@ class Hyperopt:
 
         )
         return self._get_results_dict(backtesting_results, min_date, max_date,
-                                      params_dict, params_details)
+                                      params_dict, params_details,
+                                      processed=processed)
 
     def _get_results_dict(self, backtesting_results, min_date, max_date,
-                          params_dict, params_details):
+                          params_dict, params_details, processed: Dict[str, DataFrame]):
         results_metrics = self._calculate_results_metrics(backtesting_results)
         results_explanation = self._format_results_explanation_string(results_metrics)
 
@@ -563,7 +564,8 @@ class Hyperopt:
         loss: float = MAX_LOSS
         if trade_count >= self.config['hyperopt_min_trades']:
             loss = self.calculate_loss(results=backtesting_results, trade_count=trade_count,
-                                       min_date=min_date.datetime, max_date=max_date.datetime)
+                                       min_date=min_date.datetime, max_date=max_date.datetime,
+                                       config=self.config, processed=processed)
         return {
             'loss': loss,
             'params_dict': params_dict,
@@ -5,6 +5,7 @@ This module defines the interface for the loss-function for hyperopt
 
 from abc import ABC, abstractmethod
 from datetime import datetime
+from typing import Dict
 
 from pandas import DataFrame
 
@@ -19,7 +20,9 @@ class IHyperOptLoss(ABC):
     @staticmethod
     @abstractmethod
     def hyperopt_loss_function(results: DataFrame, trade_count: int,
-                               min_date: datetime, max_date: datetime, *args, **kwargs) -> float:
+                               min_date: datetime, max_date: datetime,
+                               config: Dict, processed: Dict[str, DataFrame],
+                               *args, **kwargs) -> float:
         """
         Objective function, returns smaller number for better results
         """
@@ -9,8 +9,9 @@ from pandas import DataFrame
 from tabulate import tabulate

 from freqtrade.constants import DATETIME_PRINT_FORMAT, LAST_BT_RESULT_FN
-from freqtrade.data.btanalysis import calculate_market_change, calculate_max_drawdown
-from freqtrade.misc import file_dump_json
+from freqtrade.data.btanalysis import (calculate_csum, calculate_market_change,
+                                       calculate_max_drawdown)
+from freqtrade.misc import decimals_per_coin, file_dump_json, round_coin_value


 logger = logging.getLogger(__name__)

@@ -38,11 +39,12 @@ def store_backtest_stats(recordfilename: Path, stats: Dict[str, DataFrame]) -> N
     file_dump_json(latest_filename, {'latest_backtest': str(filename.name)})


-def _get_line_floatfmt() -> List[str]:
+def _get_line_floatfmt(stake_currency: str) -> List[str]:
     """
     Generate floatformat (goes in line with _generate_result_line())
     """
-    return ['s', 'd', '.2f', '.2f', '.8f', '.2f', 'd', 'd', 'd', 'd']
+    return ['s', 'd', '.2f', '.2f', f'.{decimals_per_coin(stake_currency)}f',
+            '.2f', 'd', 'd', 'd', 'd']


 def _get_line_header(first_column: str, stake_currency: str) -> List[str]:
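To illustrate the new `stake_currency` argument, a small assumed example; the decimal counts follow `decimals_per_coin` as exercised by the tests in this commit:

```python
# The absolute-profit column now adapts its precision to the stake currency
_get_line_floatfmt('BTC')   # ['s', 'd', '.2f', '.2f', '.8f', '.2f', 'd', 'd', 'd', 'd']
_get_line_floatfmt('USDT')  # ['s', 'd', '.2f', '.2f', '.3f', '.2f', 'd', 'd', 'd', 'd']
```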
@@ -323,6 +325,13 @@ def generate_backtest_stats(btdata: Dict[str, DataFrame],
             'drawdown_end': drawdown_end,
             'drawdown_end_ts': drawdown_end.timestamp() * 1000,
         })
+
+        csum_min, csum_max = calculate_csum(results)
+        strat_stats.update({
+            'csum_min': csum_min,
+            'csum_max': csum_max
+        })
+
     except ValueError:
         strat_stats.update({
             'max_drawdown': 0.0,
@@ -330,6 +339,8 @@ def generate_backtest_stats(btdata: Dict[str, DataFrame],
             'drawdown_start_ts': 0,
             'drawdown_end': datetime(1970, 1, 1, tzinfo=timezone.utc),
             'drawdown_end_ts': 0,
+            'csum_min': 0,
+            'csum_max': 0
         })

     strategy_results = generate_strategy_metrics(all_results=all_results)
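`calculate_csum` is presumably defined in `freqtrade.data.btanalysis` (see the new import above) and is not shown in this excerpt. Based on how it is used here and on the new `test_calculate_csum` test further below (it returns a `(min, max)` pair of floats and raises `ValueError('Trade dataframe empty.')` on an empty DataFrame), a plausible sketch could look like this; the `profit_abs` column name is an assumption for illustration, not taken from this diff:

```python
from typing import Tuple

from pandas import DataFrame


def calculate_csum(trades: DataFrame) -> Tuple[float, float]:
    """Return the minimum and maximum of the cumulative absolute profit of a backtest."""
    if len(trades) == 0:
        raise ValueError("Trade dataframe empty.")
    csum = trades['profit_abs'].cumsum()  # column name assumed for illustration
    return csum.min(), csum.max()
```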
@@ -352,7 +363,7 @@ def text_table_bt_results(pair_results: List[Dict[str, Any]], stake_currency: st
     """

     headers = _get_line_header('Pair', stake_currency)
-    floatfmt = _get_line_floatfmt()
+    floatfmt = _get_line_floatfmt(stake_currency)
     output = [[
         t['key'], t['trades'], t['profit_mean_pct'], t['profit_sum_pct'], t['profit_total_abs'],
         t['profit_total_pct'], t['duration_avg'], t['wins'], t['draws'], t['losses']
@@ -383,7 +394,9 @@ def text_table_sell_reason(sell_reason_stats: List[Dict[str, Any]], stake_curren

     output = [[
         t['sell_reason'], t['trades'], t['wins'], t['draws'], t['losses'],
-        t['profit_mean_pct'], t['profit_sum_pct'], t['profit_total_abs'], t['profit_total_pct'],
+        t['profit_mean_pct'], t['profit_sum_pct'],
+        round_coin_value(t['profit_total_abs'], stake_currency, False),
+        t['profit_total_pct'],
     ] for t in sell_reason_stats]
     return tabulate(output, headers=headers, tablefmt="orgtbl", stralign="right")

@@ -396,7 +409,7 @@ def text_table_strategy(strategy_results, stake_currency: str) -> str:
     :param all_results: Dict of <Strategyname: DataFrame> containing results for all strategies
     :return: pretty printed table with tabulate as string
     """
-    floatfmt = _get_line_floatfmt()
+    floatfmt = _get_line_floatfmt(stake_currency)
    headers = _get_line_header('Strategy', stake_currency)

    output = [[
@@ -436,6 +449,12 @@ def text_table_add_metrics(strat_results: Dict) -> str:
             ('Avg. Duration Winners', f"{strat_results['winner_holding_avg']}"),
             ('Avg. Duration Loser', f"{strat_results['loser_holding_avg']}"),
             ('', ''),  # Empty line to improve readability
+
+            ('Abs Profit Min', round_coin_value(strat_results['csum_min'],
+                                                strat_results['stake_currency'])),
+            ('Abs Profit Max', round_coin_value(strat_results['csum_max'],
+                                                strat_results['stake_currency'])),
+
             ('Max Drawdown', f"{round(strat_results['max_drawdown'] * 100, 2)}%"),
             ('Drawdown Start', strat_results['drawdown_start'].strftime(DATETIME_PRINT_FORMAT)),
             ('Drawdown End', strat_results['drawdown_end'].strftime(DATETIME_PRINT_FORMAT)),
@@ -171,6 +171,10 @@ class Order(_DECL_BASE):
         """
         Get all non-closed orders - useful when trying to batch-update orders
         """
+        if not isinstance(order, dict):
+            logger.warning(f"{order} is not a valid response object.")
+            return
+
         filtered_orders = [o for o in orders if o.order_id == order.get('id')]
         if filtered_orders:
             oobj = filtered_orders[0]
@@ -53,7 +53,7 @@ def init_plotscript(config, markets: List, startup_candles: int = 0):
         data_format=config.get('dataformat_ohlcv', 'json'),
     )

-    if startup_candles:
+    if startup_candles and data:
         min_date, max_date = get_timerange(data)
         logger.info(f"Loading data from {min_date} to {max_date}")
         timerange.adjust_start_if_necessary(timeframe_to_seconds(config.get('timeframe', '5m')),

@@ -67,14 +67,16 @@ def init_plotscript(config, markets: List, startup_candles: int = 0):
     if not filename.is_dir() and not filename.is_file():
         logger.warning("Backtest file is missing skipping trades.")
         no_trades = True
+    try:
         trades = load_trades(
             config['trade_source'],
             db_url=config.get('db_url'),
             exportfilename=filename,
             no_trades=no_trades,
             strategy=config.get('strategy'),
         )
+    except ValueError as e:
+        raise OperationalException(e) from e
     trades = trim_dataframe(trades, timerange, 'open_date')

     return {"ohlcv": data,
@@ -30,10 +30,10 @@ class AgeFilter(IPairList):

         if self._min_days_listed < 1:
             raise OperationalException("AgeFilter requires min_days_listed to be >= 1")
-        if self._min_days_listed > exchange.ohlcv_candle_limit:
+        if self._min_days_listed > exchange.ohlcv_candle_limit('1d'):
             raise OperationalException("AgeFilter requires min_days_listed to not exceed "
                                        "exchange max request size "
-                                       f"({exchange.ohlcv_candle_limit})")
+                                       f"({exchange.ohlcv_candle_limit('1d')})")

     @property
     def needstickers(self) -> bool:
@@ -32,10 +32,10 @@ class RangeStabilityFilter(IPairList):

         if self._days < 1:
             raise OperationalException("RangeStabilityFilter requires lookback_days to be >= 1")
-        if self._days > exchange.ohlcv_candle_limit:
+        if self._days > exchange.ohlcv_candle_limit('1d'):
             raise OperationalException("RangeStabilityFilter requires lookback_days to not "
                                        "exceed exchange max request size "
-                                       f"({exchange.ohlcv_candle_limit})")
+                                       f"({exchange.ohlcv_candle_limit('1d')})")

     @property
     def needstickers(self) -> bool:
@@ -58,13 +58,13 @@ class StoplossGuard(IProtection):
                                          SellType.STOPLOSS_ON_EXCHANGE.value)
                   and trade.close_profit < 0)]

-        if len(trades) > self._trade_limit:
-            self.log_once(f"Trading stopped due to {self._trade_limit} "
-                          f"stoplosses within {self._lookback_period} minutes.", logger.info)
-            until = self.calculate_lock_end(trades, self._stop_duration)
-            return True, until, self._reason()
-
-        return False, None, None
+        if len(trades) < self._trade_limit:
+            return False, None, None
+
+        self.log_once(f"Trading stopped due to {self._trade_limit} "
+                      f"stoplosses within {self._lookback_period} minutes.", logger.info)
+        until = self.calculate_lock_end(trades, self._stop_duration)
+        return True, until, self._reason()

     def global_stop(self, date_now: datetime) -> ProtectionReturn:
        """
@@ -113,7 +113,7 @@ class Daily(BaseModel):


 class ShowConfig(BaseModel):
-    dry_run: str
+    dry_run: bool
    stake_currency: str
    stake_amount: Union[float, str]
    max_open_trades: int
@@ -167,7 +167,7 @@ def reload_config(rpc: RPC = Depends(get_rpc)):


 @router.get('/pair_candles', response_model=PairHistory, tags=['candle data'])
-def pair_candles(pair: str, timeframe: str, limit: Optional[int], rpc=Depends(get_rpc)):
+def pair_candles(pair: str, timeframe: str, limit: Optional[int], rpc: RPC = Depends(get_rpc)):
     return rpc._rpc_analysed_dataframe(pair, timeframe, limit)

@@ -2,6 +2,7 @@ import logging
 from ipaddress import IPv4Address
 from typing import Any, Dict

+import rapidjson
 import uvicorn
 from fastapi import Depends, FastAPI
 from fastapi.middleware.cors import CORSMiddleware

@@ -14,6 +15,17 @@ from freqtrade.rpc.rpc import RPC, RPCException, RPCHandler
 logger = logging.getLogger(__name__)


+class FTJSONResponse(JSONResponse):
+    media_type = "application/json"
+
+    def render(self, content: Any) -> bytes:
+        """
+        Use rapidjson for responses
+        Handles NaN and Inf / -Inf in a javascript way by default.
+        """
+        return rapidjson.dumps(content).encode("utf-8")
+
+
 class ApiServer(RPCHandler):

     _rpc: RPC

@@ -32,6 +44,7 @@ class ApiServer(RPCHandler):
         self.app = FastAPI(title="Freqtrade API",
                            docs_url='/docs' if api_config.get('enable_openapi', False) else None,
                            redoc_url=None,
+                           default_response_class=FTJSONResponse,
                            )
         self.configure_app(self.app, self._config)

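A small, hypothetical illustration of what the custom response class buys: the stock Starlette/FastAPI `JSONResponse` serialises with the standard `json` module and `allow_nan=False`, so payloads containing NaN would raise, whereas `rapidjson.dumps` emits JavaScript-style literals as the docstring above notes. The exact output shown is an assumption, not taken from this diff:

```python
import math

import rapidjson

payload = {"profit_abs": math.nan, "ratio": math.inf}

# rapidjson writes NaN / Infinity literals by default, so such payloads
# no longer crash response rendering in the API server.
print(rapidjson.dumps(payload))  # expected: {"profit_abs":NaN,"ratio":Infinity}
```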
@@ -9,7 +9,7 @@ from math import isnan
 from typing import Any, Dict, List, Optional, Tuple, Union

 import arrow
-from numpy import NAN, int64, mean
+from numpy import NAN, inf, int64, mean
 from pandas import DataFrame

 from freqtrade.configuration.timerange import TimeRange

@@ -747,6 +747,7 @@ class RPC:
         sell_mask = (dataframe['sell'] == 1)
         sell_signals = int(sell_mask.sum())
         dataframe.loc[sell_mask, '_sell_signal_open'] = dataframe.loc[sell_mask, 'open']
+        dataframe = dataframe.replace([inf, -inf], NAN)
         dataframe = dataframe.replace({NAN: None})

         res = {
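For clarity, a minimal standalone sketch of the two-step sanitisation in the hunk above (an assumed example, not part of the diff): infinite values are first mapped to NaN, then NaN to `None`, so the affected cells serialise as JSON `null`:

```python
import numpy as np
import pandas as pd

df = pd.DataFrame({'close': [1.0, np.inf, np.nan]})

# Same order as above: +/-inf -> NaN, then NaN -> None
df = df.replace([np.inf, -np.inf], np.nan)
df = df.replace({np.nan: None})
print(df['close'].tolist())  # [1.0, None, None]
```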
@@ -775,7 +776,8 @@ class RPC:
         })
         return res

-    def _rpc_analysed_dataframe(self, pair: str, timeframe: str, limit: int) -> Dict[str, Any]:
+    def _rpc_analysed_dataframe(self, pair: str, timeframe: str,
+                                limit: Optional[int]) -> Dict[str, Any]:

         _data, last_analyzed = self._freqtrade.dataprovider.get_analyzed_dataframe(
             pair, timeframe)
@@ -18,6 +18,7 @@ from telegram.utils.helpers import escape_markdown

 from freqtrade.__init__ import __version__
 from freqtrade.exceptions import OperationalException
+from freqtrade.misc import round_coin_value
 from freqtrade.rpc import RPC, RPCException, RPCHandler, RPCMessageType


@@ -189,14 +190,14 @@ class Telegram(RPCHandler):
             else:
                 msg['stake_amount_fiat'] = 0

-            message = ("\N{LARGE BLUE CIRCLE} *{exchange}:* Buying {pair}\n"
-                       "*Amount:* `{amount:.8f}`\n"
-                       "*Open Rate:* `{limit:.8f}`\n"
-                       "*Current Rate:* `{current_rate:.8f}`\n"
-                       "*Total:* `({stake_amount:.6f} {stake_currency}").format(**msg)
+            message = (f"\N{LARGE BLUE CIRCLE} *{msg['exchange']}:* Buying {msg['pair']}\n"
+                       f"*Amount:* `{msg['amount']:.8f}`\n"
+                       f"*Open Rate:* `{msg['limit']:.8f}`\n"
+                       f"*Current Rate:* `{msg['current_rate']:.8f}`\n"
+                       f"*Total:* `({round_coin_value(msg['stake_amount'], msg['stake_currency'])}")

             if msg.get('fiat_currency', None):
-                message += ", {stake_amount_fiat:.3f} {fiat_currency}".format(**msg)
+                message += f", {round_coin_value(msg['stake_amount_fiat'], msg['fiat_currency'])}"
             message += ")`"

         elif msg['type'] == RPCMessageType.BUY_CANCEL_NOTIFICATION:
|
|||||||
)
|
)
|
||||||
stats_tab = tabulate(
|
stats_tab = tabulate(
|
||||||
[[day['date'],
|
[[day['date'],
|
||||||
f"{day['abs_profit']:.8f} {stats['stake_currency']}",
|
f"{round_coin_value(day['abs_profit'], stats['stake_currency'])}",
|
||||||
f"{day['fiat_value']:.3f} {stats['fiat_display_currency']}",
|
f"{day['fiat_value']:.3f} {stats['fiat_display_currency']}",
|
||||||
f"{day['trade_count']} trades"] for day in stats['data']],
|
f"{day['trade_count']} trades"] for day in stats['data']],
|
||||||
headers=[
|
headers=[
|
||||||
@ -415,18 +416,18 @@ class Telegram(RPCHandler):
|
|||||||
# Message to display
|
# Message to display
|
||||||
if stats['closed_trade_count'] > 0:
|
if stats['closed_trade_count'] > 0:
|
||||||
markdown_msg = ("*ROI:* Closed trades\n"
|
markdown_msg = ("*ROI:* Closed trades\n"
|
||||||
f"∙ `{profit_closed_coin:.8f} {stake_cur} "
|
f"∙ `{round_coin_value(profit_closed_coin, stake_cur)} "
|
||||||
f"({profit_closed_percent_mean:.2f}%) "
|
f"({profit_closed_percent_mean:.2f}%) "
|
||||||
f"({profit_closed_percent_sum} \N{GREEK CAPITAL LETTER SIGMA}%)`\n"
|
f"({profit_closed_percent_sum} \N{GREEK CAPITAL LETTER SIGMA}%)`\n"
|
||||||
f"∙ `{profit_closed_fiat:.3f} {fiat_disp_cur}`\n")
|
f"∙ `{round_coin_value(profit_closed_fiat, fiat_disp_cur)}`\n")
|
||||||
else:
|
else:
|
||||||
markdown_msg = "`No closed trade` \n"
|
markdown_msg = "`No closed trade` \n"
|
||||||
|
|
||||||
markdown_msg += (f"*ROI:* All trades\n"
|
markdown_msg += (f"*ROI:* All trades\n"
|
||||||
f"∙ `{profit_all_coin:.8f} {stake_cur} "
|
f"∙ `{round_coin_value(profit_all_coin, stake_cur)} "
|
||||||
f"({profit_all_percent_mean:.2f}%) "
|
f"({profit_all_percent_mean:.2f}%) "
|
||||||
f"({profit_all_percent_sum} \N{GREEK CAPITAL LETTER SIGMA}%)`\n"
|
f"({profit_all_percent_sum} \N{GREEK CAPITAL LETTER SIGMA}%)`\n"
|
||||||
f"∙ `{profit_all_fiat:.3f} {fiat_disp_cur}`\n"
|
f"∙ `{round_coin_value(profit_all_fiat, fiat_disp_cur)}`\n"
|
||||||
f"*Total Trade Count:* `{trade_count}`\n"
|
f"*Total Trade Count:* `{trade_count}`\n"
|
||||||
f"*First Trade opened:* `{first_trade_date}`\n"
|
f"*First Trade opened:* `{first_trade_date}`\n"
|
||||||
f"*Latest Trade opened:* `{latest_trade_date}\n`"
|
f"*Latest Trade opened:* `{latest_trade_date}\n`"
|
||||||
@ -494,15 +495,17 @@ class Telegram(RPCHandler):
|
|||||||
"Starting capital: "
|
"Starting capital: "
|
||||||
f"`{self._config['dry_run_wallet']}` {self._config['stake_currency']}.\n"
|
f"`{self._config['dry_run_wallet']}` {self._config['stake_currency']}.\n"
|
||||||
)
|
)
|
||||||
for currency in result['currencies']:
|
for curr in result['currencies']:
|
||||||
if currency['est_stake'] > 0.0001:
|
if curr['est_stake'] > 0.0001:
|
||||||
curr_output = ("*{currency}:*\n"
|
curr_output = (
|
||||||
"\t`Available: {free: .8f}`\n"
|
f"*{curr['currency']}:*\n"
|
||||||
"\t`Balance: {balance: .8f}`\n"
|
f"\t`Available: {curr['free']:.8f}`\n"
|
||||||
"\t`Pending: {used: .8f}`\n"
|
f"\t`Balance: {curr['balance']:.8f}`\n"
|
||||||
"\t`Est. {stake}: {est_stake: .8f}`\n").format(**currency)
|
f"\t`Pending: {curr['used']:.8f}`\n"
|
||||||
|
f"\t`Est. {curr['stake']}: "
|
||||||
|
f"{round_coin_value(curr['est_stake'], curr['stake'], False)}`\n")
|
||||||
else:
|
else:
|
||||||
curr_output = "*{currency}:* not showing <1$ amount \n".format(**currency)
|
curr_output = f"*{curr['currency']}:* not showing <1$ amount \n"
|
||||||
|
|
||||||
# Handle overflowing messsage length
|
# Handle overflowing messsage length
|
||||||
if len(output + curr_output) >= MAX_TELEGRAM_MESSAGE_LENGTH:
|
if len(output + curr_output) >= MAX_TELEGRAM_MESSAGE_LENGTH:
|
||||||
@ -512,8 +515,9 @@ class Telegram(RPCHandler):
|
|||||||
output += curr_output
|
output += curr_output
|
||||||
|
|
||||||
output += ("\n*Estimated Value*:\n"
|
output += ("\n*Estimated Value*:\n"
|
||||||
"\t`{stake}: {total: .8f}`\n"
|
f"\t`{result['stake']}: {result['total']: .8f}`\n"
|
||||||
"\t`{symbol}: {value: .2f}`\n").format(**result)
|
f"\t`{result['symbol']}: "
|
||||||
|
f"{round_coin_value(result['value'], result['symbol'], False)}`\n")
|
||||||
self._send_msg(output)
|
self._send_msg(output)
|
||||||
except RPCException as e:
|
except RPCException as e:
|
||||||
self._send_msg(str(e))
|
self._send_msg(str(e))
|
||||||
|
freqtrade/templates/base_custom_hyperopt.py.j2 (new file, 164 lines)
@@ -0,0 +1,164 @@
# pragma pylint: disable=missing-docstring, invalid-name, pointless-string-statement

# --- Do not remove these libs ---
from functools import reduce
from typing import Any, Callable, Dict, List

import numpy as np  # noqa
import pandas as pd  # noqa
from pandas import DataFrame
from skopt.space import Categorical, Dimension, Integer, Real  # noqa

from freqtrade.optimize.hyperopt_interface import IHyperOpt

# --------------------------------
# Add your lib to import here
import talib.abstract as ta  # noqa
import freqtrade.vendor.qtpylib.indicators as qtpylib


class {{ hyperopt }}(IHyperOpt):
    """
    This is a Hyperopt template to get you started.

    More information in the documentation: https://www.freqtrade.io/en/latest/hyperopt/

    You should:
    - Add any lib you need to build your hyperopt.

    You must keep:
    - The prototypes for the methods: populate_indicators, indicator_space, buy_strategy_generator.

    The methods roi_space, generate_roi_table and stoploss_space are not required
    and are provided by default.
    However, you may override them if you need 'roi' and 'stoploss' spaces that
    differ from the defaults offered by Freqtrade.
    Sample implementation of these methods will be copied to `user_data/hyperopts` when
    creating the user-data directory using `freqtrade create-userdir --userdir user_data`,
    or is available online under the following URL:
    https://github.com/freqtrade/freqtrade/blob/develop/freqtrade/templates/sample_hyperopt_advanced.py.
    """

    @staticmethod
    def buy_strategy_generator(params: Dict[str, Any]) -> Callable:
        """
        Define the buy strategy parameters to be used by Hyperopt.
        """
        def populate_buy_trend(dataframe: DataFrame, metadata: dict) -> DataFrame:
            """
            Buy strategy Hyperopt will build and use.
            """
            conditions = []

            # GUARDS AND TRENDS
            {{ buy_guards | indent(12) }}

            # TRIGGERS
            if 'trigger' in params:
                {{ buy_triggers | indent(16) }}

            # Check that the candle had volume
            conditions.append(dataframe['volume'] > 0)

            if conditions:
                dataframe.loc[
                    reduce(lambda x, y: x & y, conditions),
                    'buy'] = 1

            return dataframe

        return populate_buy_trend

    @staticmethod
    def indicator_space() -> List[Dimension]:
        """
        Define your Hyperopt space for searching buy strategy parameters.
        """
        return [
            {{ buy_space | indent(12) }}
        ]

    @staticmethod
    def sell_strategy_generator(params: Dict[str, Any]) -> Callable:
        """
        Define the sell strategy parameters to be used by Hyperopt.
        """
        def populate_sell_trend(dataframe: DataFrame, metadata: dict) -> DataFrame:
            """
            Sell strategy Hyperopt will build and use.
            """
            conditions = []

            # GUARDS AND TRENDS
            {{ sell_guards | indent(12) }}

            # TRIGGERS
            if 'sell-trigger' in params:
                {{ sell_triggers | indent(16) }}

            # Check that the candle had volume
            conditions.append(dataframe['volume'] > 0)

            if conditions:
                dataframe.loc[
                    reduce(lambda x, y: x & y, conditions),
                    'sell'] = 1

            return dataframe

        return populate_sell_trend

    @staticmethod
    def sell_indicator_space() -> List[Dimension]:
        """
        Define your Hyperopt space for searching sell strategy parameters.
        """
        return [
            {{ sell_space | indent(12) }}
        ]

    @staticmethod
    def generate_roi_table(params: Dict) -> Dict[int, float]:
        """
        Generate the ROI table that will be used by Hyperopt
        This implementation generates the default legacy Freqtrade ROI tables.
        Change it if you need different number of steps in the generated
        ROI tables or other structure of the ROI tables.
        Please keep it aligned with parameters in the 'roi' optimization
        hyperspace defined by the roi_space method.
        """
        roi_table = {}
        roi_table[0] = params['roi_p1'] + params['roi_p2'] + params['roi_p3']
        roi_table[params['roi_t3']] = params['roi_p1'] + params['roi_p2']
        roi_table[params['roi_t3'] + params['roi_t2']] = params['roi_p1']
        roi_table[params['roi_t3'] + params['roi_t2'] + params['roi_t1']] = 0

        return roi_table

    @staticmethod
    def roi_space() -> List[Dimension]:
        """
        Values to search for each ROI steps
        Override it if you need some different ranges for the parameters in the
        'roi' optimization hyperspace.
        Please keep it aligned with the implementation of the
        generate_roi_table method.
        """
        return [
            Integer(10, 120, name='roi_t1'),
            Integer(10, 60, name='roi_t2'),
            Integer(10, 40, name='roi_t3'),
            Real(0.01, 0.04, name='roi_p1'),
            Real(0.01, 0.07, name='roi_p2'),
            Real(0.01, 0.20, name='roi_p3'),
        ]

    @staticmethod
    def stoploss_space() -> List[Dimension]:
        """
        Stoploss Value to search
        Override it if you need some different range for the parameter in the
        'stoploss' optimization hyperspace.
        """
        return [
            Real(-0.35, -0.02, name='stoploss'),
        ]
@@ -55,7 +55,16 @@ class {{ hyperopt }}(IHyperOpt):

             # TRIGGERS
             if 'trigger' in params:
-                {{ buy_triggers | indent(16) }}
+                if params['trigger'] == 'bb_lower':
+                    conditions.append(dataframe['close'] < dataframe['bb_lowerband'])
+                if params['trigger'] == 'macd_cross_signal':
+                    conditions.append(qtpylib.crossed_above(
+                        dataframe['macd'], dataframe['macdsignal']
+                    ))
+                if params['trigger'] == 'sar_reversal':
+                    conditions.append(qtpylib.crossed_above(
+                        dataframe['close'], dataframe['sar']
+                    ))

             # Check that the candle had volume
             conditions.append(dataframe['volume'] > 0)
@@ -94,7 +103,16 @@ class {{ hyperopt }}(IHyperOpt):

             # TRIGGERS
             if 'sell-trigger' in params:
-                {{ sell_triggers | indent(16) }}
+                if params['sell-trigger'] == 'sell-bb_upper':
+                    conditions.append(dataframe['close'] > dataframe['bb_upperband'])
+                if params['sell-trigger'] == 'sell-macd_cross_signal':
+                    conditions.append(qtpylib.crossed_above(
+                        dataframe['macdsignal'], dataframe['macd']
+                    ))
+                if params['sell-trigger'] == 'sell-sar_reversal':
+                    conditions.append(qtpylib.crossed_above(
+                        dataframe['sar'], dataframe['close']
+                    ))

             # Check that the candle had volume
             conditions.append(dataframe['volume'] > 0)
@@ -1,5 +1,6 @@
 from datetime import datetime
 from math import exp
+from typing import Dict

 from pandas import DataFrame

@@ -35,6 +36,7 @@ class SampleHyperOptLoss(IHyperOptLoss):
     @staticmethod
     def hyperopt_loss_function(results: DataFrame, trade_count: int,
                                min_date: datetime, max_date: datetime,
+                               config: Dict, processed: Dict[str, DataFrame],
                                *args, **kwargs) -> float:
        """
        Objective function, returns smaller number for better results
@@ -40,7 +40,7 @@
     "# Location of the data\n",
     "data_location = Path(config['user_data_dir'], 'data', 'binance')\n",
     "# Pair to analyze - Only use one pair here\n",
-    "pair = \"BTC_USDT\""
+    "pair = \"BTC/USDT\""
    ]
   },
   {
@@ -54,7 +54,9 @@
     "\n",
     "candles = load_pair_history(datadir=data_location,\n",
     "                            timeframe=config[\"timeframe\"],\n",
-    "                            pair=pair)\n",
+    "                            pair=pair,\n",
+    "                            data_format = \"hdf5\",\n",
+    "                            )\n",
     "\n",
     "# Confirm success\n",
     "print(\"Loaded \" + str(len(candles)) + f\" rows of data for {pair} from {data_location}\")\n",
@@ -2,9 +2,9 @@
 -r requirements.txt

 # Required for hyperopt
-scipy==1.6.0
+scipy==1.6.1
 scikit-learn==0.24.1
 scikit-optimize==0.8.1
 filelock==3.0.12
-joblib==1.0.0
+joblib==1.0.1
 progressbar2==3.53.1
@@ -1,10 +1,12 @@
-numpy==1.20.0
-pandas==1.2.1
+numpy==1.20.1
+pandas==1.2.2

-ccxt==1.41.62
+ccxt==1.42.19
+# Pin cryptography for now due to rust build errors with piwheels
+cryptography==3.4.6
 aiohttp==3.7.3
-SQLAlchemy==1.3.22
-python-telegram-bot==13.1
+SQLAlchemy==1.3.23
+python-telegram-bot==13.3
 arrow==0.17.0
 cachetools==4.2.1
 requests==2.25.1
@@ -12,14 +14,14 @@ urllib3==1.26.3
 wrapt==1.12.1
 jsonschema==3.2.0
 TA-Lib==0.4.19
-tabulate==0.8.7
+tabulate==0.8.9
 pycoingecko==1.4.0
 jinja2==2.11.3
 tables==3.6.1
 blosc==1.10.2

 # find first, C search in arrays
-py_find_1st==1.1.4
+py_find_1st==1.1.5

 # Load ticker files 30% faster
 python-rapidjson==1.0
@@ -29,7 +31,7 @@ sdnotify==0.3.2

 # API Server
 fastapi==0.63.0
-uvicorn==0.13.3
+uvicorn==0.13.4
 pyjwt==2.0.1
 aiofiles==0.6.0

@@ -37,4 +39,4 @@ aiofiles==0.6.0
 colorama==0.4.4
 # Building config files interactively
 questionary==1.9.0
-prompt-toolkit==3.0.14
+prompt-toolkit==3.0.16
setup.py
@@ -19,7 +19,7 @@ if readme_file.is_file():
     readme_long = (Path(__file__).parent / "README.md").read_text()

 # Requirements used for submodules
-api = ['flask', 'flask-jwt-extended', 'flask-cors']
+api = ['fastapi', 'uvicorn', 'pyjwt', 'aiofiles']
 plot = ['plotly>=4.0']
 hyperopt = [
     'scipy',
@@ -8,11 +8,12 @@ from pandas import DataFrame, DateOffset, Timestamp, to_datetime
 from freqtrade.configuration import TimeRange
 from freqtrade.constants import LAST_BT_RESULT_FN
 from freqtrade.data.btanalysis import (BT_DATA_COLUMNS, BT_DATA_COLUMNS_MID, BT_DATA_COLUMNS_OLD,
-                                       analyze_trade_parallelism, calculate_market_change,
-                                       calculate_max_drawdown, combine_dataframes_with_mean,
-                                       create_cum_profit, extract_trades_of_period,
-                                       get_latest_backtest_filename, get_latest_hyperopt_file,
-                                       load_backtest_data, load_trades, load_trades_from_db)
+                                       analyze_trade_parallelism, calculate_csum,
+                                       calculate_market_change, calculate_max_drawdown,
+                                       combine_dataframes_with_mean, create_cum_profit,
+                                       extract_trades_of_period, get_latest_backtest_filename,
+                                       get_latest_hyperopt_file, load_backtest_data, load_trades,
+                                       load_trades_from_db)
 from freqtrade.data.history import load_data, load_pair_history
 from tests.conftest import create_mock_trades
 from tests.conftest_trades import MOCK_TRADE_COUNT

@@ -284,6 +285,20 @@ def test_calculate_max_drawdown(testdatadir):
         drawdown, h, low = calculate_max_drawdown(DataFrame())


+def test_calculate_csum(testdatadir):
+    filename = testdatadir / "backtest-result_test.json"
+    bt_data = load_backtest_data(filename)
+    csum_min, csum_max = calculate_csum(bt_data)
+
+    assert isinstance(csum_min, float)
+    assert isinstance(csum_max, float)
+    assert csum_min < 0.01
+    assert csum_max > 0.02
+
+    with pytest.raises(ValueError, match='Trade dataframe empty.'):
+        csum_min, csum_max = calculate_csum(DataFrame())
+
+
 def test_calculate_max_drawdown2():
     values = [0.011580, 0.010048, 0.011340, 0.012161, 0.010416, 0.010009, 0.020024,
               -0.024662, -0.022350, 0.020496, -0.029859, -0.030511, 0.010041, 0.010872,
@@ -209,7 +209,7 @@ def test_nonexisting_stoploss(mocker, edge_conf):
     assert edge.stoploss('N/O') == -0.1


-def test_stake_amount(mocker, edge_conf):
+def test_edge_stake_amount(mocker, edge_conf):
     freqtrade = get_patched_freqtradebot(mocker, edge_conf)
     edge = Edge(edge_conf, freqtrade.exchange, freqtrade.strategy)
     mocker.patch('freqtrade.edge.Edge._cached_pairs', mocker.PropertyMock(
@@ -217,20 +217,33 @@ def test_stake_amount(mocker, edge_conf):
         'E/F': PairInfo(-0.02, 0.66, 3.71, 0.50, 1.71, 10, 60),
     }
     ))
-    free = 100
-    total = 100
-    in_trade = 25
-    assert edge.stake_amount('E/F', free, total, in_trade) == 31.25
-
-    free = 20
-    total = 100
-    in_trade = 25
-    assert edge.stake_amount('E/F', free, total, in_trade) == 20
-
-    free = 0
-    total = 100
-    in_trade = 25
-    assert edge.stake_amount('E/F', free, total, in_trade) == 0
+    assert edge._capital_ratio == 0.5
+    assert edge.stake_amount('E/F', free_capital=100, total_capital=100,
+                             capital_in_trade=25) == 31.25
+
+    assert edge.stake_amount('E/F', free_capital=20, total_capital=100,
+                             capital_in_trade=25) == 20
+
+    assert edge.stake_amount('E/F', free_capital=0, total_capital=100,
+                             capital_in_trade=25) == 0
+
+    # Test with increased allowed_risk
+    # Result should be no more than allowed capital
+    edge._allowed_risk = 0.4
+    edge._capital_ratio = 0.5
+    assert edge.stake_amount('E/F', free_capital=100, total_capital=100,
+                             capital_in_trade=25) == 62.5
+
+    assert edge.stake_amount('E/F', free_capital=100, total_capital=100,
+                             capital_in_trade=0) == 50
+
+    edge._capital_ratio = 1
+    # Full capital is available
+    assert edge.stake_amount('E/F', free_capital=100, total_capital=100,
+                             capital_in_trade=0) == 100
+    # Full capital is available
+    assert edge.stake_amount('E/F', free_capital=0, total_capital=100,
+                             capital_in_trade=0) == 0


 def test_nonexisting_stake_amount(mocker, edge_conf):
@@ -5,10 +5,12 @@ However, these tests should give a good idea to determine if a new exchange is
 suitable to run with freqtrade.
 """

+from datetime import datetime, timedelta, timezone
 from pathlib import Path

 import pytest

+from freqtrade.exchange import timeframe_to_minutes, timeframe_to_prev_date
 from freqtrade.resolvers.exchange_resolver import ExchangeResolver
 from tests.conftest import get_default_conf

@@ -122,7 +124,10 @@ class TestCCXTExchange():
         assert len(ohlcv[pair_tf]) == len(exchange.klines(pair_tf))
         # assert len(exchange.klines(pair_tf)) > 200
         # Assume 90% uptime ...
-        assert len(exchange.klines(pair_tf)) > exchange._ohlcv_candle_limit * 0.90
+        assert len(exchange.klines(pair_tf)) > exchange.ohlcv_candle_limit(timeframe) * 0.90
+        # Check if last-timeframe is within the last 2 intervals
+        now = datetime.now(timezone.utc) - timedelta(minutes=(timeframe_to_minutes(timeframe) * 2))
+        assert exchange.klines(pair_tf).iloc[-1]['date'] >= timeframe_to_prev_date(timeframe, now)

         # TODO: tests fetch_trades (?)

@@ -1417,7 +1417,7 @@ def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name):
     exchange._async_get_candle_history = Mock(wraps=mock_candle_hist)
     # one_call calculation * 1.8 should do 2 calls

-    since = 5 * 60 * exchange._ft_has['ohlcv_candle_limit'] * 1.8
+    since = 5 * 60 * exchange.ohlcv_candle_limit('5m') * 1.8
     ret = exchange.get_historic_ohlcv(pair, "5m", int((
         arrow.utcnow().int_timestamp - since) * 1000))

@@ -1473,7 +1473,7 @@ def test_get_historic_ohlcv_as_df(default_conf, mocker, exchange_name):
     exchange._async_get_candle_history = Mock(wraps=mock_candle_hist)
     # one_call calculation * 1.8 should do 2 calls

-    since = 5 * 60 * exchange._ft_has['ohlcv_candle_limit'] * 1.8
+    since = 5 * 60 * exchange.ohlcv_candle_limit('5m') * 1.8
     ret = exchange.get_historic_ohlcv_as_df(pair, "5m", int((
         arrow.utcnow().int_timestamp - since) * 1000))

|
|||||||
def test_cancel_order(default_conf, mocker, exchange_name):
|
def test_cancel_order(default_conf, mocker, exchange_name):
|
||||||
default_conf['dry_run'] = False
|
default_conf['dry_run'] = False
|
||||||
api_mock = MagicMock()
|
api_mock = MagicMock()
|
||||||
api_mock.cancel_order = MagicMock(return_value=123)
|
api_mock.cancel_order = MagicMock(return_value={'id': '123'})
|
||||||
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
|
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
|
||||||
assert exchange.cancel_order(order_id='_', pair='TKN/BTC') == 123
|
assert exchange.cancel_order(order_id='_', pair='TKN/BTC') == {'id': '123'}
|
||||||
|
|
||||||
with pytest.raises(InvalidOrderException):
|
with pytest.raises(InvalidOrderException):
|
||||||
api_mock.cancel_order = MagicMock(side_effect=ccxt.InvalidOrder("Did not find order"))
|
api_mock.cancel_order = MagicMock(side_effect=ccxt.InvalidOrder("Did not find order"))
|
||||||
@ -2091,9 +2091,9 @@ def test_cancel_order(default_conf, mocker, exchange_name):
|
|||||||
def test_cancel_stoploss_order(default_conf, mocker, exchange_name):
|
def test_cancel_stoploss_order(default_conf, mocker, exchange_name):
|
||||||
default_conf['dry_run'] = False
|
default_conf['dry_run'] = False
|
||||||
api_mock = MagicMock()
|
api_mock = MagicMock()
|
||||||
api_mock.cancel_order = MagicMock(return_value=123)
|
api_mock.cancel_order = MagicMock(return_value={'id': '123'})
|
||||||
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
|
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
|
||||||
assert exchange.cancel_stoploss_order(order_id='_', pair='TKN/BTC') == 123
|
assert exchange.cancel_stoploss_order(order_id='_', pair='TKN/BTC') == {'id': '123'}
|
||||||
|
|
||||||
with pytest.raises(InvalidOrderException):
|
with pytest.raises(InvalidOrderException):
|
||||||
api_mock.cancel_order = MagicMock(side_effect=ccxt.InvalidOrder("Did not find order"))
|
api_mock.cancel_order = MagicMock(side_effect=ccxt.InvalidOrder("Did not find order"))
|
||||||
@ -2418,6 +2418,19 @@ def test_get_markets_error(default_conf, mocker):
|
|||||||
ex.get_markets('LTC', 'USDT', True, False)
|
ex.get_markets('LTC', 'USDT', True, False)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("exchange_name", EXCHANGES)
|
||||||
|
def test_ohlcv_candle_limit(default_conf, mocker, exchange_name):
|
||||||
|
exchange = get_patched_exchange(mocker, default_conf, id=exchange_name)
|
||||||
|
timeframes = ('1m', '5m', '1h')
|
||||||
|
expected = exchange._ft_has['ohlcv_candle_limit']
|
||||||
|
for timeframe in timeframes:
|
||||||
|
if 'ohlcv_candle_limit_per_timeframe' in exchange._ft_has:
|
||||||
|
expected = exchange._ft_has['ohlcv_candle_limit_per_timeframe'][timeframe]
|
||||||
|
# This should only run for bittrex
|
||||||
|
assert exchange_name == 'bittrex'
|
||||||
|
assert exchange.ohlcv_candle_limit(timeframe) == expected
|
||||||
|
|
||||||
|
|
||||||
def test_timeframe_to_minutes():
|
def test_timeframe_to_minutes():
|
||||||
assert timeframe_to_minutes("5m") == 5
|
assert timeframe_to_minutes("5m") == 5
|
||||||
assert timeframe_to_minutes("10m") == 10
|
assert timeframe_to_minutes("10m") == 10
|
||||||
@ -2462,6 +2475,9 @@ def test_timeframe_to_prev_date():
|
|||||||
|
|
||||||
date = datetime.now(tz=timezone.utc)
|
date = datetime.now(tz=timezone.utc)
|
||||||
assert timeframe_to_prev_date("5m") < date
|
assert timeframe_to_prev_date("5m") < date
|
||||||
|
# Does not round
|
||||||
|
time = datetime(2019, 8, 12, 13, 20, 0, tzinfo=timezone.utc)
|
||||||
|
assert timeframe_to_prev_date('5m', time) == time
|
||||||
|
|
||||||
|
|
||||||
def test_timeframe_to_next_date():
|
def test_timeframe_to_next_date():
|
||||||
|
@ -341,12 +341,14 @@ def test_backtesting_start(default_conf, mocker, testdatadir, caplog) -> None:
|
|||||||
mocker.patch('freqtrade.optimize.backtesting.Backtesting.backtest')
|
mocker.patch('freqtrade.optimize.backtesting.Backtesting.backtest')
|
||||||
mocker.patch('freqtrade.optimize.backtesting.generate_backtest_stats')
|
mocker.patch('freqtrade.optimize.backtesting.generate_backtest_stats')
|
||||||
mocker.patch('freqtrade.optimize.backtesting.show_backtest_results')
|
mocker.patch('freqtrade.optimize.backtesting.show_backtest_results')
|
||||||
|
sbs = mocker.patch('freqtrade.optimize.backtesting.store_backtest_stats')
|
||||||
mocker.patch('freqtrade.plugins.pairlistmanager.PairListManager.whitelist',
|
mocker.patch('freqtrade.plugins.pairlistmanager.PairListManager.whitelist',
|
||||||
PropertyMock(return_value=['UNITTEST/BTC']))
|
PropertyMock(return_value=['UNITTEST/BTC']))
|
||||||
|
|
||||||
default_conf['timeframe'] = '1m'
|
default_conf['timeframe'] = '1m'
|
||||||
default_conf['datadir'] = testdatadir
|
default_conf['datadir'] = testdatadir
|
||||||
default_conf['export'] = None
|
default_conf['export'] = 'trades'
|
||||||
|
default_conf['exportfilename'] = 'export.txt'
|
||||||
default_conf['timerange'] = '-1510694220'
|
default_conf['timerange'] = '-1510694220'
|
||||||
|
|
||||||
backtesting = Backtesting(default_conf)
|
backtesting = Backtesting(default_conf)
|
||||||
@ -361,6 +363,7 @@ def test_backtesting_start(default_conf, mocker, testdatadir, caplog) -> None:
|
|||||||
assert log_has(line, caplog)
|
assert log_has(line, caplog)
|
||||||
assert backtesting.strategy.dp._pairlists is not None
|
assert backtesting.strategy.dp._pairlists is not None
|
||||||
assert backtesting.strategy.bot_loop_start.call_count == 1
|
assert backtesting.strategy.bot_loop_start.call_count == 1
|
||||||
|
assert sbs.call_count == 1
|
||||||
|
|
||||||
|
|
||||||
def test_backtesting_start_no_data(default_conf, mocker, caplog, testdatadir) -> None:
|
def test_backtesting_start_no_data(default_conf, mocker, caplog, testdatadir) -> None:
|
||||||
|
@@ -83,7 +83,7 @@ def test_stoploss_guard(mocker, default_conf, fee, caplog):
         "method": "StoplossGuard",
         "lookback_period": 60,
         "stop_duration": 40,
-        "trade_limit": 2
+        "trade_limit": 3
     }]
     freqtrade = get_patched_freqtradebot(mocker, default_conf)
     message = r"Trading stopped due to .*"
@@ -136,7 +136,7 @@ def test_stoploss_guard_perpair(mocker, default_conf, fee, caplog, only_per_pair
     default_conf['protections'] = [{
         "method": "StoplossGuard",
         "lookback_period": 60,
-        "trade_limit": 1,
+        "trade_limit": 2,
         "stop_duration": 60,
         "only_per_pair": only_per_pair
     }]
@@ -11,9 +11,11 @@ import uvicorn
 from fastapi import FastAPI
 from fastapi.exceptions import HTTPException
 from fastapi.testclient import TestClient
+from numpy import isnan
 from requests.auth import _basic_auth_str

 from freqtrade.__init__ import __version__
+from freqtrade.exceptions import ExchangeError
 from freqtrade.loggers import setup_logging, setup_logging_pre
 from freqtrade.persistence import PairLocks, Trade
 from freqtrade.rpc import RPC

@@ -295,7 +297,7 @@ def test_api_run(default_conf, mocker, caplog):
                       "Please make sure that this is intentional!", caplog)
     assert log_has_re("SECURITY WARNING - `jwt_secret_key` seems to be default.*", caplog)

-    # Test crashing flask
+    # Test crashing API server
     caplog.clear()
     mocker.patch('freqtrade.rpc.api_server.webserver.UvicornServer',
                  MagicMock(side_effect=Exception))

@@ -789,6 +791,15 @@ def test_api_status(botclient, mocker, ticker, fee, markets):
         'exchange': 'bittrex',
     }]

+    mocker.patch('freqtrade.freqtradebot.FreqtradeBot.get_sell_rate',
+                 MagicMock(side_effect=ExchangeError("Pair 'ETH/BTC' not available")))
+
+    rc = client_get(client, f"{BASE_URI}/status")
+    assert_response(rc)
+    resp_values = rc.json()
+    assert len(resp_values) == 1
+    assert isnan(resp_values[0]['profit_abs'])
+

 def test_api_version(botclient):
     ftbot, client = botclient
@@ -519,7 +519,7 @@ def test_telegram_balance_handle(default_conf, update, mocker, rpc_balance, tick
     assert '*EUR:*' in result
     assert 'Balance:' in result
     assert 'Est. BTC:' in result
     assert 'BTC: 12.00000000' in result
     assert '*XRP:* not showing <1$ amount' in result

@@ -1205,7 +1205,7 @@ def test_send_msg_buy_notification(default_conf, mocker, caplog) -> None:
                '*Amount:* `1333.33333333`\n' \
                '*Open Rate:* `0.00001099`\n' \
                '*Current Rate:* `0.00001099`\n' \
-               '*Total:* `(0.001000 BTC, 12.345 USD)`'
+               '*Total:* `(0.00100000 BTC, 12.345 USD)`'

     freqtradebot.config['telegram']['notification_settings'] = {'buy': 'off'}
     caplog.clear()
@@ -1389,7 +1389,7 @@ def test_send_msg_buy_notification_no_fiat(default_conf, mocker) -> None:
                '*Amount:* `1333.33333333`\n'
                '*Open Rate:* `0.00001099`\n'
                '*Current Rate:* `0.00001099`\n'
-               '*Total:* `(0.001000 BTC)`')
+               '*Total:* `(0.00100000 BTC)`')


 def test_send_msg_sell_notification_no_fiat(default_conf, mocker) -> None:
@@ -743,18 +743,18 @@ def test_set_loggers_journald_importerror(mocker, import_fails):
     logger.handlers = orig_handlers


-def test_set_logfile(default_conf, mocker):
+def test_set_logfile(default_conf, mocker, tmpdir):
     patched_configuration_load_config_file(mocker, default_conf)
+    f = Path(tmpdir / "test_file.log")
+    assert not f.is_file()
     arglist = [
-        'trade', '--logfile', 'test_file.log',
+        'trade', '--logfile', str(f),
     ]
     args = Arguments(arglist).get_parsed_arg()
     configuration = Configuration(args)
     validated_conf = configuration.load_config()

-    assert validated_conf['logfile'] == "test_file.log"
-    f = Path("test_file.log")
+    assert validated_conf['logfile'] == str(f)
     assert f.is_file()
     try:
         f.unlink()
@@ -6,9 +6,31 @@ from unittest.mock import MagicMock

 import pytest

-from freqtrade.misc import (file_dump_json, file_load_json, format_ms_time, pair_to_filename,
-                            plural, render_template, render_template_with_fallback,
-                            safe_value_fallback, safe_value_fallback2, shorten_date)
+from freqtrade.misc import (decimals_per_coin, file_dump_json, file_load_json, format_ms_time,
+                            pair_to_filename, plural, render_template,
+                            render_template_with_fallback, round_coin_value, safe_value_fallback,
+                            safe_value_fallback2, shorten_date)
+
+
+def test_decimals_per_coin():
+    assert decimals_per_coin('USDT') == 3
+    assert decimals_per_coin('EUR') == 3
+    assert decimals_per_coin('BTC') == 8
+    assert decimals_per_coin('ETH') == 5
+
+
+def test_round_coin_value():
+    assert round_coin_value(222.222222, 'USDT') == '222.222 USDT'
+    assert round_coin_value(222.2, 'USDT') == '222.200 USDT'
+    assert round_coin_value(222.12745, 'EUR') == '222.127 EUR'
+    assert round_coin_value(0.1274512123, 'BTC') == '0.12745121 BTC'
+    assert round_coin_value(0.1274512123, 'ETH') == '0.12745 ETH'
+
+    assert round_coin_value(222.222222, 'USDT', False) == '222.222'
+    assert round_coin_value(222.2, 'USDT', False) == '222.200'
+    assert round_coin_value(222.12745, 'EUR', False) == '222.127'
+    assert round_coin_value(0.1274512123, 'BTC', False) == '0.12745121'
+    assert round_coin_value(0.1274512123, 'ETH', False) == '0.12745'


 def test_shorten_date() -> None:
@@ -1074,7 +1074,7 @@ def test_get_best_pair(fee):


 @pytest.mark.usefixtures("init_persistence")
-def test_update_order_from_ccxt():
+def test_update_order_from_ccxt(caplog):
     # Most basic order return (only has orderid)
     o = Order.parse_from_ccxt_object({'id': '1234'}, 'ETH/BTC', 'buy')
     assert isinstance(o, Order)
@@ -1120,6 +1120,14 @@ def test_update_order_from_ccxt():
     with pytest.raises(DependencyException, match=r"Order-id's don't match"):
         o.update_from_ccxt_object(ccxt_order)

+    message = "aaaa is not a valid response object."
+    assert not log_has(message, caplog)
+    Order.update_orders([o], 'aaaa')
+    assert log_has(message, caplog)
+
+    # Call regular update - shouldn't fail.
+    Order.update_orders([o], {'id': '1234'})
+

 @pytest.mark.usefixtures("init_persistence")
 def test_select_order(fee):