Merge branch 'develop' into spice-rack

robcaulk 2022-09-25 11:37:38 +02:00
commit aca03e38f6
148 changed files with 3787 additions and 930 deletions

View File

@ -272,6 +272,16 @@ jobs:
pip install pyaml
python build_helpers/pre_commit_update.py
pre-commit:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
- uses: pre-commit/action@v3.0.0
docs_check:
runs-on: ubuntu-20.04
steps:
@ -302,7 +312,7 @@ jobs:
# Notify only once - when CI completes (and after deploy) in case it's successful
notify-complete:
needs: [ build_linux, build_macos, build_windows, docs_check, mypy_version_check ]
needs: [ build_linux, build_macos, build_windows, docs_check, mypy_version_check, pre-commit ]
runs-on: ubuntu-20.04
# Discord notification can't handle schedule events
if: (github.event_name != 'schedule')
@ -327,7 +337,7 @@ jobs:
webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
deploy:
needs: [ build_linux, build_macos, build_windows, docs_check, mypy_version_check ]
needs: [ build_linux, build_macos, build_windows, docs_check, mypy_version_check, pre-commit ]
runs-on: ubuntu-20.04
if: (github.event_name == 'push' || github.event_name == 'schedule' || github.event_name == 'release') && github.repository == 'freqtrade/freqtrade'

View File

@ -77,7 +77,8 @@
"indicator_periods_candles": [
10,
20
]
],
"plot_feature_importance": false
},
"data_split_parameters": {
"test_size": 0.33,

View File

@ -172,7 +172,24 @@
"jwt_secret_key": "somethingrandom",
"CORS_origins": [],
"username": "freqtrader",
"password": "SuperSecurePassword"
"password": "SuperSecurePassword",
"ws_token": "secret_ws_t0ken."
},
"external_message_consumer": {
"enabled": false,
"producers": [
{
"name": "default",
"host": "127.0.0.2",
"port": 8080,
"ws_token": "secret_ws_t0ken."
}
],
"wait_timeout": 300,
"ping_timeout": 10,
"sleep_time": 10,
"remove_entry_exit_signals": false,
"message_size_limit": 8
},
"bot_name": "freqtrade",
"db_url": "sqlite:///tradesv3.sqlite",

View File

@ -6,4 +6,3 @@ FROM ${sourceimage}:${sourcetag}
COPY requirements-freqai.txt /freqtrade/
RUN pip install -r requirements-freqai.txt --user --no-cache-dir

View File

@ -17,6 +17,7 @@ from typing import Any, Dict
from pandas import DataFrame
from freqtrade.constants import Config
from freqtrade.optimize.hyperopt import IHyperOptLoss
TARGET_TRADES = 600
@ -31,7 +32,7 @@ class SuperDuperHyperOptLoss(IHyperOptLoss):
@staticmethod
def hyperopt_loss_function(results: DataFrame, trade_count: int,
min_date: datetime, max_date: datetime,
config: Dict, processed: Dict[str, DataFrame],
config: Config, processed: Dict[str, DataFrame],
backtest_stats: Dict[str, Any],
*args, **kwargs) -> float:
"""

View File

@ -225,14 +225,16 @@ Mandatory parameters are marked as **Required**, which means that they are requi
| `webhook.webhookexitcancel` | Payload to send on exit order cancel. Only required if `webhook.enabled` is `true`. See the [webhook documentation](webhook-config.md) for more details. <br> **Datatype:** String
| `webhook.webhookexitfill` | Payload to send on exit order filled. Only required if `webhook.enabled` is `true`. See the [webhook documentation](webhook-config.md) for more details. <br> **Datatype:** String
| `webhook.webhookstatus` | Payload to send on status calls. Only required if `webhook.enabled` is `true`. See the [webhook documentation](webhook-config.md) for more details. <br> **Datatype:** String
| | **Rest API / FreqUI**
| | **Rest API / FreqUI / Producer-Consumer**
| `api_server.enabled` | Enable usage of API Server. See the [API Server documentation](rest-api.md) for more details. <br> **Datatype:** Boolean
| `api_server.listen_ip_address` | Bind IP address. See the [API Server documentation](rest-api.md) for more details. <br> **Datatype:** IPv4
| `api_server.listen_port` | Bind Port. See the [API Server documentation](rest-api.md) for more details. <br>**Datatype:** Integer between 1024 and 65535
| `api_server.verbosity` | Logging verbosity. `info` will print all RPC Calls, while "error" will only display errors. <br>**Datatype:** Enum, either `info` or `error`. Defaults to `info`.
| `api_server.username` | Username for API server. See the [API Server documentation](rest-api.md) for more details. <br>**Keep it secret, do not disclose publicly.**<br> **Datatype:** String
| `api_server.password` | Password for API server. See the [API Server documentation](rest-api.md) for more details. <br>**Keep it secret, do not disclose publicly.**<br> **Datatype:** String
| `api_server.ws_token` | API token for the Message WebSocket. See the [API Server documentation](rest-api.md) for more details. <br>**Keep it secret, do not disclose publicly.** <br> **Datatype:** String
| `bot_name` | Name of the bot. Passed via API to a client - can be shown to distinguish / name bots.<br> *Defaults to `freqtrade`*<br> **Datatype:** String
| `external_message_consumer` | Enable consuming messages from an upstream bot instance. See [Producer/Consumer mode](producer-consumer.md) for more details. <br> **Datatype:** Dict
| | **Other**
| `initial_state` | Defines the initial application state. If set to stopped, then the bot has to be explicitly started via `/start` RPC command. <br>*Defaults to `stopped`.* <br> **Datatype:** Enum, either `stopped` or `running`
| `force_entry_enable` | Enables the RPC Commands to force a Trade entry. More information below. <br> **Datatype:** Boolean
@ -659,17 +661,7 @@ You should also make sure to read the [Exchanges](exchanges.md) section of the d
### Using proxy with Freqtrade
To use a proxy with freqtrade, add the kwarg `"aiohttp_trust_env"=true` to the `"ccxt_async_kwargs"` dict in the exchange section of the configuration.
An example for this can be found in `config_examples/config_full.example.json`
``` json
"ccxt_async_config": {
"aiohttp_trust_env": true
}
```
Then, export your proxy settings using the variables `"HTTP_PROXY"` and `"HTTPS_PROXY"` set to the appropriate values
To use a proxy with freqtrade, export your proxy settings using the variables `"HTTP_PROXY"` and `"HTTPS_PROXY"` set to the appropriate values.
``` bash
export HTTP_PROXY="http://addr:port"
@ -677,6 +669,20 @@ export HTTPS_PROXY="http://addr:port"
freqtrade
```
#### Proxy just exchange requests
To use a proxy only for exchange connections (Telegram and Coingecko requests are not affected), you can define the proxies as part of the ccxt configuration.
``` json
"ccxt_config": {
"aiohttp_proxy": "http://addr:port",
"proxies": {
"http": "http://addr:port",
"https": "http://addr:port"
},
}
```
## Next step
Now that you have configured your config.json, the next step is to [start your bot](bot-usage.md).

View File

@ -26,7 +26,7 @@ usage: freqtrade download-data [-h] [-v] [--logfile FILE] [-V] [-c PATH]
[--timerange TIMERANGE] [--dl-trades]
[--exchange EXCHANGE]
[-t TIMEFRAMES [TIMEFRAMES ...]] [--erase]
[--data-format-ohlcv {json,jsongz,hdf5}]
[--data-format-ohlcv {json,jsongz,hdf5,feather,parquet}]
[--data-format-trades {json,jsongz,hdf5}]
[--trading-mode {spot,margin,futures}]
[--prepend]
@ -55,7 +55,7 @@ optional arguments:
list. Default: `1m 5m`.
--erase Clean all existing data for the selected
exchange/pairs/timeframes.
--data-format-ohlcv {json,jsongz,hdf5}
--data-format-ohlcv {json,jsongz,hdf5,feather,parquet}
Storage format for downloaded candle (OHLCV) data.
(default: `json`).
--data-format-trades {json,jsongz,hdf5}
@ -76,7 +76,7 @@ Common arguments:
`userdir/config.json` or `config.json` whichever
exists). Multiple --config options may be used. Can be
set to `-` to read config from stdin.
-d PATH, --datadir PATH
-d PATH, --datadir PATH, --data-dir PATH
Path to directory with historical backtesting data.
--userdir PATH, --user-data-dir PATH
Path to userdata directory.
@ -179,9 +179,11 @@ freqtrade download-data --exchange binance --pairs ETH/USDT XRP/USDT BTC/USDT --
Freqtrade currently supports the following data-formats:
* `json` (plain "text" json files)
* `jsongz` (a gzip-zipped version of json files)
* `hdf5` (a high performance datastore)
* `json` - plain "text" json files
* `jsongz` - a gzip-zipped version of json files
* `hdf5` - a high performance datastore
* `feather` - a dataformat based on Apache Arrow
* `parquet` - columnar datastore
By default, OHLCV data is stored as `json` data, while trades data is stored as `jsongz` data.
@ -200,38 +202,74 @@ If the default data-format has been changed during download, then the keys `data
!!! Note
You can convert between data-formats using the [convert-data](#sub-command-convert-data) and [convert-trade-data](#sub-command-convert-trade-data) methods.
#### Dataformat comparison
The following comparison was made using the data listed below and the linux `time` command.
```
Found 6 pair / timeframe combinations.
+----------+-------------+--------+---------------------+---------------------+
| Pair | Timeframe | Type | From | To |
|----------+-------------+--------+---------------------+---------------------|
| BTC/USDT | 5m | spot | 2017-08-17 04:00:00 | 2022-09-13 19:25:00 |
| ETH/USDT | 1m | spot | 2017-08-17 04:00:00 | 2022-09-13 19:26:00 |
| BTC/USDT | 1m | spot | 2017-08-17 04:00:00 | 2022-09-13 19:30:00 |
| XRP/USDT | 5m | spot | 2018-05-04 08:10:00 | 2022-09-13 19:15:00 |
| XRP/USDT | 1m | spot | 2018-05-04 08:11:00 | 2022-09-13 19:22:00 |
| ETH/USDT | 5m | spot | 2017-08-17 04:00:00 | 2022-09-13 19:20:00 |
+----------+-------------+--------+---------------------+---------------------+
```
Timings have been taken in a not very scientific way with the following command, which forces reading the data into memory.
``` bash
time freqtrade list-data --show-timerange --data-format-ohlcv <dataformat>
```
| Format | Size | Timing |
|------------|-------------|-------------|
| `json` | 149MB | 25.6s |
| `jsongz` | 39MB | 27s |
| `hdf5` | 145MB | 3.9s |
| `feather` | 72MB | 3.5s |
| `parquet` | 83MB | 3.8s |
Size has been taken from the BTC/USDT 1m spot combination for the timerange specified above.
For the best mix of performance and size, we recommend using either feather or parquet.
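If you want to inspect the stored candles yourself, both formats can be read directly with pandas. The path below is a hypothetical example and depends on your exchange, pair and timeframe:

``` python
import pandas as pd

# Hypothetical path - adjust exchange, pair, timeframe and extension to your setup
df = pd.read_feather("user_data/data/binance/BTC_USDT-5m.feather")
# Parquet files can be read the same way:
# df = pd.read_parquet("user_data/data/binance/BTC_USDT-5m.parquet")
print(df.tail())
```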
#### Sub-command convert data
```
usage: freqtrade convert-data [-h] [-v] [--logfile FILE] [-V] [-c PATH]
[-d PATH] [--userdir PATH]
[-p PAIRS [PAIRS ...]] --format-from
{json,jsongz,hdf5} --format-to
{json,jsongz,hdf5} [--erase]
[-t {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} ...]]
{json,jsongz,hdf5,feather,parquet} --format-to
{json,jsongz,hdf5,feather,parquet} [--erase]
[--exchange EXCHANGE]
[-t TIMEFRAMES [TIMEFRAMES ...]]
[--trading-mode {spot,margin,futures}]
[--candle-types {spot,,futures,mark,index,premiumIndex,funding_rate} [{spot,,futures,mark,index,premiumIndex,funding_rate} ...]]
[--candle-types {spot,futures,mark,index,premiumIndex,funding_rate} [{spot,futures,mark,index,premiumIndex,funding_rate} ...]]
optional arguments:
-h, --help show this help message and exit
-p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
Limit command to these pairs. Pairs are space-
separated.
--format-from {json,jsongz,hdf5}
--format-from {json,jsongz,hdf5,feather,parquet}
Source format for data conversion.
--format-to {json,jsongz,hdf5}
--format-to {json,jsongz,hdf5,feather,parquet}
Destination format for data conversion.
--erase Clean all existing data for the selected
exchange/pairs/timeframes.
-t {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} ...], --timeframes {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} ...]
Specify which tickers to download. Space-separated
list. Default: `1m 5m`.
--exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no
config is provided.
--trading-mode {spot,margin,futures}
-t TIMEFRAMES [TIMEFRAMES ...], --timeframes TIMEFRAMES [TIMEFRAMES ...]
Specify which tickers to download. Space-separated
list. Default: `1m 5m`.
--trading-mode {spot,margin,futures}, --tradingmode {spot,margin,futures}
Select Trading mode
--candle-types {spot,,futures,mark,index,premiumIndex,funding_rate} [{spot,,futures,mark,index,premiumIndex,funding_rate} ...]
--candle-types {spot,futures,mark,index,premiumIndex,funding_rate} [{spot,futures,mark,index,premiumIndex,funding_rate} ...]
Select candle type to use
Common arguments:
@ -245,7 +283,7 @@ Common arguments:
`userdir/config.json` or `config.json` whichever
exists). Multiple --config options may be used. Can be
set to `-` to read config from stdin.
-d PATH, --datadir PATH
-d PATH, --datadir PATH, --data-dir PATH
Path to directory with historical backtesting data.
--userdir PATH, --user-data-dir PATH
Path to userdata directory.
@ -267,20 +305,24 @@ freqtrade convert-data --format-from json --format-to jsongz --datadir ~/.freqtr
usage: freqtrade convert-trade-data [-h] [-v] [--logfile FILE] [-V] [-c PATH]
[-d PATH] [--userdir PATH]
[-p PAIRS [PAIRS ...]] --format-from
{json,jsongz,hdf5} --format-to
{json,jsongz,hdf5} [--erase]
{json,jsongz,hdf5,feather,parquet}
--format-to
{json,jsongz,hdf5,feather,parquet}
[--erase] [--exchange EXCHANGE]
optional arguments:
-h, --help show this help message and exit
-p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
Show profits for only these pairs. Pairs are space-
Limit command to these pairs. Pairs are space-
separated.
--format-from {json,jsongz,hdf5}
--format-from {json,jsongz,hdf5,feather,parquet}
Source format for data conversion.
--format-to {json,jsongz,hdf5}
--format-to {json,jsongz,hdf5,feather,parquet}
Destination format for data conversion.
--erase Clean all existing data for the selected
exchange/pairs/timeframes.
--exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no
config is provided.
Common arguments:
-v, --verbose Verbose mode (-vv for more, -vvv to get all messages).
@ -293,7 +335,7 @@ Common arguments:
`userdir/config.json` or `config.json` whichever
exists). Multiple --config options may be used. Can be
set to `-` to read config from stdin.
-d PATH, --datadir PATH
-d PATH, --datadir PATH, --data-dir PATH
Path to directory with historical backtesting data.
--userdir PATH, --user-data-dir PATH
Path to userdata directory.
@ -318,9 +360,9 @@ This command will allow you to repeat this last step for additional timeframes w
usage: freqtrade trades-to-ohlcv [-h] [-v] [--logfile FILE] [-V] [-c PATH]
[-d PATH] [--userdir PATH]
[-p PAIRS [PAIRS ...]]
[-t {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} ...]]
[-t TIMEFRAMES [TIMEFRAMES ...]]
[--exchange EXCHANGE]
[--data-format-ohlcv {json,jsongz,hdf5}]
[--data-format-ohlcv {json,jsongz,hdf5,feather,parquet}]
[--data-format-trades {json,jsongz,hdf5}]
optional arguments:
@ -328,12 +370,12 @@ optional arguments:
-p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
Limit command to these pairs. Pairs are space-
separated.
-t {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} ...], --timeframes {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} ...]
-t TIMEFRAMES [TIMEFRAMES ...], --timeframes TIMEFRAMES [TIMEFRAMES ...]
Specify which tickers to download. Space-separated
list. Default: `1m 5m`.
--exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no
config is provided.
--data-format-ohlcv {json,jsongz,hdf5}
--data-format-ohlcv {json,jsongz,hdf5,feather,parquet}
Storage format for downloaded candle (OHLCV) data.
(default: `json`).
--data-format-trades {json,jsongz,hdf5}
@ -351,7 +393,7 @@ Common arguments:
`userdir/config.json` or `config.json` whichever
exists). Multiple --config options may be used. Can be
set to `-` to read config from stdin.
-d PATH, --datadir PATH
-d PATH, --datadir PATH, --data-dir PATH
Path to directory with historical backtesting data.
--userdir PATH, --user-data-dir PATH
Path to userdata directory.
@ -371,7 +413,7 @@ You can get a list of downloaded data using the `list-data` sub-command.
```
usage: freqtrade list-data [-h] [-v] [--logfile FILE] [-V] [-c PATH] [-d PATH]
[--userdir PATH] [--exchange EXCHANGE]
[--data-format-ohlcv {json,jsongz,hdf5}]
[--data-format-ohlcv {json,jsongz,hdf5,feather,parquet}]
[-p PAIRS [PAIRS ...]]
[--trading-mode {spot,margin,futures}]
[--show-timerange]
@ -380,13 +422,13 @@ optional arguments:
-h, --help show this help message and exit
--exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no
config is provided.
--data-format-ohlcv {json,jsongz,hdf5}
--data-format-ohlcv {json,jsongz,hdf5,feather,parquet}
Storage format for downloaded candle (OHLCV) data.
(default: `json`).
-p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
Limit command to these pairs. Pairs are space-
separated.
--trading-mode {spot,margin,futures}
--trading-mode {spot,margin,futures}, --tradingmode {spot,margin,futures}
Select Trading mode
--show-timerange Show timerange available for available data. (May take
a while to calculate).
@ -402,7 +444,7 @@ Common arguments:
`userdir/config.json` or `config.json` whichever
exists). Multiple --config options may be used. Can be
set to `-` to read config from stdin.
-d PATH, --datadir PATH
-d PATH, --datadir PATH, --data-dir PATH
Path to directory with historical backtesting data.
--userdir PATH, --user-data-dir PATH
Path to userdata directory.

View File

@ -233,7 +233,7 @@ OKX requires a passphrase for each api key, you will therefore need to add this
!!! Warning "Futures"
OKX Futures has the concept of "position mode" - which can be Net or long/short (hedge mode).
Freqtrade supports both modes - but changing the mode mid-trading is not supported and will lead to exceptions and failures to place trades.
Freqtrade supports both modes (we recommend using net mode), but changing the mode mid-trading is not supported and will lead to exceptions and failures to place trades.
OKX also only provides MARK candles for the past ~3 months. Backtesting futures prior to that date will therefore lead to slight deviations, as funding-fees cannot be calculated correctly without this data.
## Gate.io

View File

@ -109,7 +109,8 @@ Mandatory parameters are marked as **Required**, which means that they are requi
| `indicator_max_period_candles` | **No longer used**. User must use the strategy set `startup_candle_count` which defines the maximum *period* used in `populate_any_indicators()` for indicator creation (timeframe independent). FreqAI uses this information in combination with the maximum timeframe to calculate how many data points it should download so that the first data point does not have a NaN <br> **Datatype:** positive integer.
| `indicator_periods_candles` | Calculate indicators for `indicator_periods_candles` time periods and add them to the feature set. <br> **Datatype:** List of positive integers.
| `stratify_training_data` | This value is used to indicate the grouping of the data. For example, 2 would set every 2nd data point into a separate dataset to be pulled from during training/testing. See details about how it works [here](#stratifying-the-data-for-training-and-testing-the-model) <br> **Datatype:** Positive integer.
| `principal_component_analysis` | Automatically reduce the dimensionality of the data set using Principal Component Analysis. See details about how it works [here](#reducing-data-dimensionality-with-principal-component-analysis) <br> **Datatype:** Boolean.
| `principal_component_analysis` | Automatically reduce the dimensionality of the data set using Principal Component Analysis. See details about how it works [here](#reducing-data-dimensionality-with-principal-component-analysis)
| `plot_feature_importance` | Create an interactive feature importance plot for each model.<br> **Datatype:** Boolean, defaults to `False`.
| `DI_threshold` | Activates the Dissimilarity Index for outlier detection when > 0. See details about how it works [here](#removing-outliers-with-the-dissimilarity-index). <br> **Datatype:** Positive float (typically < 1).
| `use_SVM_to_remove_outliers` | Train a support vector machine to detect and remove outliers from the training data set, as well as from incoming data points. See details about how it works [here](#removing-outliers-using-a-support-vector-machine-svm). <br> **Datatype:** Boolean.
| `svm_params` | All parameters available in Sklearn's `SGDOneClassSVM()`. See details about some select parameters [here](#removing-outliers-using-a-support-vector-machine-svm). <br> **Datatype:** Dictionary.

docs/producer-consumer.md (new file, 163 lines)
View File

@ -0,0 +1,163 @@
# Producer / Consumer mode
freqtrade provides a mechanism whereby an instance (also called `consumer`) may listen to messages from an upstream freqtrade instance (also called `producer`) using the message websocket, mainly `analyzed_df` and `whitelist` messages. This allows the reuse of computed indicators (and signals) for pairs across multiple bots without needing to compute them multiple times.
See [Message Websocket](rest-api.md#message-websocket) in the Rest API docs for setting up the `api_server` configuration for your message websocket (this will be your producer).
!!! Note
We strongly recommend setting `ws_token` to something random and known only to yourself to avoid unauthorized access to your bot.
## Configuration
Enable subscribing to an instance by adding the `external_message_consumer` section to the consumer's config file.
```json
{
//...
"external_message_consumer": {
"enabled": true,
"producers": [
{
"name": "default", // This can be any name you'd like, default is "default"
"host": "127.0.0.1", // The host from your producer's api_server config
"port": 8080, // The port from your producer's api_server config
"ws_token": "sercet_Ws_t0ken" // The ws_token from your producer's api_server config
}
],
// The following configurations are optional, and usually not required
// "wait_timeout": 300,
// "ping_timeout": 10,
// "sleep_time": 10,
// "remove_entry_exit_signals": false,
// "message_size_limit": 8
}
//...
}
```
| Parameter | Description |
|------------|-------------|
| `enabled` | **Required.** Enable consumer mode. If set to false, all other settings in this section are ignored.<br>*Defaults to `false`.*<br> **Datatype:** Boolean.
| `producers` | **Required.** List of producers. <br> **Datatype:** Array.
| `producers.name` | **Required.** Name of this producer. This name must be used in calls to `get_producer_pairs()` and `get_producer_df()` if more than one producer is used.<br> **Datatype:** string
| `producers.host` | **Required.** The hostname or IP address from your producer.<br> **Datatype:** string
| `producers.port` | **Required.** The port matching the above host.<br> **Datatype:** Integer
| `producers.ws_token` | **Required.** `ws_token` as configured on the producer.<br> **Datatype:** string
| | **Optional settings**
| `wait_timeout` | Timeout until we ping again if no message is received. <br>*Defaults to `300`.*<br> **Datatype:** Integer - in seconds.
| `ping_timeout` | Ping timeout <br>*Defaults to `10`.*<br> **Datatype:** Integer - in seconds.
| `sleep_time` | Sleep time before retrying to connect.<br>*Defaults to `10`.*<br> **Datatype:** Integer - in seconds.
| `remove_entry_exit_signals` | Remove signal columns from the dataframe (set them to 0) on dataframe receipt.<br>*Defaults to `false`.*<br> **Datatype:** Boolean.
| `message_size_limit` | Size limit per message<br>*Defaults to `8`.*<br> **Datatype:** Integer - Megabytes.
Instead of (or in addition to) calculating indicators in `populate_indicators()`, the consumer instance listens to the connection of a producer instance (or multiple producer instances in advanced configurations) and requests the producer's most recently analyzed dataframes for each pair in the active whitelist.
A consumer instance will then have a full copy of the analyzed dataframes without the need to calculate them itself.
## Examples
### Example - Producer Strategy
A simple strategy with multiple indicators. No special considerations are required in the strategy itself.
```py
class ProducerStrategy(IStrategy):
#...
def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
"""
Calculate indicators in the standard freqtrade way which can then be broadcast to other instances
"""
dataframe['rsi'] = ta.RSI(dataframe)
bollinger = qtpylib.bollinger_bands(qtpylib.typical_price(dataframe), window=20, stds=2)
dataframe['bb_lowerband'] = bollinger['lower']
dataframe['bb_middleband'] = bollinger['mid']
dataframe['bb_upperband'] = bollinger['upper']
dataframe['tema'] = ta.TEMA(dataframe, timeperiod=9)
return dataframe
def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
"""
Populates the entry signal for the given dataframe
"""
dataframe.loc[
(
(qtpylib.crossed_above(dataframe['rsi'], self.buy_rsi.value)) &
(dataframe['tema'] <= dataframe['bb_middleband']) &
(dataframe['tema'] > dataframe['tema'].shift(1)) &
(dataframe['volume'] > 0)
),
'enter_long'] = 1
return dataframe
```
!!! Tip "FreqAI"
You can use this to set up [FreqAI](freqai.md) on a powerful machine, while running consumers on simple machines like a Raspberry Pi, which can interpret the signals generated from the producer in different ways.
### Example - Consumer Strategy
A logically equivalent strategy which calculates no indicators itself, but has the same analyzed dataframes available to make trading decisions based on the indicators calculated in the producer. In this example the consumer has the same entry criteria; however, this is not required. The consumer may use different logic to enter/exit trades, and only use the indicators as specified.
```py
class ConsumerStrategy(IStrategy):
#...
process_only_new_candles = False # required for consumers
_columns_to_expect = ['rsi_default', 'tema_default', 'bb_middleband_default']
def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
"""
Use the websocket api to get pre-populated indicators from another freqtrade instance.
Use `self.dp.get_producer_df(pair)` to get the dataframe
"""
pair = metadata['pair']
timeframe = self.timeframe
producer_pairs = self.dp.get_producer_pairs()
# You can specify which producer to get pairs from via:
# self.dp.get_producer_pairs("my_other_producer")
# This func returns the analyzed dataframe, and when it was analyzed
producer_dataframe, _ = self.dp.get_producer_df(pair)
# You can get other data if the producer makes it available:
# self.dp.get_producer_df(
# pair,
# timeframe="1h",
# candle_type=CandleType.SPOT,
# producer_name="my_other_producer"
# )
if not producer_dataframe.empty:
# If you plan on passing the producer's entry/exit signal directly,
# specify ffill=False or it will have unintended results
merged_dataframe = merge_informative_pair(dataframe, producer_dataframe,
timeframe, timeframe,
append_timeframe=False,
suffix="default")
return merged_dataframe
else:
dataframe[self._columns_to_expect] = 0
return dataframe
def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
"""
Populates the entry signal for the given dataframe
"""
# Use the dataframe columns as if we calculated them ourselves
dataframe.loc[
(
(qtpylib.crossed_above(dataframe['rsi_default'], self.buy_rsi.value)) &
(dataframe['tema_default'] <= dataframe['bb_middleband_default']) &
(dataframe['tema_default'] > dataframe['tema_default'].shift(1)) &
(dataframe['volume'] > 0)
),
'enter_long'] = 1
return dataframe
```
!!! Tip "Using upstream signals"
By setting `remove_entry_exit_signals=false`, you can also use the producer's signals directly. They should be available as `enter_long_default` (assuming `suffix="default"` was used) and can be used either as a signal directly or as an additional indicator.
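As a sketch building on the consumer example above (and assuming `suffix="default"` was used in the merge), the entry logic could then reuse the producer's signal column directly:

```py
class ConsumerStrategy(IStrategy):
    #...
    def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        """
        Reuse the producer's entry signal instead of re-deriving it from indicators
        """
        dataframe.loc[
            (
                (dataframe['enter_long_default'] == 1) &
                (dataframe['volume'] > 0)
            ),
            'enter_long'] = 1

        return dataframe
```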

View File

@ -1,6 +1,6 @@
markdown==3.3.7
mkdocs==1.3.1
mkdocs-material==8.4.3
mkdocs-material==8.5.2
mdx_truly_sane_lists==1.3
pymdown-extensions==9.5
jinja2==3.1.2

View File

@ -31,7 +31,8 @@ Sample configuration:
"jwt_secret_key": "somethingrandom",
"CORS_origins": [],
"username": "Freqtrader",
"password": "SuperSecret1!"
"password": "SuperSecret1!",
"ws_token": "sercet_Ws_t0ken"
},
```
@ -93,7 +94,6 @@ Make sure that the following 2 lines are available in your docker-compose file:
!!! Danger "Security warning"
By using `8080:8080` in the docker port mapping, the API will be available to everyone connecting to the server under the correct port, so others may be able to control your bot.
## Rest API
### Consuming the API
@ -322,6 +322,73 @@ whitelist
```
### Message WebSocket
The API Server includes a websocket endpoint for subscribing to RPC messages from the freqtrade Bot.
This can be used to consume real-time data from your bot, such as entry/exit fill messages, whitelist changes, populated indicators for pairs, and more.
This is also used to set up [Producer/Consumer mode](producer-consumer.md) in Freqtrade.
Assuming your rest API is set to `127.0.0.1` on port `8080`, the endpoint is available at `http://localhost:8080/api/v1/message/ws`.
To access the websocket endpoint, the `ws_token` is required as a query parameter in the endpoint URL.
To generate a safe `ws_token` you can run the following code:
``` python
>>> import secrets
>>> secrets.token_urlsafe(25)
'hZ-y58LXyX_HZ8O1cJzVyN6ePWrLpNQv4Q'
```
You would then add that token under `ws_token` in your `api_server` config, like so:
``` json
"api_server": {
"enabled": true,
"listen_ip_address": "127.0.0.1",
"listen_port": 8080,
"verbosity": "error",
"enable_openapi": false,
"jwt_secret_key": "somethingrandom",
"CORS_origins": [],
"username": "Freqtrader",
"password": "SuperSecret1!",
"ws_token": "hZ-y58LXyX_HZ8O1cJzVyN6ePWrLpNQv4Q" // <-----
},
```
You can now connect to the endpoint at `http://localhost:8080/api/v1/message/ws?token=hZ-y58LXyX_HZ8O1cJzVyN6ePWrLpNQv4Q`.
!!! Danger "Reuse of example tokens"
Please do not use the above example token. To make sure you are secure, generate a completely new token.
#### Using the WebSocket
Once connected to the WebSocket, the bot will broadcast RPC messages to anyone who is subscribed to them. To subscribe to a list of messages, you must send a JSON request through the WebSocket like the one below. The `data` key must be a list of message type strings.
``` json
{
"type": "subscribe",
"data": ["whitelist", "analyzed_df"] // A list of string message types
}
```
For a list of message types, please refer to the RPCMessageType enum in `freqtrade/enums/rpcmessagetype.py`.
Now anytime those types of RPC messages are sent in the bot, you will receive them through the WebSocket as long as the connection is active. They typically take the same form as the request:
``` json
{
"type": "analyzed_df",
"data": {
"key": ["NEO/BTC", "5m", "spot"],
"df": {}, // The dataframe
"la": "2022-09-08 22:14:41.457786+00:00"
}
}
```
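As an illustration only (this is not an official freqtrade client), a minimal consumer of these messages could be written with the third-party `websockets` package; the host, port and token below follow the examples above:

``` python
import asyncio
import json

import websockets  # third-party package: pip install websockets


async def listen():
    # Host, port and token must match your api_server configuration
    uri = "ws://127.0.0.1:8080/api/v1/message/ws?token=hZ-y58LXyX_HZ8O1cJzVyN6ePWrLpNQv4Q"
    async with websockets.connect(uri) as ws:
        # Subscribe to the message types you want to receive
        await ws.send(json.dumps({"type": "subscribe", "data": ["whitelist", "analyzed_df"]}))
        while True:
            message = json.loads(await ws.recv())
            print(message["type"], message.get("data"))


asyncio.run(listen())
```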
### OpenAPI interface
To enable the built-in OpenAPI interface (Swagger UI), specify `"enable_openapi": true` in the api_server configuration.

View File

@ -106,6 +106,12 @@ def custom_exit(self, pair: str, trade: Trade, current_time: datetime, current_r
!!! Note
`enter_tag` is limited to 100 characters; remaining data will be truncated.
!!! Warning
There is only one `enter_tag` column, which is used for both long and short trades.
As a consequence, this column must be treated as "last write wins" (it's just a dataframe column after all).
In complex situations, where multiple signals collide (or if signals are deactivated again based on different conditions), this can lead to odd results, with the wrong tag applied to an entry signal.
These results are a consequence of the strategy overwriting prior tags, where the last tag written "sticks" and is the one freqtrade will use.
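As a toy illustration (hypothetical indicator values, not a complete strategy), the snippet below shows how the later assignment overwrites the earlier tag on rows where both conditions match:

``` python
import pandas as pd

df = pd.DataFrame({
    'rsi': [25, 45],
    'close': [99, 101],
    'bb_lowerband': [100, 100],
})
df['enter_long'] = 0
df['enter_tag'] = ''

# Signal 1: oversold RSI
df.loc[df['rsi'] < 30, ['enter_long', 'enter_tag']] = (1, 'rsi_oversold')
# Signal 2: close below the lower Bollinger band.
# Row 0 matches both conditions, so this later assignment overwrites
# the tag set above - last write wins.
df.loc[df['close'] < df['bb_lowerband'], ['enter_long', 'enter_tag']] = (1, 'bb_low')

print(df[['enter_long', 'enter_tag']])
# Row 0 ends up tagged 'bb_low' even though the RSI signal fired too.
```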
## Exit tag
Similar to [Buy Tagging](#buy-tag), you can also specify a sell tag.

View File

@ -82,6 +82,8 @@ Example configuration showing the different settings:
"warning": "on",
"startup": "off",
"entry": "silent",
"entry_fill": "on",
"entry_cancel": "silent",
"exit": {
"roi": "silent",
"emergency_exit": "on",
@ -93,9 +95,7 @@ Example configuration showing the different settings:
"custom_exit": "silent",
"partial_exit": "on"
},
"entry_cancel": "silent",
"exit_cancel": "on",
"entry_fill": "off",
"exit_fill": "off",
"protection_trigger": "off",
"protection_trigger_global": "on",

View File

@ -34,6 +34,7 @@ dependencies:
- schedule
- python-dateutil
- joblib
- pyarrow
# ============================

View File

@ -62,9 +62,9 @@ ARGS_BUILD_CONFIG = ["config"]
ARGS_BUILD_STRATEGY = ["user_data_dir", "strategy", "template"]
ARGS_CONVERT_DATA = ["pairs", "format_from", "format_to", "erase"]
ARGS_CONVERT_DATA = ["pairs", "format_from", "format_to", "erase", "exchange"]
ARGS_CONVERT_DATA_OHLCV = ARGS_CONVERT_DATA + ["timeframes", "exchange", "trading_mode",
ARGS_CONVERT_DATA_OHLCV = ARGS_CONVERT_DATA + ["timeframes", "trading_mode",
"candle_types"]
ARGS_CONVERT_TRADES = ["pairs", "timeframes", "exchange", "dataformat_ohlcv", "dataformat_trades"]

View File

@ -211,6 +211,7 @@ def ask_user_config() -> Dict[str, Any]:
)
# Force JWT token to be a random string
answers['api_server_jwt_key'] = secrets.token_hex()
answers['api_server_ws_token'] = secrets.token_urlsafe(25)
return answers

View File

@ -69,7 +69,7 @@ AVAILABLE_CLI_OPTIONS = {
metavar='PATH',
),
"datadir": Arg(
'-d', '--datadir',
'-d', '--datadir', '--data-dir',
help='Path to directory with historical backtesting data.',
metavar='PATH',
),
@ -440,7 +440,7 @@ AVAILABLE_CLI_OPTIONS = {
"dataformat_trades": Arg(
'--data-format-trades',
help='Storage format for downloaded trades data. (default: `jsongz`).',
choices=constants.AVAILABLE_DATAHANDLERS,
choices=constants.AVAILABLE_DATAHANDLERS_TRADES,
),
"show_timerange": Arg(
'--show-timerange',

View File

@ -36,24 +36,24 @@ def deploy_new_strategy(strategy_name: str, strategy_path: Path, subtemplate: st
"""
fallback = 'full'
indicators = render_template_with_fallback(
templatefile=f"subtemplates/indicators_{subtemplate}.j2",
templatefallbackfile=f"subtemplates/indicators_{fallback}.j2",
templatefile=f"strategy_subtemplates/indicators_{subtemplate}.j2",
templatefallbackfile=f"strategy_subtemplates/indicators_{fallback}.j2",
)
buy_trend = render_template_with_fallback(
templatefile=f"subtemplates/buy_trend_{subtemplate}.j2",
templatefallbackfile=f"subtemplates/buy_trend_{fallback}.j2",
templatefile=f"strategy_subtemplates/buy_trend_{subtemplate}.j2",
templatefallbackfile=f"strategy_subtemplates/buy_trend_{fallback}.j2",
)
sell_trend = render_template_with_fallback(
templatefile=f"subtemplates/sell_trend_{subtemplate}.j2",
templatefallbackfile=f"subtemplates/sell_trend_{fallback}.j2",
templatefile=f"strategy_subtemplates/sell_trend_{subtemplate}.j2",
templatefallbackfile=f"strategy_subtemplates/sell_trend_{fallback}.j2",
)
plot_config = render_template_with_fallback(
templatefile=f"subtemplates/plot_config_{subtemplate}.j2",
templatefallbackfile=f"subtemplates/plot_config_{fallback}.j2",
templatefile=f"strategy_subtemplates/plot_config_{subtemplate}.j2",
templatefallbackfile=f"strategy_subtemplates/plot_config_{fallback}.j2",
)
additional_methods = render_template_with_fallback(
templatefile=f"subtemplates/strategy_methods_{subtemplate}.j2",
templatefallbackfile="subtemplates/strategy_methods_empty.j2",
templatefile=f"strategy_subtemplates/strategy_methods_{subtemplate}.j2",
templatefallbackfile="strategy_subtemplates/strategy_methods_empty.j2",
)
strategy_text = render_template(templatefile='base_strategy.py.j2',

View File

@ -1,6 +1,6 @@
import logging
from typing import Any, Dict
from freqtrade.constants import Config
from freqtrade.enums import RunMode
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import (available_exchanges, is_exchange_known_ccxt,
@ -10,7 +10,7 @@ from freqtrade.exchange import (available_exchanges, is_exchange_known_ccxt,
logger = logging.getLogger(__name__)
def check_exchange(config: Dict[str, Any], check_for_bad: bool = True) -> bool:
def check_exchange(config: Config, check_for_bad: bool = True) -> bool:
"""
Check if the exchange name in the config file is supported by Freqtrade
:param check_for_bad: if True, check the exchange against the list of known 'bad'

View File

@ -1,4 +1,5 @@
import logging
from collections import Counter
from copy import deepcopy
from typing import Any, Dict
@ -85,6 +86,7 @@ def validate_config_consistency(conf: Dict[str, Any], preliminary: bool = False)
_validate_unlimited_amount(conf)
_validate_ask_orderbook(conf)
_validate_freqai_hyperopt(conf)
_validate_consumers(conf)
validate_migrated_strategy_settings(conf)
# validate configuration before returning
@ -332,6 +334,23 @@ def _validate_freqai_hyperopt(conf: Dict[str, Any]) -> None:
'Using analyze-per-epoch parameter is not supported with a FreqAI strategy.')
def _validate_consumers(conf: Dict[str, Any]) -> None:
emc_conf = conf.get('external_message_consumer', {})
if emc_conf.get('enabled', False):
if len(emc_conf.get('producers', [])) < 1:
raise OperationalException("You must specify at least 1 Producer to connect to.")
producer_names = [p['name'] for p in emc_conf.get('producers', [])]
duplicates = [item for item, count in Counter(producer_names).items() if count > 1]
if duplicates:
raise OperationalException(
f"Producer names must be unique. Duplicate: {', '.join(duplicates)}")
if conf.get('process_only_new_candles', True):
# Warning here or require it?
logger.warning("To receive best performance with external data, "
"please set `process_only_new_candles` to False")
def _strategy_settings(conf: Dict[str, Any]) -> None:
process_deprecated_setting(conf, None, 'use_sell_signal', None, 'use_exit_signal')

View File

@ -13,6 +13,7 @@ from freqtrade.configuration.deprecated_settings import process_temporary_deprec
from freqtrade.configuration.directory_operations import create_datadir, create_userdata_dir
from freqtrade.configuration.environment_vars import enironment_vars_to_dict
from freqtrade.configuration.load_config import load_file, load_from_files
from freqtrade.constants import Config
from freqtrade.enums import NON_UTIL_MODES, TRADING_MODES, CandleType, RunMode, TradingMode
from freqtrade.exceptions import OperationalException
from freqtrade.loggers import setup_logging
@ -30,10 +31,10 @@ class Configuration:
def __init__(self, args: Dict[str, Any], runmode: RunMode = None) -> None:
self.args = args
self.config: Optional[Dict[str, Any]] = None
self.config: Optional[Config] = None
self.runmode = runmode
def get_config(self) -> Dict[str, Any]:
def get_config(self) -> Config:
"""
Return the config. Use this method to get the bot config
:return: Dict: Bot config
@ -65,7 +66,7 @@ class Configuration:
:return: Configuration dictionary
"""
# Load all configs
config: Dict[str, Any] = load_from_files(self.args.get("config", []))
config: Config = load_from_files(self.args.get("config", []))
# Load environment variables
env_data = enironment_vars_to_dict()
@ -108,7 +109,7 @@ class Configuration:
return config
def _process_logging_options(self, config: Dict[str, Any]) -> None:
def _process_logging_options(self, config: Config) -> None:
"""
Extract information for sys.argv and load logging configuration:
the -v/--verbose, --logfile options
@ -121,7 +122,7 @@ class Configuration:
setup_logging(config)
def _process_trading_options(self, config: Dict[str, Any]) -> None:
def _process_trading_options(self, config: Config) -> None:
if config['runmode'] not in TRADING_MODES:
return
@ -137,7 +138,7 @@ class Configuration:
logger.info(f'Using DB: "{parse_db_uri_for_logging(config["db_url"])}"')
def _process_common_options(self, config: Dict[str, Any]) -> None:
def _process_common_options(self, config: Config) -> None:
# Set strategy if not specified in config and or if it's non default
if self.args.get('strategy') or not config.get('strategy'):
@ -161,7 +162,7 @@ class Configuration:
if 'sd_notify' in self.args and self.args['sd_notify']:
config['internals'].update({'sd_notify': True})
def _process_datadir_options(self, config: Dict[str, Any]) -> None:
def _process_datadir_options(self, config: Config) -> None:
"""
Extract information for sys.argv and load directory configurations
--user-data, --datadir
@ -195,7 +196,7 @@ class Configuration:
config['exportfilename'] = (config['user_data_dir']
/ 'backtest_results')
def _process_optimize_options(self, config: Dict[str, Any]) -> None:
def _process_optimize_options(self, config: Config) -> None:
# This will override the strategy configuration
self._args_to_config(config, argname='timeframe',
@ -380,7 +381,7 @@ class Configuration:
self._args_to_config(config, argname="hyperopt_ignore_missing_space",
logstring="Paramter --ignore-missing-space detected: {}")
def _process_plot_options(self, config: Dict[str, Any]) -> None:
def _process_plot_options(self, config: Config) -> None:
self._args_to_config(config, argname='pairs',
logstring='Using pairs {}')
@ -432,7 +433,7 @@ class Configuration:
self._args_to_config(config, argname='show_timerange',
logstring='Detected --show-timerange')
def _process_data_options(self, config: Dict[str, Any]) -> None:
def _process_data_options(self, config: Config) -> None:
self._args_to_config(config, argname='new_pairs_days',
logstring='Detected --new-pairs-days: {}')
self._args_to_config(config, argname='trading_mode',
@ -443,7 +444,7 @@ class Configuration:
self._args_to_config(config, argname='candle_types',
logstring='Detected --candle-types: {}')
def _process_analyze_options(self, config: Dict[str, Any]) -> None:
def _process_analyze_options(self, config: Config) -> None:
self._args_to_config(config, argname='analysis_groups',
logstring='Analysis reason groups: {}')
@ -456,7 +457,7 @@ class Configuration:
self._args_to_config(config, argname='indicator_list',
logstring='Analysis indicator list: {}')
def _process_runmode(self, config: Dict[str, Any]) -> None:
def _process_runmode(self, config: Config) -> None:
self._args_to_config(config, argname='dry_run',
logstring='Parameter --dry-run detected, '
@ -469,7 +470,7 @@ class Configuration:
config.update({'runmode': self.runmode})
def _process_freqai_options(self, config: Dict[str, Any]) -> None:
def _process_freqai_options(self, config: Config) -> None:
self._args_to_config(config, argname='freqaimodel',
logstring='Using freqaimodel class name: {}')
@ -479,7 +480,7 @@ class Configuration:
return
def _args_to_config(self, config: Dict[str, Any], argname: str,
def _args_to_config(self, config: Config, argname: str,
logstring: str, logfun: Optional[Callable] = None,
deprecated_msg: Optional[str] = None) -> None:
"""
@ -502,7 +503,7 @@ class Configuration:
if deprecated_msg:
warnings.warn(f"DEPRECATED: {deprecated_msg}", DeprecationWarning)
def _resolve_pairs_list(self, config: Dict[str, Any]) -> None:
def _resolve_pairs_list(self, config: Config) -> None:
"""
Helper for download script.
Takes first found:

View File

@ -3,15 +3,16 @@ Functions to handle deprecated settings
"""
import logging
from typing import Any, Dict, Optional
from typing import Optional
from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
logger = logging.getLogger(__name__)
def check_conflicting_settings(config: Dict[str, Any],
def check_conflicting_settings(config: Config,
section_old: Optional[str], name_old: str,
section_new: Optional[str], name_new: str) -> None:
section_new_config = config.get(section_new, {}) if section_new else config
@ -28,7 +29,7 @@ def check_conflicting_settings(config: Dict[str, Any],
)
def process_removed_setting(config: Dict[str, Any],
def process_removed_setting(config: Config,
section1: str, name1: str,
section2: Optional[str], name2: str) -> None:
"""
@ -47,7 +48,7 @@ def process_removed_setting(config: Dict[str, Any],
)
def process_deprecated_setting(config: Dict[str, Any],
def process_deprecated_setting(config: Config,
section_old: Optional[str], name_old: str,
section_new: Optional[str], name_new: str
) -> None:
@ -69,7 +70,7 @@ def process_deprecated_setting(config: Dict[str, Any],
del section_old_config[name_old]
def process_temporary_deprecated_settings(config: Dict[str, Any]) -> None:
def process_temporary_deprecated_settings(config: Config) -> None:
# Kept for future deprecated / moved settings
# check_conflicting_settings(config, 'ask_strategy', 'use_sell_signal',

View File

@ -1,16 +1,16 @@
import logging
import shutil
from pathlib import Path
from typing import Any, Dict, Optional
from typing import Optional
from freqtrade.constants import USER_DATA_FILES
from freqtrade.constants import USER_DATA_FILES, Config
from freqtrade.exceptions import OperationalException
logger = logging.getLogger(__name__)
def create_datadir(config: Dict[str, Any], datadir: Optional[str] = None) -> Path:
def create_datadir(config: Config, datadir: Optional[str] = None) -> Path:
folder = Path(datadir) if datadir else Path(f"{config['user_data_dir']}/data")
if not datadir:

View File

@ -10,7 +10,7 @@ from typing import Any, Dict, List
import rapidjson
from freqtrade.constants import MINIMAL_CONFIG
from freqtrade.constants import MINIMAL_CONFIG, Config
from freqtrade.exceptions import OperationalException
from freqtrade.misc import deep_merge_dicts
@ -80,7 +80,7 @@ def load_from_files(files: List[str], base_path: Path = None, level: int = 0) ->
Recursively load configuration files if specified.
Sub-files are assumed to be relative to the initial config.
"""
config: Dict[str, Any] = {}
config: Config = {}
if level > 5:
raise OperationalException("Config loop detected.")

View File

@ -3,7 +3,7 @@
"""
bot constants
"""
from typing import List, Literal, Tuple
from typing import Any, Dict, List, Literal, Tuple
from freqtrade.enums import CandleType
@ -36,7 +36,8 @@ AVAILABLE_PAIRLISTS = ['StaticPairList', 'VolumePairList',
'PrecisionFilter', 'PriceFilter', 'RangeStabilityFilter',
'ShuffleFilter', 'SpreadFilter', 'VolatilityFilter']
AVAILABLE_PROTECTIONS = ['CooldownPeriod', 'LowProfitPairs', 'MaxDrawdown', 'StoplossGuard']
AVAILABLE_DATAHANDLERS = ['json', 'jsongz', 'hdf5']
AVAILABLE_DATAHANDLERS_TRADES = ['json', 'jsongz', 'hdf5']
AVAILABLE_DATAHANDLERS = AVAILABLE_DATAHANDLERS_TRADES + ['feather', 'parquet']
BACKTEST_BREAKDOWNS = ['day', 'week', 'month']
BACKTEST_CACHE_AGE = ['none', 'day', 'week', 'month']
BACKTEST_CACHE_DEFAULT = 'day'
@ -243,6 +244,7 @@ CONF_SCHEMA = {
'exchange': {'$ref': '#/definitions/exchange'},
'edge': {'$ref': '#/definitions/edge'},
'freqai': {'$ref': '#/definitions/freqai'},
'external_message_consumer': {'$ref': '#/definitions/external_message_consumer'},
'experimental': {
'type': 'object',
'properties': {
@ -289,11 +291,12 @@ CONF_SCHEMA = {
'warning': {'type': 'string', 'enum': TELEGRAM_SETTING_OPTIONS},
'startup': {'type': 'string', 'enum': TELEGRAM_SETTING_OPTIONS},
'entry': {'type': 'string', 'enum': TELEGRAM_SETTING_OPTIONS},
'entry_cancel': {'type': 'string', 'enum': TELEGRAM_SETTING_OPTIONS},
'entry_fill': {'type': 'string',
'entry_fill': {
'type': 'string',
'enum': TELEGRAM_SETTING_OPTIONS,
'default': 'off'
},
'entry_cancel': {'type': 'string', 'enum': TELEGRAM_SETTING_OPTIONS, },
'exit': {
'type': ['string', 'object'],
'additionalProperties': {
@ -301,12 +304,12 @@ CONF_SCHEMA = {
'enum': TELEGRAM_SETTING_OPTIONS
}
},
'exit_cancel': {'type': 'string', 'enum': TELEGRAM_SETTING_OPTIONS},
'exit_fill': {
'type': 'string',
'enum': TELEGRAM_SETTING_OPTIONS,
'default': 'on'
},
'exit_cancel': {'type': 'string', 'enum': TELEGRAM_SETTING_OPTIONS},
'protection_trigger': {
'type': 'string',
'enum': TELEGRAM_SETTING_OPTIONS,
@ -315,14 +318,17 @@ CONF_SCHEMA = {
'protection_trigger_global': {
'type': 'string',
'enum': TELEGRAM_SETTING_OPTIONS,
'default': 'on'
},
'show_candle': {
'type': 'string',
'enum': ['off', 'ohlc'],
'default': 'off'
},
'strategy_msg': {
'type': 'string',
'enum': TELEGRAM_SETTING_OPTIONS,
'default': 'on'
},
}
},
@ -400,6 +406,7 @@ CONF_SCHEMA = {
},
'username': {'type': 'string'},
'password': {'type': 'string'},
'ws_token': {'type': ['string', 'array'], 'items': {'type': 'string'}},
'jwt_secret_key': {'type': 'string'},
'CORS_origins': {'type': 'array', 'items': {'type': 'string'}},
'verbosity': {'type': 'string', 'enum': ['error', 'info']},
@ -428,7 +435,7 @@ CONF_SCHEMA = {
},
'dataformat_trades': {
'type': 'string',
'enum': AVAILABLE_DATAHANDLERS,
'enum': AVAILABLE_DATAHANDLERS_TRADES,
'default': 'jsongz'
},
'position_adjustment_enable': {'type': 'boolean'},
@ -484,6 +491,47 @@ CONF_SCHEMA = {
},
'required': ['process_throttle_secs', 'allowed_risk']
},
'external_message_consumer': {
'type': 'object',
'properties': {
'enabled': {'type': 'boolean', 'default': False},
'producers': {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'name': {'type': 'string'},
'host': {'type': 'string'},
'port': {
'type': 'integer',
'default': 8080,
'minimum': 0,
'maximum': 65535
},
'ws_token': {'type': 'string'},
},
'required': ['name', 'host', 'ws_token']
}
},
'wait_timeout': {'type': 'integer', 'minimum': 0},
'sleep_time': {'type': 'integer', 'minimum': 0},
'ping_timeout': {'type': 'integer', 'minimum': 0},
'remove_entry_exit_signals': {'type': 'boolean', 'default': False},
'initial_candle_limit': {
'type': 'integer',
'minimum': 0,
'maximum': 1500,
'default': 1500
},
'message_size_limit': { # In megabytes
'type': 'integer',
'minimum': 1,
'maxmium': 20,
'default': 8,
}
},
'required': ['producers']
},
"freqai": {
"type": "object",
"properties": {
@ -504,6 +552,7 @@ CONF_SCHEMA = {
"weight_factor": {"type": "number", "default": 0},
"principal_component_analysis": {"type": "boolean", "default": False},
"use_SVM_to_remove_outliers": {"type": "boolean", "default": False},
"plot_feature_importance": {"type": "boolean", "default": False},
"svm_params": {"type": "object",
"properties": {
"shuffle": {"type": "boolean", "default": False},
@ -603,3 +652,5 @@ LongShort = Literal['long', 'short']
EntryExit = Literal['entry', 'exit']
BuySell = Literal['buy', 'sell']
MakerTaker = Literal['maker', 'taker']
Config = Dict[str, Any]

View File

@ -5,12 +5,12 @@ import itertools
import logging
from datetime import datetime, timezone
from operator import itemgetter
from typing import Any, Dict, List
from typing import Dict, List
import pandas as pd
from pandas import DataFrame, to_datetime
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS, TradeList
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS, Config, TradeList
from freqtrade.enums import CandleType
@ -237,7 +237,7 @@ def trades_to_ohlcv(trades: TradeList, timeframe: str) -> DataFrame:
return df_new.loc[:, DEFAULT_DATAFRAME_COLUMNS]
def convert_trades_format(config: Dict[str, Any], convert_from: str, convert_to: str, erase: bool):
def convert_trades_format(config: Config, convert_from: str, convert_to: str, erase: bool):
"""
Convert trades from one format to another format.
:param config: Config dictionary
@ -263,7 +263,7 @@ def convert_trades_format(config: Dict[str, Any], convert_from: str, convert_to:
def convert_ohlcv_format(
config: Dict[str, Any],
config: Config,
convert_from: str,
convert_to: str,
erase: bool,
@ -292,6 +292,7 @@ def convert_ohlcv_format(
timeframe,
candle_type=candle_type
))
config['pairs'] = sorted(set(config['pairs']))
logger.info(f"Converting candle (OHLCV) data for {config['pairs']}")
for timeframe in timeframes:
@ -302,7 +303,7 @@ def convert_ohlcv_format(
drop_incomplete=False,
startup_candles=0,
candle_type=candle_type)
logger.info(f"Converting {len(data)} {candle_type} candles for {pair}")
logger.info(f"Converting {len(data)} {timeframe} {candle_type} candles for {pair}")
if len(data) > 0:
trg.ohlcv_store(
pair=pair,

View File

@ -12,11 +12,12 @@ from typing import Any, Dict, List, Optional, Tuple
from pandas import DataFrame
from freqtrade.configuration import TimeRange
from freqtrade.constants import ListPairsWithTimeframes, PairWithTimeframe
from freqtrade.constants import Config, ListPairsWithTimeframes, PairWithTimeframe
from freqtrade.data.history import load_pair_history
from freqtrade.enums import CandleType, RunMode
from freqtrade.enums import CandleType, RPCMessageType, RunMode
from freqtrade.exceptions import ExchangeError, OperationalException
from freqtrade.exchange import Exchange, timeframe_to_seconds
from freqtrade.rpc import RPCManager
from freqtrade.util import PeriodicCache
@ -28,17 +29,33 @@ MAX_DATAFRAME_CANDLES = 1000
class DataProvider:
def __init__(self, config: dict, exchange: Optional[Exchange], pairlists=None) -> None:
def __init__(
self,
config: Config,
exchange: Optional[Exchange],
pairlists=None,
rpc: Optional[RPCManager] = None
) -> None:
self._config = config
self._exchange = exchange
self._pairlists = pairlists
self.__rpc = rpc
self.__cached_pairs: Dict[PairWithTimeframe, Tuple[DataFrame, datetime]] = {}
self.__slice_index: Optional[int] = None
self.__cached_pairs_backtesting: Dict[PairWithTimeframe, DataFrame] = {}
self.__producer_pairs_df: Dict[str,
Dict[PairWithTimeframe, Tuple[DataFrame, datetime]]] = {}
self.__producer_pairs: Dict[str, List[str]] = {}
self._msg_queue: deque = deque()
self._default_candle_type = self._config.get('candle_type_def', CandleType.SPOT)
self._default_timeframe = self._config.get('timeframe', '1h')
self.__msg_cache = PeriodicCache(
maxsize=1000, ttl=timeframe_to_seconds(self._config.get('timeframe', '1h')))
maxsize=1000, ttl=timeframe_to_seconds(self._default_timeframe))
self.producers = self._config.get('external_message_consumer', {}).get('producers', [])
self.external_data_enabled = len(self.producers) > 0
def _set_dataframe_max_index(self, limit_index: int):
"""
@ -63,9 +80,110 @@ class DataProvider:
:param dataframe: analyzed dataframe
:param candle_type: Any of the enum CandleType (must match trading mode!)
"""
self.__cached_pairs[(pair, timeframe, candle_type)] = (
pair_key = (pair, timeframe, candle_type)
self.__cached_pairs[pair_key] = (
dataframe, datetime.now(timezone.utc))
# For multiple producers we will want to merge the pairlists instead of overwriting
def _set_producer_pairs(self, pairlist: List[str], producer_name: str = "default"):
"""
Set the pairs received to later be used.
:param pairlist: List of pairs
"""
self.__producer_pairs[producer_name] = pairlist
def get_producer_pairs(self, producer_name: str = "default") -> List[str]:
"""
Get the pairs cached from the producer
:returns: List of pairs
"""
return self.__producer_pairs.get(producer_name, []).copy()
def _emit_df(
self,
pair_key: PairWithTimeframe,
dataframe: DataFrame
) -> None:
"""
Send this dataframe as an ANALYZED_DF message to RPC
:param pair_key: PairWithTimeframe tuple
:param data: Tuple containing the DataFrame and the datetime it was cached
"""
if self.__rpc:
self.__rpc.send_msg(
{
'type': RPCMessageType.ANALYZED_DF,
'data': {
'key': pair_key,
'df': dataframe,
'la': datetime.now(timezone.utc)
}
}
)
def _add_external_df(
self,
pair: str,
dataframe: DataFrame,
last_analyzed: datetime,
timeframe: str,
candle_type: CandleType,
producer_name: str = "default"
) -> None:
"""
Add the pair data to this class from an external source.
:param pair: pair to get the data for
:param timeframe: Timeframe to get data for
:param candle_type: Any of the enum CandleType (must match trading mode!)
"""
pair_key = (pair, timeframe, candle_type)
if producer_name not in self.__producer_pairs_df:
self.__producer_pairs_df[producer_name] = {}
_last_analyzed = datetime.now(timezone.utc) if not last_analyzed else last_analyzed
self.__producer_pairs_df[producer_name][pair_key] = (dataframe, _last_analyzed)
logger.debug(f"External DataFrame for {pair_key} from {producer_name} added.")
def get_producer_df(
self,
pair: str,
timeframe: Optional[str] = None,
candle_type: Optional[CandleType] = None,
producer_name: str = "default"
) -> Tuple[DataFrame, datetime]:
"""
Get the pair data from producers.
:param pair: pair to get the data for
:param timeframe: Timeframe to get data for
:param candle_type: Any of the enum CandleType (must match trading mode!)
:returns: Tuple of the DataFrame and last analyzed timestamp
"""
_timeframe = self._default_timeframe if not timeframe else timeframe
_candle_type = self._default_candle_type if not candle_type else candle_type
pair_key = (pair, _timeframe, _candle_type)
# If we have no data from this Producer yet
if producer_name not in self.__producer_pairs_df:
# We don't have this data yet, return empty DataFrame and datetime (01-01-1970)
return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))
# If we do have data from that Producer, but no data on this pair_key
if pair_key not in self.__producer_pairs_df[producer_name]:
# We don't have this data yet, return empty DataFrame and datetime (01-01-1970)
return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))
# We have it, return this data
df, la = self.__producer_pairs_df[producer_name][pair_key]
return (df.copy(), la)
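As a rough illustration of the consumer side of this API, the sketch below pulls a producer dataframe through get_producer_df() and merges one column into the local dataframe. The 'rsi' column, the helper name and the merge strategy are assumptions made for the example, not part of this change.

from pandas import DataFrame

def attach_producer_rsi(dp, dataframe: DataFrame, pair: str) -> DataFrame:
    # dp is the bot's DataProvider; 'rsi' is a hypothetical column computed by the producer
    producer_df, last_analyzed = dp.get_producer_df(pair)  # default timeframe/candle_type/producer
    if producer_df.empty:
        return dataframe  # nothing received yet (last_analyzed == 1970-01-01 UTC)
    return dataframe.merge(producer_df[['date', 'rsi']], on='date', how='left')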
def add_pairlisthandler(self, pairlists) -> None:
"""
Allow adding pairlisthandler after initialization
@ -90,8 +208,10 @@ class DataProvider:
if saved_pair not in self.__cached_pairs_backtesting:
timerange = TimeRange.parse_timerange(None if self._config.get(
'timerange') is None else str(self._config.get('timerange')))
# Move informative start time respecting startup_candle_count
startup_candles = self.get_required_startup(str(timeframe))
# It is not necessary to add the training candles, as they
# were already added at the beginning of the backtest.
startup_candles = self.get_required_startup(str(timeframe), False)
tf_seconds = timeframe_to_seconds(str(timeframe))
timerange.subtract_start(tf_seconds * startup_candles)
self.__cached_pairs_backtesting[saved_pair] = load_pair_history(
@ -105,7 +225,7 @@ class DataProvider:
)
return self.__cached_pairs_backtesting[saved_pair].copy()
def get_required_startup(self, timeframe: str) -> int:
def get_required_startup(self, timeframe: str, add_train_candles: bool = True) -> int:
freqai_config = self._config.get('freqai', {})
if not freqai_config.get('enabled', False):
return self._config.get('startup_candle_count', 0)
@ -115,6 +235,8 @@ class DataProvider:
# make sure the startupcandles is at least the set maximum indicator periods
self._config['startup_candle_count'] = max(startup_candles, max(indicator_periods))
tf_seconds = timeframe_to_seconds(timeframe)
train_candles = 0
if add_train_candles:
train_candles = freqai_config['train_period_days'] * 86400 / tf_seconds
total_candles = int(self._config['startup_candle_count'] + train_candles)
logger.info(f'Increasing startup_candle_count for freqai to {total_candles}')
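A quick worked example of the arithmetic above, with made-up numbers: an effective startup_candle_count of 40, train_period_days of 30 and a 5m timeframe.

tf_seconds = 300                          # timeframe_to_seconds('5m')
train_candles = 30 * 86400 / tf_seconds   # 8640 training candles
total_candles = int(40 + train_candles)   # 8680 -> new startup_candle_count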

View File

@ -0,0 +1,130 @@
import logging
from typing import Optional
from pandas import DataFrame, read_feather, to_datetime
from freqtrade.configuration import TimeRange
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, TradeList
from freqtrade.enums import CandleType
from .idatahandler import IDataHandler
logger = logging.getLogger(__name__)
class FeatherDataHandler(IDataHandler):
_columns = DEFAULT_DATAFRAME_COLUMNS
def ohlcv_store(
self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType) -> None:
"""
Store ohlcv data in feather format.
:param pair: Pair - used to generate filename
:param timeframe: Timeframe - used to generate filename
:param data: Dataframe containing OHLCV data
:param candle_type: Any of the enum CandleType (must match trading mode!)
:return: None
"""
filename = self._pair_data_filename(self._datadir, pair, timeframe, candle_type)
self.create_dir_if_needed(filename)
data.reset_index(drop=True).loc[:, self._columns].to_feather(
filename, compression_level=9, compression='lz4')
def _ohlcv_load(self, pair: str, timeframe: str,
timerange: Optional[TimeRange], candle_type: CandleType
) -> DataFrame:
"""
Internal method used to load data for one pair from disk.
Implements the loading and conversion to a Pandas dataframe.
Timerange trimming and dataframe validation happens outside of this method.
:param pair: Pair to load data
:param timeframe: Timeframe (e.g. "5m")
:param timerange: Limit data to be loaded to this timerange.
Optionally implemented by subclasses to avoid loading
all data where possible.
:param candle_type: Any of the enum CandleType (must match trading mode!)
:return: DataFrame with ohlcv data, or empty DataFrame
"""
filename = self._pair_data_filename(
self._datadir, pair, timeframe, candle_type=candle_type)
if not filename.exists():
# Fallback mode for 1M files
filename = self._pair_data_filename(
self._datadir, pair, timeframe, candle_type=candle_type, no_timeframe_modify=True)
if not filename.exists():
return DataFrame(columns=self._columns)
pairdata = read_feather(filename)
pairdata.columns = self._columns
pairdata = pairdata.astype(dtype={'open': 'float', 'high': 'float',
'low': 'float', 'close': 'float', 'volume': 'float'})
pairdata['date'] = to_datetime(pairdata['date'],
unit='ms',
utc=True,
infer_datetime_format=True)
return pairdata
def ohlcv_append(
self,
pair: str,
timeframe: str,
data: DataFrame,
candle_type: CandleType
) -> None:
"""
Append data to existing data structures
:param pair: Pair
:param timeframe: Timeframe this ohlcv data is for
:param data: Data to append.
:param candle_type: Any of the enum CandleType (must match trading mode!)
"""
raise NotImplementedError()
def trades_store(self, pair: str, data: TradeList) -> None:
"""
Store trades data (list of Dicts) to file
:param pair: Pair - used for filename
:param data: List of Lists containing trade data,
column sequence as in DEFAULT_TRADES_COLUMNS
"""
# filename = self._pair_trades_filename(self._datadir, pair)
raise NotImplementedError()
# array = pa.array(data)
# array
# feather.write_feather(data, filename)
def trades_append(self, pair: str, data: TradeList):
"""
Append data to existing files
:param pair: Pair - used for filename
:param data: List of Lists containing trade data,
column sequence as in DEFAULT_TRADES_COLUMNS
"""
raise NotImplementedError()
def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> TradeList:
"""
Load trades for a pair from file - not yet implemented for this data handler.
# TODO: respect timerange ...
:param pair: Load trades for this pair
:param timerange: Timerange to load trades for - currently not implemented
:return: List of trades
"""
raise NotImplementedError()
# filename = self._pair_trades_filename(self._datadir, pair)
# tradesdata = misc.file_load_json(filename)
# if not tradesdata:
# return []
# return tradesdata
@classmethod
def _get_file_extension(cls):
return "feather"

View File

@ -1,7 +1,5 @@
import logging
import re
from pathlib import Path
from typing import List, Optional
from typing import Optional
import numpy as np
import pandas as pd
@ -20,26 +18,6 @@ class HDF5DataHandler(IDataHandler):
_columns = DEFAULT_DATAFRAME_COLUMNS
@classmethod
def ohlcv_get_pairs(cls, datadir: Path, timeframe: str, candle_type: CandleType) -> List[str]:
"""
Returns a list of all pairs with ohlcv data available in this datadir
for the specified timeframe
:param datadir: Directory to search for ohlcv files
:param timeframe: Timeframe to search pairs for
:param candle_type: Any of the enum CandleType (must match trading mode!)
:return: List of Pairs
"""
candle = ""
if candle_type != CandleType.SPOT:
datadir = datadir.joinpath('futures')
candle = f"-{candle_type}"
_tmp = [re.search(r'^(\S+)(?=\-' + timeframe + candle + '.h5)', p.name)
for p in datadir.glob(f"*{timeframe}{candle}.h5")]
# Check if regex found something and only return these results
return [cls.rebuild_pair_from_filename(match[0]) for match in _tmp if match]
def ohlcv_store(
self, pair: str, timeframe: str, data: pd.DataFrame, candle_type: CandleType) -> None:
"""
@ -103,6 +81,7 @@ class HDF5DataHandler(IDataHandler):
raise ValueError("Wrong dataframe format")
pairdata = pairdata.astype(dtype={'open': 'float', 'high': 'float',
'low': 'float', 'close': 'float', 'volume': 'float'})
pairdata = pairdata.reset_index(drop=True)
return pairdata
def ohlcv_append(
@ -121,18 +100,6 @@ class HDF5DataHandler(IDataHandler):
"""
raise NotImplementedError()
@classmethod
def trades_get_pairs(cls, datadir: Path) -> List[str]:
"""
Returns a list of all pairs for which trade data is available in this
:param datadir: Directory to search for ohlcv files
:return: List of Pairs
"""
_tmp = [re.search(r'^(\S+)(?=\-trades.h5)', p.name)
for p in datadir.glob("*trades.h5")]
# Check if regex found something and only return these results to avoid exceptions.
return [cls.rebuild_pair_from_filename(match[0]) for match in _tmp if match]
def trades_store(self, pair: str, data: TradeList) -> None:
"""
Store trades data (list of Dicts) to file

View File

@ -26,7 +26,7 @@ logger = logging.getLogger(__name__)
class IDataHandler(ABC):
_OHLCV_REGEX = r'^([a-zA-Z_-]+)\-(\d+[a-zA-Z]{1,2})\-?([a-zA-Z_]*)?(?=\.)'
_OHLCV_REGEX = r'^([a-zA-Z_\d-]+)\-(\d+[a-zA-Z]{1,2})\-?([a-zA-Z_]*)?(?=\.)'
def __init__(self, datadir: Path) -> None:
self._datadir = datadir
@ -61,7 +61,6 @@ class IDataHandler(ABC):
) for match in _tmp if match and len(match.groups()) > 1]
@classmethod
@abstractmethod
def ohlcv_get_pairs(cls, datadir: Path, timeframe: str, candle_type: CandleType) -> List[str]:
"""
Returns a list of all pairs with ohlcv data available in this datadir
@ -71,6 +70,15 @@ class IDataHandler(ABC):
:param candle_type: Any of the enum CandleType (must match trading mode!)
:return: List of Pairs
"""
candle = ""
if candle_type != CandleType.SPOT:
datadir = datadir.joinpath('futures')
candle = f"-{candle_type}"
ext = cls._get_file_extension()
_tmp = [re.search(r'^(\S+)(?=\-' + timeframe + candle + f'.{ext})', p.name)
for p in datadir.glob(f"*{timeframe}{candle}.{ext}")]
# Check if regex found something and only return these results
return [cls.rebuild_pair_from_filename(match[0]) for match in _tmp if match]
@abstractmethod
def ohlcv_store(
@ -144,13 +152,17 @@ class IDataHandler(ABC):
"""
@classmethod
@abstractmethod
def trades_get_pairs(cls, datadir: Path) -> List[str]:
"""
Returns a list of all pairs for which trade data is available in this datadir
:param datadir: Directory to search for trades files
:return: List of Pairs
"""
_ext = cls._get_file_extension()
_tmp = [re.search(r'^(\S+)(?=\-trades.' + _ext + ')', p.name)
for p in datadir.glob(f"*trades.{_ext}")]
# Check if regex found something and only return these results to avoid exceptions.
return [cls.rebuild_pair_from_filename(match[0]) for match in _tmp if match]
@abstractmethod
def trades_store(self, pair: str, data: TradeList) -> None:
@ -255,7 +267,7 @@ class IDataHandler(ABC):
Rebuild pair name from filename
Assumes an asset name of max. 7 characters to also support BTC-PERP and BTC-PERP:USD names.
"""
res = re.sub(r'^(([A-Za-z]{1,10})|^([A-Za-z\-]{1,6}))(_)', r'\g<1>/', pair, 1)
res = re.sub(r'^(([A-Za-z\d]{1,10})|^([A-Za-z\-]{1,6}))(_)', r'\g<1>/', pair, 1)
res = re.sub('_', ':', res, 1)
return res
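The widened character classes matter for pairs whose base asset starts with a digit (e.g. 1INCH). A standalone check of the two patterns, copied from above:

import re

ohlcv_regex = r'^([a-zA-Z_\d-]+)\-(\d+[a-zA-Z]{1,2})\-?([a-zA-Z_]*)?(?=\.)'
m = re.search(ohlcv_regex, '1INCH_USDT-5m.json')
assert m and m.group(1) == '1INCH_USDT' and m.group(2) == '5m'

pair = re.sub(r'^(([A-Za-z\d]{1,10})|^([A-Za-z\-]{1,6}))(_)', r'\g<1>/', '1INCH_USDT', 1)
pair = re.sub('_', ':', pair, 1)
assert pair == '1INCH/USDT'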
@ -363,6 +375,12 @@ def get_datahandlerclass(datatype: str) -> Type[IDataHandler]:
elif datatype == 'hdf5':
from .hdf5datahandler import HDF5DataHandler
return HDF5DataHandler
elif datatype == 'feather':
from .featherdatahandler import FeatherDataHandler
return FeatherDataHandler
elif datatype == 'parquet':
from .parquetdatahandler import ParquetDataHandler
return ParquetDataHandler
else:
raise ValueError(f"No datahandler for datatype {datatype} available.")
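A minimal sketch of how the two new handlers can be obtained through the factory above; the data directory is a placeholder, and in normal use they are selected via the dataformat_ohlcv setting rather than by calling the factory directly.

from pathlib import Path

from freqtrade.data.history.idatahandler import get_datahandlerclass

handler_cls = get_datahandlerclass('feather')          # or 'parquet'
handler = handler_cls(Path('user_data/data/binance'))  # placeholder datadir
print(handler._get_file_extension())                   # 'feather'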

View File

@ -1,7 +1,5 @@
import logging
import re
from pathlib import Path
from typing import List, Optional
from typing import Optional
import numpy as np
from pandas import DataFrame, read_json, to_datetime
@ -23,26 +21,6 @@ class JsonDataHandler(IDataHandler):
_use_zip = False
_columns = DEFAULT_DATAFRAME_COLUMNS
@classmethod
def ohlcv_get_pairs(cls, datadir: Path, timeframe: str, candle_type: CandleType) -> List[str]:
"""
Returns a list of all pairs with ohlcv data available in this datadir
for the specified timeframe
:param datadir: Directory to search for ohlcv files
:param timeframe: Timeframe to search pairs for
:param candle_type: Any of the enum CandleType (must match trading mode!)
:return: List of Pairs
"""
candle = ""
if candle_type != CandleType.SPOT:
datadir = datadir.joinpath('futures')
candle = f"-{candle_type}"
_tmp = [re.search(r'^(\S+)(?=\-' + timeframe + candle + '.json)', p.name)
for p in datadir.glob(f"*{timeframe}{candle}.{cls._get_file_extension()}")]
# Check if regex found something and only return these results
return [cls.rebuild_pair_from_filename(match[0]) for match in _tmp if match]
def ohlcv_store(
self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType) -> None:
"""
@ -119,18 +97,6 @@ class JsonDataHandler(IDataHandler):
"""
raise NotImplementedError()
@classmethod
def trades_get_pairs(cls, datadir: Path) -> List[str]:
"""
Returns a list of all pairs for which trade data is available in this
:param datadir: Directory to search for ohlcv files
:return: List of Pairs
"""
_tmp = [re.search(r'^(\S+)(?=\-trades.json)', p.name)
for p in datadir.glob(f"*trades.{cls._get_file_extension()}")]
# Check if regex found something and only return these results to avoid exceptions.
return [cls.rebuild_pair_from_filename(match[0]) for match in _tmp if match]
def trades_store(self, pair: str, data: TradeList) -> None:
"""
Store trades data (list of Dicts) to file

View File

@ -0,0 +1,129 @@
import logging
from typing import Optional
from pandas import DataFrame, read_parquet, to_datetime
from freqtrade.configuration import TimeRange
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, TradeList
from freqtrade.enums import CandleType
from .idatahandler import IDataHandler
logger = logging.getLogger(__name__)
class ParquetDataHandler(IDataHandler):
_columns = DEFAULT_DATAFRAME_COLUMNS
def ohlcv_store(
self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType) -> None:
"""
Store ohlcv data in parquet format.
:param pair: Pair - used to generate filename
:param timeframe: Timeframe - used to generate filename
:param data: Dataframe containing OHLCV data
:param candle_type: Any of the enum CandleType (must match trading mode!)
:return: None
"""
filename = self._pair_data_filename(self._datadir, pair, timeframe, candle_type)
self.create_dir_if_needed(filename)
data.reset_index(drop=True).loc[:, self._columns].to_parquet(filename)
def _ohlcv_load(self, pair: str, timeframe: str,
timerange: Optional[TimeRange], candle_type: CandleType
) -> DataFrame:
"""
Internal method used to load data for one pair from disk.
Implements the loading and conversion to a Pandas dataframe.
Timerange trimming and dataframe validation happens outside of this method.
:param pair: Pair to load data
:param timeframe: Timeframe (e.g. "5m")
:param timerange: Limit data to be loaded to this timerange.
Optionally implemented by subclasses to avoid loading
all data where possible.
:param candle_type: Any of the enum CandleType (must match trading mode!)
:return: DataFrame with ohlcv data, or empty DataFrame
"""
filename = self._pair_data_filename(
self._datadir, pair, timeframe, candle_type=candle_type)
if not filename.exists():
# Fallback mode for 1M files
filename = self._pair_data_filename(
self._datadir, pair, timeframe, candle_type=candle_type, no_timeframe_modify=True)
if not filename.exists():
return DataFrame(columns=self._columns)
pairdata = read_parquet(filename)
pairdata.columns = self._columns
pairdata = pairdata.astype(dtype={'open': 'float', 'high': 'float',
'low': 'float', 'close': 'float', 'volume': 'float'})
pairdata['date'] = to_datetime(pairdata['date'],
unit='ms',
utc=True,
infer_datetime_format=True)
return pairdata
def ohlcv_append(
self,
pair: str,
timeframe: str,
data: DataFrame,
candle_type: CandleType
) -> None:
"""
Append data to existing data structures
:param pair: Pair
:param timeframe: Timeframe this ohlcv data is for
:param data: Data to append.
:param candle_type: Any of the enum CandleType (must match trading mode!)
"""
raise NotImplementedError()
def trades_store(self, pair: str, data: TradeList) -> None:
"""
Store trades data (list of Dicts) to file
:param pair: Pair - used for filename
:param data: List of Lists containing trade data,
column sequence as in DEFAULT_TRADES_COLUMNS
"""
# filename = self._pair_trades_filename(self._datadir, pair)
raise NotImplementedError()
# array = pa.array(data)
# array
# feather.write_feather(data, filename)
def trades_append(self, pair: str, data: TradeList):
"""
Append data to existing files
:param pair: Pair - used for filename
:param data: List of Lists containing trade data,
column sequence as in DEFAULT_TRADES_COLUMNS
"""
raise NotImplementedError()
def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> TradeList:
"""
Load trades for a pair from file - not yet implemented for this data handler.
# TODO: respect timerange ...
:param pair: Load trades for this pair
:param timerange: Timerange to load trades for - currently not implemented
:return: List of trades
"""
raise NotImplementedError()
# filename = self._pair_trades_filename(self._datadir, pair)
# tradesdata = misc.file_load_json(filename)
# if not tradesdata:
# return []
# return tradesdata
@classmethod
def _get_file_extension(cls):
return "parquet"

View File

@ -11,7 +11,7 @@ import utils_find_1st as utf1st
from pandas import DataFrame
from freqtrade.configuration import TimeRange
from freqtrade.constants import DATETIME_PRINT_FORMAT, UNLIMITED_STAKE_AMOUNT
from freqtrade.constants import DATETIME_PRINT_FORMAT, UNLIMITED_STAKE_AMOUNT, Config
from freqtrade.data.history import get_timerange, load_data, refresh_data
from freqtrade.enums import CandleType, ExitType, RunMode
from freqtrade.exceptions import OperationalException
@ -42,10 +42,9 @@ class Edge:
Author: https://github.com/mishaker
"""
config: Dict = {}
_cached_pairs: Dict[str, Any] = {} # Keeps a list of pairs
def __init__(self, config: Dict[str, Any], exchange, strategy) -> None:
def __init__(self, config: Config, exchange, strategy) -> None:
self.config = config
self.exchange = exchange
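For readers following the type annotation changes in this commit: Config comes from freqtrade.constants and, as far as I can tell, is a plain type alias rather than a new class, roughly:

# Presumed definition in freqtrade/constants.py (type alias only, no runtime change):
from typing import Any, Dict

Config = Dict[str, Any]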

View File

@ -6,7 +6,7 @@ from freqtrade.enums.exittype import ExitType
from freqtrade.enums.hyperoptstate import HyperoptState
from freqtrade.enums.marginmode import MarginMode
from freqtrade.enums.ordertypevalue import OrderTypeValues
from freqtrade.enums.rpcmessagetype import RPCMessageType
from freqtrade.enums.rpcmessagetype import RPCMessageType, RPCRequestType
from freqtrade.enums.runmode import NON_UTIL_MODES, OPTIMIZE_MODES, TRADING_MODES, RunMode
from freqtrade.enums.signaltype import SignalDirection, SignalTagType, SignalType
from freqtrade.enums.state import State

View File

@ -1,7 +1,7 @@
from enum import Enum
class RPCMessageType(Enum):
class RPCMessageType(str, Enum):
STATUS = 'status'
WARNING = 'warning'
STARTUP = 'startup'
@ -19,8 +19,19 @@ class RPCMessageType(Enum):
STRATEGY_MSG = 'strategy_msg'
WHITELIST = 'whitelist'
ANALYZED_DF = 'analyzed_df'
def __repr__(self):
return self.value
def __str__(self):
return self.value
# Enum for parsing requests from ws consumers
class RPCRequestType(str, Enum):
SUBSCRIBE = 'subscribe'
WHITELIST = 'whitelist'
ANALYZED_DF = 'analyzed_df'
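Because both enums now inherit from str (and RPCMessageType overrides __str__/__repr__), members compare and serialize as their plain values, which is convenient for websocket payloads. A small illustration:

from freqtrade.enums import RPCMessageType, RPCRequestType

assert RPCMessageType.ANALYZED_DF == 'analyzed_df'       # str mixin: compares to the raw value
assert str(RPCMessageType.WHITELIST) == 'whitelist'
assert RPCRequestType('subscribe') is RPCRequestType.SUBSCRIBE  # parse incoming ws requests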

View File

@ -1,5 +1,4 @@
""" Binance exchange subclass """
import json
import logging
from datetime import datetime
from pathlib import Path
@ -12,7 +11,7 @@ from freqtrade.enums import CandleType, MarginMode, TradingMode
from freqtrade.exceptions import DDosProtection, OperationalException, TemporaryError
from freqtrade.exchange import Exchange
from freqtrade.exchange.common import retrier
from freqtrade.misc import deep_merge_dicts
from freqtrade.misc import deep_merge_dicts, json_load
logger = logging.getLogger(__name__)
@ -200,7 +199,7 @@ class Binance(Exchange):
Path(__file__).parent / 'binance_leverage_tiers.json'
)
with open(leverage_tiers_path) as json_file:
return json.load(json_file)
return json_load(json_file)
else:
try:
return self._api.fetch_leverage_tiers()

View File

@ -21,7 +21,8 @@ from dateutil import parser
from pandas import DataFrame
from freqtrade.constants import (DEFAULT_AMOUNT_RESERVE_PERCENT, NON_OPEN_EXCHANGE_STATES, BuySell,
EntryExit, ListPairsWithTimeframes, MakerTaker, PairWithTimeframe)
Config, EntryExit, ListPairsWithTimeframes, MakerTaker,
PairWithTimeframe)
from freqtrade.data.converter import ohlcv_to_dataframe, trades_dict_to_list
from freqtrade.enums import OPTIMIZE_MODES, CandleType, MarginMode, TradingMode
from freqtrade.exceptions import (DDosProtection, ExchangeError, InsufficientFundsError,
@ -91,7 +92,7 @@ class Exchange:
# TradingMode.SPOT always supported and not required in this list
]
def __init__(self, config: Dict[str, Any], validate: bool = True,
def __init__(self, config: Config, validate: bool = True,
load_leverage_tiers: bool = False) -> None:
"""
Initializes this module with the given config,
@ -108,7 +109,7 @@ class Exchange:
self._loop_lock = Lock()
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(self.loop)
self._config: Dict = {}
self._config: Config = {}
self._config.update(config)
@ -2890,7 +2891,7 @@ def amount_to_contracts(amount: float, contract_size: Optional[float]) -> float:
:return: num-contracts
"""
if contract_size and contract_size != 1:
return amount / contract_size
return float(FtPrecise(amount) / FtPrecise(contract_size))
else:
return amount
@ -2904,7 +2905,7 @@ def contracts_to_amount(num_contracts: float, contract_size: Optional[float]) ->
"""
if contract_size and contract_size != 1:
return num_contracts * contract_size
return float(FtPrecise(num_contracts) * FtPrecise(contract_size))
else:
return num_contracts
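Routing the conversion through FtPrecise sidesteps binary-float rounding when the amount and contract size don't divide cleanly. A rough sketch with arbitrary values:

from freqtrade.util import FtPrecise

amount, contract_size = 0.3, 0.1
print(amount / contract_size)                               # 2.9999999999999996 (float drift)
print(float(FtPrecise(amount) / FtPrecise(contract_size)))  # 3.0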

View File

@ -71,6 +71,7 @@ class Okx(Exchange):
try:
if self.trading_mode == TradingMode.FUTURES and not self._config['dry_run']:
accounts = self._api.fetch_accounts()
self._log_exchange_response('fetch_accounts', accounts)
if len(accounts) > 0:
self.net_only = accounts[0].get('info', {}).get('posMode') == 'net_mode'
except ccxt.DDoSProtection as e:

View File

@ -1,4 +1,5 @@
import logging
from time import time
from typing import Any, Tuple
import numpy as np
@ -32,7 +33,9 @@ class BaseClassifierModel(IFreqaiModel):
:model: Trained model which can be used to inference (self.predict)
"""
logger.info("-------------------- Starting training " f"{pair} --------------------")
logger.info(f"-------------------- Starting training {pair} --------------------")
start_time = time()
# filter the features requested by user in the configuration file and elegantly handle NaNs
features_filtered, labels_filtered = dk.filter_features(
@ -45,10 +48,10 @@ class BaseClassifierModel(IFreqaiModel):
start_date = unfiltered_df["date"].iloc[0].strftime("%Y-%m-%d")
end_date = unfiltered_df["date"].iloc[-1].strftime("%Y-%m-%d")
logger.info(f"-------------------- Training on data from {start_date} to "
f"{end_date}--------------------")
f"{end_date} --------------------")
# split data into train/test data.
data_dictionary = dk.make_train_test_datasets(features_filtered, labels_filtered)
if not self.freqai_info.get('fit_live_predictions', 0) or not self.live:
if not self.freqai_info.get("fit_live_predictions", 0) or not self.live:
dk.fit_labels()
# normalize all data based on train_dataset only
data_dictionary = dk.normalize_data(data_dictionary)
@ -57,13 +60,16 @@ class BaseClassifierModel(IFreqaiModel):
self.data_cleaning_train(dk)
logger.info(
f'Training model on {len(dk.data_dictionary["train_features"].columns)}' " features"
f"Training model on {len(dk.data_dictionary['train_features'].columns)} features"
)
logger.info(f'Training model on {len(data_dictionary["train_features"])} data points')
logger.info(f"Training model on {len(data_dictionary['train_features'])} data points")
model = self.fit(data_dictionary, dk)
logger.info(f"--------------------done training {pair}--------------------")
end_time = time()
logger.info(f"-------------------- Done training {pair} "
f"({end_time - start_time:.2f} secs) --------------------")
return model

View File

@ -1,4 +1,5 @@
import logging
from time import time
from typing import Any, Tuple
import numpy as np
@ -31,7 +32,9 @@ class BaseRegressionModel(IFreqaiModel):
:model: Trained model which can be used to inference (self.predict)
"""
logger.info("-------------------- Starting training " f"{pair} --------------------")
logger.info(f"-------------------- Starting training {pair} --------------------")
start_time = time()
# filter the features requested by user in the configuration file and elegantly handle NaNs
features_filtered, labels_filtered = dk.filter_features(
@ -44,10 +47,10 @@ class BaseRegressionModel(IFreqaiModel):
start_date = unfiltered_df["date"].iloc[0].strftime("%Y-%m-%d")
end_date = unfiltered_df["date"].iloc[-1].strftime("%Y-%m-%d")
logger.info(f"-------------------- Training on data from {start_date} to "
f"{end_date}--------------------")
f"{end_date} --------------------")
# split data into train/test data.
data_dictionary = dk.make_train_test_datasets(features_filtered, labels_filtered)
if not self.freqai_info.get('fit_live_predictions', 0) or not self.live:
if not self.freqai_info.get("fit_live_predictions", 0) or not self.live:
dk.fit_labels()
# normalize all data based on train_dataset only
data_dictionary = dk.normalize_data(data_dictionary)
@ -56,13 +59,16 @@ class BaseRegressionModel(IFreqaiModel):
self.data_cleaning_train(dk)
logger.info(
f'Training model on {len(dk.data_dictionary["train_features"].columns)}' " features"
f"Training model on {len(dk.data_dictionary['train_features'].columns)} features"
)
logger.info(f'Training model on {len(data_dictionary["train_features"])} data points')
logger.info(f"Training model on {len(data_dictionary['train_features'])} data points")
model = self.fit(data_dictionary, dk)
logger.info(f"--------------------done training {pair}--------------------")
end_time = time()
logger.info(f"-------------------- Done training {pair} "
f"({end_time - start_time:.2f} secs) --------------------")
return model

View File

@ -1,4 +1,5 @@
import logging
from time import time
from typing import Any
from pandas import DataFrame
@ -28,7 +29,9 @@ class BaseTensorFlowModel(IFreqaiModel):
:model: Trained model which can be used to inference (self.predict)
"""
logger.info("-------------------- Starting training " f"{pair} --------------------")
logger.info(f"-------------------- Starting training {pair} --------------------")
start_time = time()
# filter the features requested by user in the configuration file and elegantly handle NaNs
features_filtered, labels_filtered = dk.filter_features(
@ -41,10 +44,10 @@ class BaseTensorFlowModel(IFreqaiModel):
start_date = unfiltered_df["date"].iloc[0].strftime("%Y-%m-%d")
end_date = unfiltered_df["date"].iloc[-1].strftime("%Y-%m-%d")
logger.info(f"-------------------- Training on data from {start_date} to "
f"{end_date}--------------------")
f"{end_date} --------------------")
# split data into train/test data.
data_dictionary = dk.make_train_test_datasets(features_filtered, labels_filtered)
if not self.freqai_info.get('fit_live_predictions', 0) or not self.live:
if not self.freqai_info.get("fit_live_predictions", 0) or not self.live:
dk.fit_labels()
# normalize all data based on train_dataset only
data_dictionary = dk.normalize_data(data_dictionary)
@ -53,12 +56,15 @@ class BaseTensorFlowModel(IFreqaiModel):
self.data_cleaning_train(dk)
logger.info(
f'Training model on {len(dk.data_dictionary["train_features"].columns)}' " features"
f"Training model on {len(dk.data_dictionary['train_features'].columns)} features"
)
logger.info(f'Training model on {len(data_dictionary["train_features"])} data points')
logger.info(f"Training model on {len(data_dictionary['train_features'])} data points")
model = self.fit(data_dictionary, dk)
logger.info(f"--------------------done training {pair}--------------------")
end_time = time()
logger.info(f"-------------------- Done training {pair} "
f"({end_time - start_time:.2f} secs) --------------------")
return model

View File

@ -1,4 +1,3 @@
from joblib import Parallel
from sklearn.multioutput import MultiOutputRegressor, _fit_estimator
from sklearn.utils.fixes import delayed

View File

@ -16,6 +16,7 @@ from numpy.typing import NDArray
from pandas import DataFrame
from freqtrade.configuration import TimeRange
from freqtrade.constants import Config
from freqtrade.data.history import load_pair_history
from freqtrade.exceptions import OperationalException
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
@ -27,9 +28,7 @@ logger = logging.getLogger(__name__)
class pair_info(TypedDict):
model_filename: str
first: bool
trained_timestamp: int
priority: int
data_path: str
extras: dict
@ -58,7 +57,7 @@ class FreqaiDataDrawer:
Juha Nykänen @suikula, Wagner Costa @wagnercosta, Johan Vlugt @Jooopieeert
"""
def __init__(self, full_path: Path, config: dict, follow_mode: bool = False):
def __init__(self, full_path: Path, config: Config, follow_mode: bool = False):
self.config = config
self.freqai_info = config.get("freqai", {})
@ -91,7 +90,7 @@ class FreqaiDataDrawer:
self.old_DBSCAN_eps: Dict[str, float] = {}
self.empty_pair_dict: pair_info = {
"model_filename": "", "trained_timestamp": 0,
"priority": 1, "first": True, "data_path": "", "extras": {}}
"data_path": "", "extras": {}}
def load_drawer_from_disk(self):
"""
@ -216,7 +215,6 @@ class FreqaiDataDrawer:
self.pair_dict[pair] = self.empty_pair_dict.copy()
model_filename = ""
trained_timestamp = 0
self.pair_dict[pair]["priority"] = len(self.pair_dict)
if not data_path_set and self.follow_mode:
logger.warning(
@ -236,18 +234,9 @@ class FreqaiDataDrawer:
return
else:
self.pair_dict[metadata["pair"]] = self.empty_pair_dict.copy()
self.pair_dict[metadata["pair"]]["priority"] = len(self.pair_dict)
return
def pair_to_end_of_training_queue(self, pair: str) -> None:
# march all pairs up in the queue
with self.pair_dict_lock:
for p in self.pair_dict:
self.pair_dict[p]["priority"] -= 1
# send pair to end of queue
self.pair_dict[pair]["priority"] = len(self.pair_dict)
def set_initial_return_values(self, pair: str, pred_df: DataFrame) -> None:
"""
Set the initial return values to the historical predictions dataframe. This avoids needing
@ -441,6 +430,16 @@ class FreqaiDataDrawer:
return
def load_metadata(self, dk: FreqaiDataKitchen) -> None:
"""
Load only metadata into datakitchen to increase performance during
presaved backtesting (prediction file loading).
"""
with open(dk.data_path / f"{dk.model_filename}_metadata.json", "r") as fp:
dk.data = json.load(fp)
dk.training_features_list = dk.data["training_features_list"]
dk.label_list = dk.data["label_list"]
def load_data(self, coin: str, dk: FreqaiDataKitchen) -> Any:
"""
loads all data required to make a prediction on a sub-train time range

View File

@ -18,6 +18,7 @@ from sklearn.model_selection import train_test_split
from sklearn.neighbors import NearestNeighbors
from freqtrade.configuration import TimeRange
from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import timeframe_to_seconds
from freqtrade.strategy.interface import IStrategy
@ -57,7 +58,7 @@ class FreqaiDataKitchen:
def __init__(
self,
config: Dict[str, Any],
config: Config,
live: bool = False,
pair: str = "",
):
@ -466,27 +467,6 @@ class FreqaiDataKitchen:
return df
def remove_training_from_backtesting(
self
) -> DataFrame:
"""
Function which takes the backtesting time range and
remove training data from dataframe, keeping only the
startup_candle_count candles
"""
startup_candle_count = self.config.get('startup_candle_count', 0)
tf = self.config['timeframe']
tr = self.config["timerange"]
backtesting_timerange = TimeRange.parse_timerange(tr)
if startup_candle_count > 0 and backtesting_timerange:
backtesting_timerange.subtract_start(timeframe_to_seconds(tf) * startup_candle_count)
start = datetime.fromtimestamp(backtesting_timerange.startts, tz=timezone.utc)
df = self.return_dataframe
df = df.loc[df["date"] >= start, :]
return df
def principal_component_analysis(self) -> None:
"""
Performs Principal Component Analysis on the data for dimensionality reduction
@ -775,12 +755,22 @@ class FreqaiDataKitchen:
def compute_inlier_metric(self, set_='train') -> None:
"""
Compute inlier metric from backwards distance distributions.
This metric defines how well features from a timepoint fit
into previous timepoints.
"""
def normalise(dataframe: DataFrame, key: str) -> DataFrame:
if set_ == 'train':
min_value = dataframe.min()
max_value = dataframe.max()
self.data[f'{key}_min'] = min_value
self.data[f'{key}_max'] = max_value
else:
min_value = self.data[f'{key}_min']
max_value = self.data[f'{key}_max']
return (dataframe - min_value) / (max_value - min_value)
no_prev_pts = self.freqai_config["feature_parameters"]["inlier_metric_window"]
if set_ == 'train':
@ -825,7 +815,12 @@ class FreqaiDataKitchen:
inliers = pd.DataFrame(index=distances.index)
for key in distances.keys():
current_distances = distances[key].dropna()
current_distances = normalise(current_distances, key)
if set_ == 'train':
fit_params = stats.weibull_min.fit(current_distances)
self.data[f'{key}_fit_params'] = fit_params
else:
fit_params = self.data[f'{key}_fit_params']
quantiles = stats.weibull_min.cdf(current_distances, *fit_params)
df_inlier = pd.DataFrame(
@ -979,8 +974,6 @@ class FreqaiDataKitchen:
to_keep = [col for col in dataframe.columns if not col.startswith("&")]
self.return_dataframe = pd.concat([dataframe[to_keep], self.full_df], axis=1)
# self.return_dataframe = self.remove_training_from_backtesting()
self.full_df = DataFrame()
return
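The essential pattern behind the normalise() closure and the cached Weibull fit parameters above is: fit statistics on the training set only, then re-apply them unchanged to later data. A stripped-down sketch of that idea, independent of the FreqAI classes:

import pandas as pd

stats_cache: dict = {}

def normalise(series: pd.Series, key: str, is_train: bool) -> pd.Series:
    # Fit min/max on the training set, reuse the cached values for any later set
    if is_train:
        stats_cache[f'{key}_min'] = series.min()
        stats_cache[f'{key}_max'] = series.max()
    mn, mx = stats_cache[f'{key}_min'], stats_cache[f'{key}_max']
    return (series - mn) / (mx - mn)

train = pd.Series([1.0, 2.0, 3.0])
test = pd.Series([2.5, 4.0])                      # 4.0 lies outside the train range
print(normalise(train, 'dist', True).tolist())    # [0.0, 0.5, 1.0]
print(normalise(test, 'dist', False).tolist())    # [0.75, 1.5]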

View File

@ -3,6 +3,7 @@ import shutil
import threading
import time
from abc import ABC, abstractmethod
from collections import deque
from datetime import datetime, timezone
from pathlib import Path
from threading import Lock
@ -14,12 +15,13 @@ from numpy.typing import NDArray
from pandas import DataFrame
from freqtrade.configuration import TimeRange
from freqtrade.constants import DATETIME_PRINT_FORMAT
from freqtrade.constants import DATETIME_PRINT_FORMAT, Config
from freqtrade.enums import RunMode
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import timeframe_to_seconds
from freqtrade.freqai.data_drawer import FreqaiDataDrawer
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
from freqtrade.freqai.utils import plot_feature_importance
from freqtrade.strategy.interface import IStrategy
@ -50,7 +52,7 @@ class IFreqaiModel(ABC):
Juha Nykänen @suikula, Wagner Costa @wagnercosta, Johan Vlugt @Jooopieeert
"""
def __init__(self, config: Dict[str, Any]) -> None:
def __init__(self, config: Config) -> None:
self.config = config
self.assert_config(self.config)
@ -63,7 +65,7 @@ class IFreqaiModel(ABC):
self.first = True
self.set_full_path()
self.follow_mode: bool = self.freqai_info.get("follow_mode", False)
self.save_backtest_models: bool = self.freqai_info.get("save_backtest_models", False)
self.save_backtest_models: bool = self.freqai_info.get("save_backtest_models", True)
if self.save_backtest_models:
logger.info('Backtesting module configured to save all models.')
self.dd = FreqaiDataDrawer(Path(self.full_path), self.config, self.follow_mode)
@ -80,6 +82,7 @@ class IFreqaiModel(ABC):
self.pair_it = 0
self.pair_it_train = 0
self.total_pairs = len(self.config.get("exchange", {}).get("pair_whitelist"))
self.train_queue = self._set_train_queue()
self.last_trade_database_summary: DataFrame = {}
self.current_trade_database_summary: DataFrame = {}
self.analysis_lock = Lock()
@ -99,7 +102,7 @@ class IFreqaiModel(ABC):
"""
return ({})
def assert_config(self, config: Dict[str, Any]) -> None:
def assert_config(self, config: Config) -> None:
if not config.get("freqai", {}):
raise OperationalException("No freqai parameters found in configuration file.")
@ -181,12 +184,16 @@ class IFreqaiModel(ABC):
"""
while not self._stop_event.is_set():
time.sleep(1)
for pair in self.config.get("exchange", {}).get("pair_whitelist"):
pair = self.train_queue[0]
# ensure pair is available in the dataprovider
if pair not in strategy.dp.current_whitelist():
self.train_queue.popleft()
logger.warning(f'{pair} not in current whitelist, removing from train queue.')
continue
(_, trained_timestamp, _) = self.dd.get_pair_dict_info(pair)
if self.dd.pair_dict[pair]["priority"] != 1:
continue
dk = FreqaiDataKitchen(self.config, self.live, pair)
dk.set_paths(pair, trained_timestamp)
(
@ -198,11 +205,18 @@ class IFreqaiModel(ABC):
if retrain:
self.train_timer('start')
try:
self.extract_data_and_train_model(
new_trained_timerange, pair, strategy, dk, data_load_timerange
)
except Exception as msg:
logger.warning(f'Training {pair} raised exception {msg}, skipping.')
self.train_timer('stop')
# only rotate the queue after the first has been trained.
self.train_queue.rotate(-1)
self.dd.save_historic_predictions_to_disk()
def start_backtesting(
@ -230,7 +244,8 @@ class IFreqaiModel(ABC):
# following tr_train. Both of these windows slide through the
# entire backtest
for tr_train, tr_backtest in zip(dk.training_timeranges, dk.backtesting_timeranges):
(_, _, _) = self.dd.get_pair_dict_info(metadata["pair"])
pair = metadata["pair"]
(_, _, _) = self.dd.get_pair_dict_info(pair)
train_it += 1
total_trains = len(dk.backtesting_timeranges)
self.training_timerange = tr_train
@ -245,37 +260,37 @@ class IFreqaiModel(ABC):
tr_train.stopts,
tz=timezone.utc).strftime(DATETIME_PRINT_FORMAT)
logger.info(
f"Training {metadata['pair']}, {self.pair_it}/{self.total_pairs} pairs"
f"Training {pair}, {self.pair_it}/{self.total_pairs} pairs"
f" from {tr_train_startts_str} to {tr_train_stopts_str}, {train_it}/{total_trains} "
"trains"
)
trained_timestamp_int = int(trained_timestamp.stopts)
dk.data_path = Path(
dk.full_path
/
f"sub-train-{metadata['pair'].split('/')[0]}_{trained_timestamp_int}"
dk.full_path / f"sub-train-{pair.split('/')[0]}_{trained_timestamp_int}"
)
dk.set_new_model_names(metadata["pair"], trained_timestamp)
dk.set_new_model_names(pair, trained_timestamp)
if dk.check_if_backtest_prediction_exists():
self.dd.load_metadata(dk)
self.check_if_feature_list_matches_strategy(dataframe_train, dk)
append_df = dk.get_backtesting_prediction()
dk.append_predictions(append_df)
else:
if not self.model_exists(
metadata["pair"], dk, trained_timestamp=trained_timestamp_int
pair, dk, trained_timestamp=trained_timestamp_int
):
dk.find_features(dataframe_train)
self.model = self.train(dataframe_train, metadata["pair"], dk)
self.dd.pair_dict[metadata["pair"]]["trained_timestamp"] = int(
self.model = self.train(dataframe_train, pair, dk)
self.dd.pair_dict[pair]["trained_timestamp"] = int(
trained_timestamp.stopts)
if self.save_backtest_models:
logger.info('Saving backtest model to disk.')
self.dd.save_data(self.model, metadata["pair"], dk)
self.dd.save_data(self.model, pair, dk)
else:
self.model = self.dd.load_data(metadata["pair"], dk)
self.model = self.dd.load_data(pair, dk)
self.check_if_feature_list_matches_strategy(dataframe_train, dk)
@ -416,14 +431,16 @@ class IFreqaiModel(ABC):
if "training_features_list_raw" in dk.data:
feature_list = dk.data["training_features_list_raw"]
else:
feature_list = dk.training_features_list
feature_list = dk.data['training_features_list']
if dk.training_features_list != feature_list:
raise OperationalException(
"Trying to access pretrained model with `identifier` "
"but found different features furnished by current strategy."
"Change `identifier` to train from scratch, or ensure the"
"strategy is furnishing the same features as the pretrained"
"model"
"model. In case of --strategy-list, please be aware that FreqAI "
"requires all strategies to maintain identical "
"populate_any_indicator() functions"
)
def data_cleaning_train(self, dk: FreqaiDataKitchen) -> None:
@ -557,11 +574,11 @@ class IFreqaiModel(ABC):
self.dd.pair_dict[pair]["trained_timestamp"] = new_trained_timerange.stopts
dk.set_new_model_names(pair, new_trained_timerange)
self.dd.pair_dict[pair]["first"] = False
if self.dd.pair_dict[pair]["priority"] == 1 and self.scanning:
self.dd.pair_to_end_of_training_queue(pair)
self.dd.save_data(model, pair, dk)
if self.freqai_info["feature_parameters"].get("plot_feature_importance", False):
plot_feature_importance(model, pair, dk)
if self.freqai_info.get("purge_old_models", False):
self.dd.purge_old_models()
@ -685,6 +702,32 @@ class IFreqaiModel(ABC):
return init_model
def _set_train_queue(self):
"""
Sets the train queue from existing train timestamps if they exist,
otherwise it sets the train queue based on the provided whitelist.
"""
current_pairlist = self.config.get("exchange", {}).get("pair_whitelist")
if not self.dd.pair_dict:
logger.info('Set fresh train queue from whitelist. '
f'Queue: {current_pairlist}')
return deque(current_pairlist)
best_queue = deque()
pair_dict_sorted = sorted(self.dd.pair_dict.items(),
key=lambda k: k[1]['trained_timestamp'])
for pair in pair_dict_sorted:
if pair[0] in current_pairlist:
best_queue.append(pair[0])
for pair in current_pairlist:
if pair not in best_queue:
best_queue.appendleft(pair)
logger.info('Set existing queue from trained timestamps. '
f'Best approximation queue: {best_queue}')
return best_queue
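A self-contained illustration of the queue mechanics used above: pairs are read from the left, pairs that dropped out of the whitelist are discarded with popleft(), and rotate(-1) sends the just-trained pair to the back so every pair gets a turn. Pair names are placeholders.

from collections import deque

train_queue = deque(['BTC/USDT', 'ETH/USDT', 'XRP/USDT'])

pair = train_queue[0]     # 'BTC/USDT' trains first
train_queue.rotate(-1)    # afterwards: ['ETH/USDT', 'XRP/USDT', 'BTC/USDT']

train_queue.popleft()     # drop 'ETH/USDT' if it left the whitelist
print(list(train_queue))  # ['XRP/USDT', 'BTC/USDT']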
def spice_rack(self, indicator: str, dataframe: DataFrame,
metadata: dict, strategy: IStrategy) -> NDArray:
if not self.spice_rack_open:

View File

@ -10,11 +10,13 @@ from scipy.signal import argrelextrema
from technical import qtpylib
from freqtrade.configuration import TimeRange
from freqtrade.constants import Config
from freqtrade.data.dataprovider import DataProvider
from freqtrade.data.history.history_utils import refresh_backtest_ohlcv_data
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import Exchange, timeframe_to_seconds
from freqtrade.exchange.exchange import market_is_active
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
from freqtrade.plugins.pairlist.pairlist_helpers import dynamic_expand_pairlist
from freqtrade.strategy import merge_informative_pair
@ -22,7 +24,7 @@ from freqtrade.strategy import merge_informative_pair
logger = logging.getLogger(__name__)
def download_all_data_for_training(dp: DataProvider, config: dict) -> None:
def download_all_data_for_training(dp: DataProvider, config: Config) -> None:
"""
Called only once upon start of bot to download the necessary data for
populating indicators and training the model.
@ -56,9 +58,7 @@ def download_all_data_for_training(dp: DataProvider, config: dict) -> None:
)
def get_required_data_timerange(
config: dict
) -> TimeRange:
def get_required_data_timerange(config: Config) -> TimeRange:
"""
Used to compute the required data download time range
for auto data-download in FreqAI
@ -226,7 +226,7 @@ def setup_freqai_spice_rack(config: dict, exchange: Optional[Exchange]) -> Dict[
return config
# Keep below for when we wish to download heterogeneously lengthed data for FreqAI.
# def download_all_data_for_training(dp: DataProvider, config: dict) -> None:
# def download_all_data_for_training(dp: DataProvider, config: Config) -> None:
# """
# Called only once upon start of bot to download the necessary data for
# populating indicators and training a FreqAI model.
@ -272,3 +272,58 @@ def setup_freqai_spice_rack(config: dict, exchange: Optional[Exchange]) -> Dict[
# trading_mode=config.get("trading_mode", "spot"),
# prepend=config.get("prepend_data", False),
# )
def plot_feature_importance(model: Any, pair: str, dk: FreqaiDataKitchen,
count_max: int = 25) -> None:
"""
Plot Best and worst features by importance for a single sub-train.
:param model: Any = A model which was `fit` using a common library
such as catboost or lightgbm
:param pair: str = pair e.g. BTC/USD
:param dk: FreqaiDataKitchen = non-persistent data container for current coin/loop
:param count_max: int = the number of best/worst features to plot per column
"""
from freqtrade.plot.plotting import go, make_subplots, store_plot_file
# Extract feature importance from model
models = {}
if 'FreqaiMultiOutputRegressor' in str(model.__class__):
for estimator, label in zip(model.estimators_, dk.label_list):
models[label] = estimator
else:
models[dk.label_list[0]] = model
for label in models:
mdl = models[label]
if "catboost.core" in str(mdl.__class__):
feature_importance = mdl.get_feature_importance()
elif "lightgbm.sklearn" or "xgb" in str(mdl.__class__):
feature_importance = mdl.feature_importances_
else:
logger.info('Model type not support for generating feature importances.')
return
# Data preparation
fi_df = pd.DataFrame({
"feature_names": np.array(dk.training_features_list),
"feature_importance": np.array(feature_importance)
})
fi_df_top = fi_df.nlargest(count_max, "feature_importance")[::-1]
fi_df_worst = fi_df.nsmallest(count_max, "feature_importance")[::-1]
# Plotting
def add_feature_trace(fig, fi_df, col):
return fig.add_trace(
go.Bar(
x=fi_df["feature_importance"],
y=fi_df["feature_names"],
orientation='h', showlegend=False
), row=1, col=col
)
fig = make_subplots(rows=1, cols=2, horizontal_spacing=0.5)
fig = add_feature_trace(fig, fi_df_top, 1)
fig = add_feature_trace(fig, fi_df_worst, 2)
fig.update_layout(title_text=f"Best and worst features by importance {pair}")
label = label.replace('&', '').replace('%', '') # escape two FreqAI specific characters
store_plot_file(fig, f"{dk.model_filename}-{label}.html", dk.data_path)

View File

@ -11,9 +11,9 @@ from typing import Any, Dict, List, Optional, Tuple
from schedule import Scheduler
from freqtrade import __version__, constants
from freqtrade import constants
from freqtrade.configuration import validate_config_consistency
from freqtrade.constants import BuySell, LongShort
from freqtrade.constants import BuySell, Config, LongShort
from freqtrade.data.converter import order_book_to_dataframe
from freqtrade.data.dataprovider import DataProvider
from freqtrade.edge import Edge
@ -29,6 +29,7 @@ from freqtrade.plugins.pairlistmanager import PairListManager
from freqtrade.plugins.protectionmanager import ProtectionManager
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
from freqtrade.rpc import RPCManager
from freqtrade.rpc.external_message_consumer import ExternalMessageConsumer
from freqtrade.strategy.interface import IStrategy
from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper
from freqtrade.util import FtPrecise
@ -44,7 +45,7 @@ class FreqtradeBot(LoggingMixin):
This is from here the bot start its logic.
"""
def __init__(self, config: Dict[str, Any]) -> None:
def __init__(self, config: Config) -> None:
"""
Init all variables and objects the bot needs to work
:param config: configuration dict, you can use Configuration.get_config()
@ -52,8 +53,6 @@ class FreqtradeBot(LoggingMixin):
"""
self.active_pair_whitelist: List[str] = []
logger.info('Starting freqtrade %s', __version__)
# Init bot state
self.state = State.STOPPED
@ -74,6 +73,8 @@ class FreqtradeBot(LoggingMixin):
PairLocks.timeframe = self.config['timeframe']
self.pairlists = PairListManager(self.exchange, self.config)
# RPC runs in separate threads, can start handling external commands just after
# initialization, even before Freqtradebot has a chance to start its throttling,
# so anything in the Freqtradebot instance should be ready (initialized), including
@ -81,9 +82,7 @@ class FreqtradeBot(LoggingMixin):
# Keep this at the end of this initialization method.
self.rpc: RPCManager = RPCManager(self)
self.pairlists = PairListManager(self.exchange, self.config)
self.dataprovider = DataProvider(self.config, self.exchange, self.pairlists)
self.dataprovider = DataProvider(self.config, self.exchange, self.pairlists, self.rpc)
# Attach Dataprovider to strategy instance
self.strategy.dp = self.dataprovider
@ -94,6 +93,10 @@ class FreqtradeBot(LoggingMixin):
self.edge = Edge(self.config, self.exchange, self.strategy) if \
self.config.get('edge', {}).get('enabled', False) else None
# Init ExternalMessageConsumer if enabled
self.emc = ExternalMessageConsumer(self.config, self.dataprovider) if \
self.config.get('external_message_consumer', {}).get('enabled', False) else None
self.active_pair_whitelist = self._refresh_active_whitelist()
# Set initial bot state from config
@ -154,6 +157,8 @@ class FreqtradeBot(LoggingMixin):
self.strategy.ft_bot_cleanup()
self.rpc.cleanup()
if self.emc:
self.emc.shutdown()
Trade.commit()
self.exchange.close()
@ -256,6 +261,7 @@ class FreqtradeBot(LoggingMixin):
pairs that have open trades.
"""
# Refresh whitelist
_prev_whitelist = self.pairlists.whitelist
self.pairlists.refresh_pairlist()
_whitelist = self.pairlists.whitelist
@ -268,6 +274,11 @@ class FreqtradeBot(LoggingMixin):
# Extend active-pair whitelist with pairs of open trades
# It ensures that candle (OHLCV) data are downloaded for open trades as well
_whitelist.extend([trade.pair for trade in trades if trade.pair not in _whitelist])
# Called last to include the included pairs
if _prev_whitelist != _whitelist:
self.rpc.send_msg({'type': RPCMessageType.WHITELIST, 'data': _whitelist})
return _whitelist
def get_free_open_trades(self) -> int:
@ -596,7 +607,7 @@ class FreqtradeBot(LoggingMixin):
amount = trade.amount
if amount == 0.0:
logger.info("Amount to sell is 0.0 due to exchange limits - not selling.")
logger.info("Amount to exit is 0.0 due to exchange limits - not exiting.")
return
remaining = (trade.amount - amount) * current_exit_rate
@ -923,7 +934,7 @@ class FreqtradeBot(LoggingMixin):
'stake_amount': trade.stake_amount,
'stake_currency': self.config['stake_currency'],
'fiat_currency': self.config.get('fiat_display_currency', None),
'amount': order.safe_amount_after_fee if fill else order.amount,
'amount': order.safe_amount_after_fee if fill else (order.amount or trade.amount),
'open_date': trade.open_date or datetime.utcnow(),
'current_rate': current_rate,
'sub_trade': sub_trade,
@ -1599,14 +1610,14 @@ class FreqtradeBot(LoggingMixin):
# second condition is for mypy only; order will always be passed during sub trade
if sub_trade and order is not None:
amount = order.safe_filled if fill else order.amount
profit_rate = order.safe_price
order_rate: float = order.safe_price
profit = trade.calc_profit(rate=profit_rate, amount=amount, open_rate=trade.open_rate)
profit_ratio = trade.calc_profit_ratio(profit_rate, amount, trade.open_rate)
profit = trade.calc_profit(rate=order_rate, amount=amount, open_rate=trade.open_rate)
profit_ratio = trade.calc_profit_ratio(order_rate, amount, trade.open_rate)
else:
profit_rate = trade.close_rate if trade.close_rate else trade.close_rate_requested
profit = trade.calc_profit(rate=profit_rate) + (0.0 if fill else trade.realized_profit)
profit_ratio = trade.calc_profit_ratio(profit_rate)
order_rate = trade.close_rate if trade.close_rate else trade.close_rate_requested
profit = trade.calc_profit(rate=order_rate) + (0.0 if fill else trade.realized_profit)
profit_ratio = trade.calc_profit_ratio(order_rate)
amount = trade.amount
gain = "profit" if profit_ratio > 0 else "loss"
@ -1619,11 +1630,12 @@ class FreqtradeBot(LoggingMixin):
'leverage': trade.leverage,
'direction': 'Short' if trade.is_short else 'Long',
'gain': gain,
'limit': profit_rate,
'limit': order_rate, # Deprecated
'order_rate': order_rate,
'order_type': order_type,
'amount': amount,
'open_rate': trade.open_rate,
'close_rate': profit_rate,
'close_rate': order_rate,
'current_rate': current_rate,
'profit_amount': profit,
'profit_ratio': profit_ratio,

View File

@ -2,8 +2,8 @@ import logging
import sys
from logging import Formatter
from logging.handlers import BufferingHandler, RotatingFileHandler, SysLogHandler
from typing import Any, Dict
from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
@ -73,7 +73,7 @@ def setup_logging_pre() -> None:
)
def setup_logging(config: Dict[str, Any]) -> None:
def setup_logging(config: Config) -> None:
"""
Process -v/--verbose, --logfile options
"""

View File

@ -12,6 +12,7 @@ from typing import Any, List
if sys.version_info < (3, 8): # pragma: no cover
sys.exit("Freqtrade requires Python version >= 3.8")
from freqtrade import __version__
from freqtrade.commands import Arguments
from freqtrade.exceptions import FreqtradeException, OperationalException
from freqtrade.loggers import setup_logging_pre
@ -34,6 +35,7 @@ def main(sysargv: List[str] = None) -> None:
# Call subcommand.
if 'func' in args:
logger.info(f'freqtrade {__version__}')
return_code = args['func'](args)
else:
# No subcommand was issued.

View File

@ -10,9 +10,11 @@ from typing import Any, Iterator, List
from typing.io import IO
from urllib.parse import urlparse
import pandas
import rapidjson
from freqtrade.constants import DECIMAL_PER_COIN_FALLBACK, DECIMALS_PER_COIN
from freqtrade.enums import SignalTagType, SignalType
logger = logging.getLogger(__name__)
@ -249,3 +251,41 @@ def parse_db_uri_for_logging(uri: str):
return uri
pwd = parsed_db_uri.netloc.split(':')[1].split('@')[0]
return parsed_db_uri.geturl().replace(f':{pwd}@', ':*****@')
def dataframe_to_json(dataframe: pandas.DataFrame) -> str:
"""
Serialize a DataFrame for transmission over the wire using JSON
:param dataframe: A pandas DataFrame
:returns: A JSON string of the pandas DataFrame
"""
return dataframe.to_json(orient='split')
def json_to_dataframe(data: str) -> pandas.DataFrame:
"""
Deserialize JSON into a DataFrame
:param data: A JSON string
:returns: A pandas DataFrame from the JSON string
"""
dataframe = pandas.read_json(data, orient='split')
if 'date' in dataframe.columns:
dataframe['date'] = pandas.to_datetime(dataframe['date'], unit='ms', utc=True)
return dataframe
def remove_entry_exit_signals(dataframe: pandas.DataFrame):
"""
Remove Entry and Exit signals from a DataFrame
:param dataframe: The DataFrame to remove signals from
"""
dataframe[SignalType.ENTER_LONG.value] = 0
dataframe[SignalType.EXIT_LONG.value] = 0
dataframe[SignalType.ENTER_SHORT.value] = 0
dataframe[SignalType.EXIT_SHORT.value] = 0
dataframe[SignalTagType.ENTER_TAG.value] = None
dataframe[SignalTagType.EXIT_TAG.value] = None
return dataframe
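A short round-trip check of the two serialization helpers above; the frame contents are arbitrary, and the date column survives because json_to_dataframe() re-parses it as UTC milliseconds.

import pandas as pd

from freqtrade.misc import dataframe_to_json, json_to_dataframe

df = pd.DataFrame({
    'date': pd.to_datetime([1660000000000, 1660000300000], unit='ms', utc=True),
    'close': [100.0, 101.5],
})
payload = dataframe_to_json(df)      # JSON string, orient='split'
restored = json_to_dataframe(payload)
assert restored['close'].tolist() == [100.0, 101.5]
assert restored['date'].iloc[0] == df['date'].iloc[0]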

View File

@ -15,7 +15,7 @@ from pandas import DataFrame
from freqtrade import constants
from freqtrade.configuration import TimeRange, validate_config_consistency
from freqtrade.constants import DATETIME_PRINT_FORMAT, LongShort
from freqtrade.constants import DATETIME_PRINT_FORMAT, Config, LongShort
from freqtrade.data import history
from freqtrade.data.btanalysis import find_existing_backtest_stats, trade_list_to_dataframe
from freqtrade.data.converter import trim_dataframe, trim_dataframes
@ -70,7 +70,7 @@ class Backtesting:
backtesting.start()
"""
def __init__(self, config: Dict[str, Any]) -> None:
def __init__(self, config: Config) -> None:
LoggingMixin.show_output = False
self.config = config
@ -95,8 +95,8 @@ class Backtesting:
if self.config.get('strategy_list'):
if self.config.get('freqai', {}).get('enabled', False):
raise OperationalException(
"You can't use strategy_list and freqai at the same time.")
logger.warning("Using --strategy-list with FreqAI REQUIRES all strategies "
"to have identical populate_any_indicators.")
for strat in list(self.config['strategy_list']):
stratconf = deepcopy(self.config)
stratconf['strategy'] = strat
@ -143,9 +143,14 @@ class Backtesting:
# Get maximum required startup period
self.required_startup = max([strat.startup_candle_count for strat in self.strategylist])
self.exchange.validate_required_startup_candles(self.required_startup, self.timeframe)
if self.config.get('freqai', {}).get('enabled', False):
# For FreqAI, increase required_startup to include the training data
self.required_startup = self.dataprovider.get_required_startup(self.timeframe)
# Add maximum startup candle count to configuration for informative pairs support
self.config['startup_candle_count'] = self.required_startup
self.exchange.validate_required_startup_candles(self.required_startup, self.timeframe)
self.trading_mode: TradingMode = config.get('trading_mode', TradingMode.SPOT)
# strategies which define "can_short=True" will fail to load in Spot mode.
@ -221,7 +226,7 @@ class Backtesting:
pairs=self.pairlists.whitelist,
timeframe=self.timeframe,
timerange=self.timerange,
startup_candles=self.dataprovider.get_required_startup(self.timeframe),
startup_candles=self.config['startup_candle_count'],
fail_without_data=True,
data_format=self.config.get('dataformat_ohlcv', 'json'),
candle_type=self.config.get('candle_type_def', CandleType.SPOT)

View File

@ -4,10 +4,10 @@
This module contains the edge backtesting interface
"""
import logging
from typing import Any, Dict
from freqtrade import constants
from freqtrade.configuration import TimeRange, validate_config_consistency
from freqtrade.constants import Config
from freqtrade.data.dataprovider import DataProvider
from freqtrade.edge import Edge
from freqtrade.optimize.optimize_reports import generate_edge_table
@ -26,7 +26,7 @@ class EdgeCli:
edge.start()
"""
def __init__(self, config: Dict[str, Any]) -> None:
def __init__(self, config: Config) -> None:
self.config = config
# Ensure using dry-run

View File

@ -21,7 +21,7 @@ from joblib import Parallel, cpu_count, delayed, dump, load, wrap_non_picklable_
from joblib.externals import cloudpickle
from pandas import DataFrame
from freqtrade.constants import DATETIME_PRINT_FORMAT, FTHYPT_FILEVERSION, LAST_BT_RESULT_FN
from freqtrade.constants import DATETIME_PRINT_FORMAT, FTHYPT_FILEVERSION, LAST_BT_RESULT_FN, Config
from freqtrade.data.converter import trim_dataframes
from freqtrade.data.history import get_timerange
from freqtrade.enums import HyperoptState
@ -66,7 +66,7 @@ class Hyperopt:
hyperopt.start()
"""
def __init__(self, config: Dict[str, Any]) -> None:
def __init__(self, config: Config) -> None:
self.buy_space: List[Dimension] = []
self.sell_space: List[Dimension] = []
self.protection_space: List[Dimension] = []
@ -132,7 +132,7 @@ class Hyperopt:
self.print_json = self.config.get('print_json', False)
@staticmethod
def get_lock_filename(config: Dict[str, Any]) -> str:
def get_lock_filename(config: Config) -> str:
return str(config['user_data_dir'] / 'hyperopt.lock')

View File

@ -10,6 +10,7 @@ from typing import Dict, List, Union
from sklearn.base import RegressorMixin
from skopt.space import Categorical, Dimension, Integer
from freqtrade.constants import Config
from freqtrade.exchange import timeframe_to_minutes
from freqtrade.misc import round_dict
from freqtrade.optimize.space import SKDecimal
@ -32,7 +33,7 @@ class IHyperOpt(ABC):
timeframe: str
strategy: IStrategy
def __init__(self, config: dict) -> None:
def __init__(self, config: Config) -> None:
self.config = config
# Assign timeframe to be used in hyperopt

View File

@ -10,6 +10,7 @@ from typing import Any, Dict
from pandas import DataFrame
from freqtrade.constants import Config
from freqtrade.data.metrics import calculate_max_drawdown
from freqtrade.optimize.hyperopt import IHyperOptLoss
@ -27,7 +28,7 @@ class CalmarHyperOptLoss(IHyperOptLoss):
trade_count: int,
min_date: datetime,
max_date: datetime,
config: Dict,
config: Config,
processed: Dict[str, DataFrame],
backtest_stats: Dict[str, Any],
*args,

View File

@ -4,10 +4,9 @@ MaxDrawDownRelativeHyperOptLoss
This module defines the alternative HyperOptLoss class which can be used for
Hyperoptimization.
"""
from typing import Dict
from pandas import DataFrame
from freqtrade.constants import Config
from freqtrade.data.metrics import calculate_underwater
from freqtrade.optimize.hyperopt import IHyperOptLoss
@ -22,7 +21,7 @@ class MaxDrawDownRelativeHyperOptLoss(IHyperOptLoss):
"""
@staticmethod
def hyperopt_loss_function(results: DataFrame, config: Dict,
def hyperopt_loss_function(results: DataFrame, config: Config,
*args, **kwargs) -> float:
"""

View File

@ -9,6 +9,8 @@ from typing import Any, Dict
from pandas import DataFrame
from freqtrade.constants import Config
class IHyperOptLoss(ABC):
"""
@ -21,7 +23,7 @@ class IHyperOptLoss(ABC):
@abstractmethod
def hyperopt_loss_function(*, results: DataFrame, trade_count: int,
min_date: datetime, max_date: datetime,
config: Dict, processed: Dict[str, DataFrame],
config: Config, processed: Dict[str, DataFrame],
backtest_stats: Dict[str, Any],
**kwargs) -> float:
"""

View File

@ -12,7 +12,7 @@ import tabulate
from colorama import Fore, Style
from pandas import isna, json_normalize
from freqtrade.constants import FTHYPT_FILEVERSION, USERPATH_STRATEGIES
from freqtrade.constants import FTHYPT_FILEVERSION, USERPATH_STRATEGIES, Config
from freqtrade.enums import HyperoptState
from freqtrade.exceptions import OperationalException
from freqtrade.misc import deep_merge_dicts, round_coin_value, round_dict, safe_value_fallback2
@ -45,7 +45,7 @@ class HyperoptStateContainer():
class HyperoptTools():
@staticmethod
def get_strategy_filename(config: Dict, strategy_name: str) -> Optional[Path]:
def get_strategy_filename(config: Config, strategy_name: str) -> Optional[Path]:
"""
Get Strategy-location (filename) from strategy_name
"""
@ -81,7 +81,7 @@ class HyperoptTools():
)
@staticmethod
def try_export_params(config: Dict[str, Any], strategy_name: str, params: Dict):
def try_export_params(config: Config, strategy_name: str, params: Dict):
if params.get(FTHYPT_FILEVERSION, 1) >= 2 and not config.get('disableparamexport', False):
# Export parameters ...
fn = HyperoptTools.get_strategy_filename(config, strategy_name)
@ -91,7 +91,7 @@ class HyperoptTools():
logger.warning("Strategy not found, not exporting parameter file.")
@staticmethod
def has_space(config: Dict[str, Any], space: str) -> bool:
def has_space(config: Config, space: str) -> bool:
"""
Tell if the space value is contained in the configuration
"""
@ -131,7 +131,7 @@ class HyperoptTools():
return False
@staticmethod
def load_filtered_results(results_file: Path, config: Dict[str, Any]) -> Tuple[List, int]:
def load_filtered_results(results_file: Path, config: Config) -> Tuple[List, int]:
filteroptions = {
'only_best': config.get('hyperopt_list_best', False),
'only_profitable': config.get('hyperopt_list_profitable', False),
@ -346,7 +346,7 @@ class HyperoptTools():
return trials
@staticmethod
def get_result_table(config: dict, results: list, total_epochs: int, highlight_best: bool,
def get_result_table(config: Config, results: list, total_epochs: int, highlight_best: bool,
print_colorized: bool, remove_header: int) -> str:
"""
Log result table
@ -444,7 +444,7 @@ class HyperoptTools():
return table
@staticmethod
def export_csv_file(config: dict, results: list, csv_file: str) -> None:
def export_csv_file(config: Config, results: list, csv_file: str) -> None:
"""
Log result to csv-file
"""

View File

@ -7,7 +7,8 @@ from typing import Any, Dict, List, Union
from pandas import DataFrame, to_datetime
from tabulate import tabulate
from freqtrade.constants import DATETIME_PRINT_FORMAT, LAST_BT_RESULT_FN, UNLIMITED_STAKE_AMOUNT
from freqtrade.constants import (DATETIME_PRINT_FORMAT, LAST_BT_RESULT_FN, UNLIMITED_STAKE_AMOUNT,
Config)
from freqtrade.data.metrics import (calculate_cagr, calculate_csum, calculate_market_change,
calculate_max_drawdown)
from freqtrade.misc import decimals_per_coin, file_dump_joblib, file_dump_json, round_coin_value
@ -898,7 +899,7 @@ def show_backtest_result(strategy: str, results: Dict[str, Any], stake_currency:
print()
def show_backtest_results(config: Dict, backtest_stats: Dict):
def show_backtest_results(config: Config, backtest_stats: Dict):
stake_currency = config['stake_currency']
for strategy, results in backtest_stats['strategy'].items():
@ -918,7 +919,7 @@ def show_backtest_results(config: Dict, backtest_stats: Dict):
print('\nFor more details, please look at the detail tables above')
def show_sorted_pairlist(config: Dict, backtest_stats: Dict):
def show_sorted_pairlist(config: Config, backtest_stats: Dict):
if config.get('backtest_show_pair_list', False):
for strategy, results in backtest_stats['strategy'].items():
print(f"Pairs for Strategy {strategy}: \n[")

View File

@ -1,10 +1,11 @@
import logging
from pathlib import Path
from typing import Any, Dict, List, Optional
from typing import Dict, List, Optional
import pandas as pd
from freqtrade.configuration import TimeRange
from freqtrade.constants import Config
from freqtrade.data.btanalysis import (analyze_trade_parallelism, extract_trades_of_period,
load_trades)
from freqtrade.data.converter import trim_dataframe
@ -618,7 +619,7 @@ def store_plot_file(fig, filename: str, directory: Path, auto_open: bool = False
logger.info(f"Stored plot as {_filename}")
def load_and_plot_trades(config: Dict[str, Any]):
def load_and_plot_trades(config: Config):
"""
From configuration provided
- Initializes plot-script
@ -666,7 +667,7 @@ def load_and_plot_trades(config: Dict[str, Any]):
logger.info('End of plotting process. %s plots generated', pair_counter)
def plot_profit(config: Dict[str, Any]) -> None:
def plot_profit(config: Config) -> None:
"""
Plots the total profit for all pairs.
Note, the profit calculation isn't realistic.

View File

@ -8,7 +8,7 @@ from typing import Any, Dict, List, Optional
import arrow
from pandas import DataFrame
from freqtrade.constants import ListPairsWithTimeframes
from freqtrade.constants import Config, ListPairsWithTimeframes
from freqtrade.exceptions import OperationalException
from freqtrade.misc import plural
from freqtrade.plugins.pairlist.IPairList import IPairList
@ -21,7 +21,7 @@ logger = logging.getLogger(__name__)
class AgeFilter(IPairList):
def __init__(self, exchange, pairlistmanager,
config: Dict[str, Any], pairlistconfig: Dict[str, Any],
config: Config, pairlistconfig: Dict[str, Any],
pairlist_pos: int) -> None:
super().__init__(exchange, pairlistmanager, config, pairlistconfig, pairlist_pos)

View File

@ -6,6 +6,7 @@ from abc import ABC, abstractmethod, abstractproperty
from copy import deepcopy
from typing import Any, Dict, List
from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import Exchange, market_is_active
from freqtrade.mixins import LoggingMixin
@ -17,7 +18,7 @@ logger = logging.getLogger(__name__)
class IPairList(LoggingMixin, ABC):
def __init__(self, exchange: Exchange, pairlistmanager,
config: Dict[str, Any], pairlistconfig: Dict[str, Any],
config: Config, pairlistconfig: Dict[str, Any],
pairlist_pos: int) -> None:
"""
:param exchange: Exchange instance

View File

@ -4,6 +4,7 @@ Offset pair list filter
import logging
from typing import Any, Dict, List
from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
from freqtrade.plugins.pairlist.IPairList import IPairList
@ -14,7 +15,7 @@ logger = logging.getLogger(__name__)
class OffsetFilter(IPairList):
def __init__(self, exchange, pairlistmanager,
config: Dict[str, Any], pairlistconfig: Dict[str, Any],
config: Config, pairlistconfig: Dict[str, Any],
pairlist_pos: int) -> None:
super().__init__(exchange, pairlistmanager, config, pairlistconfig, pairlist_pos)

View File

@ -6,6 +6,7 @@ from typing import Any, Dict, List
import pandas as pd
from freqtrade.constants import Config
from freqtrade.persistence import Trade
from freqtrade.plugins.pairlist.IPairList import IPairList
@ -16,7 +17,7 @@ logger = logging.getLogger(__name__)
class PerformanceFilter(IPairList):
def __init__(self, exchange, pairlistmanager,
config: Dict[str, Any], pairlistconfig: Dict[str, Any],
config: Config, pairlistconfig: Dict[str, Any],
pairlist_pos: int) -> None:
super().__init__(exchange, pairlistmanager, config, pairlistconfig, pairlist_pos)

View File

@ -4,6 +4,7 @@ Precision pair list filter
import logging
from typing import Any, Dict
from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
from freqtrade.plugins.pairlist.IPairList import IPairList
@ -14,7 +15,7 @@ logger = logging.getLogger(__name__)
class PrecisionFilter(IPairList):
def __init__(self, exchange, pairlistmanager,
config: Dict[str, Any], pairlistconfig: Dict[str, Any],
config: Config, pairlistconfig: Dict[str, Any],
pairlist_pos: int) -> None:
super().__init__(exchange, pairlistmanager, config, pairlistconfig, pairlist_pos)

View File

@ -4,6 +4,7 @@ Price pair list filter
import logging
from typing import Any, Dict
from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
from freqtrade.plugins.pairlist.IPairList import IPairList
@ -14,7 +15,7 @@ logger = logging.getLogger(__name__)
class PriceFilter(IPairList):
def __init__(self, exchange, pairlistmanager,
config: Dict[str, Any], pairlistconfig: Dict[str, Any],
config: Config, pairlistconfig: Dict[str, Any],
pairlist_pos: int) -> None:
super().__init__(exchange, pairlistmanager, config, pairlistconfig, pairlist_pos)

View File

@ -5,6 +5,7 @@ import logging
import random
from typing import Any, Dict, List
from freqtrade.constants import Config
from freqtrade.enums import RunMode
from freqtrade.plugins.pairlist.IPairList import IPairList
@ -15,7 +16,7 @@ logger = logging.getLogger(__name__)
class ShuffleFilter(IPairList):
def __init__(self, exchange, pairlistmanager,
config: Dict[str, Any], pairlistconfig: Dict[str, Any],
config: Config, pairlistconfig: Dict[str, Any],
pairlist_pos: int) -> None:
super().__init__(exchange, pairlistmanager, config, pairlistconfig, pairlist_pos)

View File

@ -4,6 +4,7 @@ Spread pair list filter
import logging
from typing import Any, Dict
from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
from freqtrade.plugins.pairlist.IPairList import IPairList
@ -14,7 +15,7 @@ logger = logging.getLogger(__name__)
class SpreadFilter(IPairList):
def __init__(self, exchange, pairlistmanager,
config: Dict[str, Any], pairlistconfig: Dict[str, Any],
config: Config, pairlistconfig: Dict[str, Any],
pairlist_pos: int) -> None:
super().__init__(exchange, pairlistmanager, config, pairlistconfig, pairlist_pos)

View File

@ -7,6 +7,7 @@ import logging
from copy import deepcopy
from typing import Any, Dict, List
from freqtrade.constants import Config
from freqtrade.plugins.pairlist.IPairList import IPairList
@ -16,7 +17,7 @@ logger = logging.getLogger(__name__)
class StaticPairList(IPairList):
def __init__(self, exchange, pairlistmanager,
config: Dict[str, Any], pairlistconfig: Dict[str, Any],
config: Config, pairlistconfig: Dict[str, Any],
pairlist_pos: int) -> None:
super().__init__(exchange, pairlistmanager, config, pairlistconfig, pairlist_pos)

View File

@ -11,7 +11,7 @@ import numpy as np
from cachetools import TTLCache
from pandas import DataFrame
from freqtrade.constants import ListPairsWithTimeframes
from freqtrade.constants import Config, ListPairsWithTimeframes
from freqtrade.exceptions import OperationalException
from freqtrade.misc import plural
from freqtrade.plugins.pairlist.IPairList import IPairList
@ -26,7 +26,7 @@ class VolatilityFilter(IPairList):
"""
def __init__(self, exchange, pairlistmanager,
config: Dict[str, Any], pairlistconfig: Dict[str, Any],
config: Config, pairlistconfig: Dict[str, Any],
pairlist_pos: int) -> None:
super().__init__(exchange, pairlistmanager, config, pairlistconfig, pairlist_pos)

View File

@ -9,7 +9,7 @@ from typing import Any, Dict, List
from cachetools import TTLCache
from freqtrade.constants import ListPairsWithTimeframes
from freqtrade.constants import Config, ListPairsWithTimeframes
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import timeframe_to_minutes, timeframe_to_prev_date
from freqtrade.misc import format_ms_time
@ -25,7 +25,7 @@ SORT_VALUES = ['quoteVolume']
class VolumePairList(IPairList):
def __init__(self, exchange, pairlistmanager,
config: Dict[str, Any], pairlistconfig: Dict[str, Any],
config: Config, pairlistconfig: Dict[str, Any],
pairlist_pos: int) -> None:
super().__init__(exchange, pairlistmanager, config, pairlistconfig, pairlist_pos)

View File

@ -1,5 +1,7 @@
import re
from typing import Any, Dict, List
from typing import List
from freqtrade.constants import Config
def expand_pairlist(wildcardpl: List[str], available_pairs: List[str],
@ -42,7 +44,7 @@ def expand_pairlist(wildcardpl: List[str], available_pairs: List[str],
return result
def dynamic_expand_pairlist(config: Dict[str, Any], markets: List[str]) -> List[str]:
def dynamic_expand_pairlist(config: Config, markets: List[str]) -> List[str]:
expanded_pairs = expand_pairlist(config['pairs'], markets)
if config.get('freqai', {}).get('enabled', False):
corr_pairlist = config['freqai']['feature_parameters']['include_corr_pairlist']

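For illustration, a standalone sketch of the FreqAI branch added above: correlated pairs from the feature configuration are appended to the expanded whitelist. sketch_dynamic_expand is a hypothetical stand-in, and the regex wildcard expansion done by the real expand_pairlist is omitted:

from typing import Any, Dict, List


def sketch_dynamic_expand(config: Dict[str, Any], markets: List[str]) -> List[str]:
    # Plain membership check stands in for the regex-based wildcard expansion
    expanded = [p for p in config['pairs'] if p in markets]
    if config.get('freqai', {}).get('enabled', False):
        corr = config['freqai']['feature_parameters']['include_corr_pairlist']
        expanded += [p for p in corr if p in markets and p not in expanded]
    return expanded


cfg = {'pairs': ['BTC/USDT'],
       'freqai': {'enabled': True,
                  'feature_parameters': {'include_corr_pairlist': ['ETH/USDT', 'BTC/USDT']}}}
print(sketch_dynamic_expand(cfg, ['BTC/USDT', 'ETH/USDT', 'XRP/USDT']))  # ['BTC/USDT', 'ETH/USDT']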
View File

@ -9,7 +9,7 @@ import arrow
from cachetools import TTLCache
from pandas import DataFrame
from freqtrade.constants import ListPairsWithTimeframes
from freqtrade.constants import Config, ListPairsWithTimeframes
from freqtrade.exceptions import OperationalException
from freqtrade.misc import plural
from freqtrade.plugins.pairlist.IPairList import IPairList
@ -21,7 +21,7 @@ logger = logging.getLogger(__name__)
class RangeStabilityFilter(IPairList):
def __init__(self, exchange, pairlistmanager,
config: Dict[str, Any], pairlistconfig: Dict[str, Any],
config: Config, pairlistconfig: Dict[str, Any],
pairlist_pos: int) -> None:
super().__init__(exchange, pairlistmanager, config, pairlistconfig, pairlist_pos)
@ -100,23 +100,19 @@ class RangeStabilityFilter(IPairList):
if cached_res is not None:
return cached_res
result = False
result = True
if daily_candles is not None and not daily_candles.empty:
highest_high = daily_candles['high'].max()
lowest_low = daily_candles['low'].min()
pct_change = ((highest_high - lowest_low) / lowest_low) if lowest_low > 0 else 0
if pct_change >= self._min_rate_of_change:
result = True
else:
if pct_change < self._min_rate_of_change:
self.log_once(f"Removed {pair} from whitelist, because rate of change "
f"over {self._days} {plural(self._days, 'day')} is {pct_change:.3f}, "
f"which is below the threshold of {self._min_rate_of_change}.",
logger.info)
result = False
if self._max_rate_of_change:
if pct_change <= self._max_rate_of_change:
result = True
else:
if pct_change > self._max_rate_of_change:
self.log_once(
f"Removed {pair} from whitelist, because rate of change "
f"over {self._days} {plural(self._days, 'day')} is {pct_change:.3f}, "

View File
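A standalone sketch of the simplified decision above: the result now defaults to True and each threshold check can only remove a pair. Threshold values below are illustrative:

from typing import Optional


def keep_pair(pct_change: float, min_roc: float, max_roc: Optional[float] = None) -> bool:
    result = True
    if pct_change < min_roc:
        result = False   # range too small over the lookback window
    if max_roc and pct_change > max_roc:
        result = False   # range too large over the lookback window
    return result


print(keep_pair(0.05, min_roc=0.01, max_roc=0.75))  # True - pair kept
print(keep_pair(0.002, min_roc=0.01))               # False - pair removed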

@ -7,7 +7,7 @@ from typing import Dict, List
from cachetools import TTLCache, cached
from freqtrade.constants import ListPairsWithTimeframes
from freqtrade.constants import Config, ListPairsWithTimeframes
from freqtrade.enums import CandleType
from freqtrade.exceptions import OperationalException
from freqtrade.mixins import LoggingMixin
@ -21,7 +21,7 @@ logger = logging.getLogger(__name__)
class PairListManager(LoggingMixin):
def __init__(self, exchange, config: dict) -> None:
def __init__(self, exchange, config: Config) -> None:
self._exchange = exchange
self._config = config
self._whitelist = self._config['exchange'].get('pair_whitelist')

View File

@ -5,7 +5,7 @@ import logging
from datetime import datetime, timezone
from typing import Dict, List, Optional
from freqtrade.constants import LongShort
from freqtrade.constants import Config, LongShort
from freqtrade.persistence import PairLocks
from freqtrade.persistence.models import PairLock
from freqtrade.plugins.protections import IProtection
@ -17,7 +17,7 @@ logger = logging.getLogger(__name__)
class ProtectionManager():
def __init__(self, config: Dict, protections: List) -> None:
def __init__(self, config: Config, protections: List) -> None:
self._config = config
self._protection_handlers: List[IProtection] = []

View File

@ -5,7 +5,7 @@ from dataclasses import dataclass
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Optional
from freqtrade.constants import LongShort
from freqtrade.constants import Config, LongShort
from freqtrade.exchange import timeframe_to_minutes
from freqtrade.misc import plural
from freqtrade.mixins import LoggingMixin
@ -30,7 +30,7 @@ class IProtection(LoggingMixin, ABC):
# Can stop trading for one pair
has_local_stop: bool = False
def __init__(self, config: Dict[str, Any], protection_config: Dict[str, Any]) -> None:
def __init__(self, config: Config, protection_config: Dict[str, Any]) -> None:
self._config = config
self._protection_config = protection_config
self._stop_duration_candles: Optional[int] = None

View File

@ -3,7 +3,7 @@ import logging
from datetime import datetime, timedelta
from typing import Any, Dict, Optional
from freqtrade.constants import LongShort
from freqtrade.constants import Config, LongShort
from freqtrade.persistence import Trade
from freqtrade.plugins.protections import IProtection, ProtectionReturn
@ -16,7 +16,7 @@ class LowProfitPairs(IProtection):
has_global_stop: bool = False
has_local_stop: bool = True
def __init__(self, config: Dict[str, Any], protection_config: Dict[str, Any]) -> None:
def __init__(self, config: Config, protection_config: Dict[str, Any]) -> None:
super().__init__(config, protection_config)
self._trade_limit = protection_config.get('trade_limit', 1)

View File

@ -5,7 +5,7 @@ from typing import Any, Dict, Optional
import pandas as pd
from freqtrade.constants import LongShort
from freqtrade.constants import Config, LongShort
from freqtrade.data.metrics import calculate_max_drawdown
from freqtrade.persistence import Trade
from freqtrade.plugins.protections import IProtection, ProtectionReturn
@ -19,7 +19,7 @@ class MaxDrawdown(IProtection):
has_global_stop: bool = True
has_local_stop: bool = False
def __init__(self, config: Dict[str, Any], protection_config: Dict[str, Any]) -> None:
def __init__(self, config: Config, protection_config: Dict[str, Any]) -> None:
super().__init__(config, protection_config)
self._trade_limit = protection_config.get('trade_limit', 1)

View File

@ -3,7 +3,7 @@ import logging
from datetime import datetime, timedelta
from typing import Any, Dict, Optional
from freqtrade.constants import LongShort
from freqtrade.constants import Config, LongShort
from freqtrade.enums import ExitType
from freqtrade.persistence import Trade
from freqtrade.plugins.protections import IProtection, ProtectionReturn
@ -17,7 +17,7 @@ class StoplossGuard(IProtection):
has_global_stop: bool = True
has_local_stop: bool = True
def __init__(self, config: Dict[str, Any], protection_config: Dict[str, Any]) -> None:
def __init__(self, config: Config, protection_config: Dict[str, Any]) -> None:
super().__init__(config, protection_config)
self._trade_limit = protection_config.get('trade_limit', 10)

View File

@ -4,6 +4,7 @@ This module loads custom exchanges
import logging
import freqtrade.exchange as exchanges
from freqtrade.constants import Config
from freqtrade.exchange import MAP_EXCHANGE_CHILDCLASS, Exchange
from freqtrade.resolvers import IResolver
@ -18,7 +19,7 @@ class ExchangeResolver(IResolver):
object_type = Exchange
@staticmethod
def load_exchange(exchange_name: str, config: dict, validate: bool = True,
def load_exchange(exchange_name: str, config: Config, validate: bool = True,
load_leverage_tiers: bool = False) -> Exchange:
"""
Load the custom class from config parameter

View File

@ -5,9 +5,8 @@ This module load a custom model for freqai
"""
import logging
from pathlib import Path
from typing import Dict
from freqtrade.constants import USERPATH_FREQAIMODELS
from freqtrade.constants import USERPATH_FREQAIMODELS, Config
from freqtrade.exceptions import OperationalException
from freqtrade.freqai.freqai_interface import IFreqaiModel
from freqtrade.resolvers import IResolver
@ -29,7 +28,7 @@ class FreqaiModelResolver(IResolver):
)
@staticmethod
def load_freqaimodel(config: Dict) -> IFreqaiModel:
def load_freqaimodel(config: Config) -> IFreqaiModel:
"""
Load the custom class from config parameter
:param config: configuration dictionary

View File

@ -5,9 +5,8 @@ This module load custom hyperopt
"""
import logging
from pathlib import Path
from typing import Dict
from freqtrade.constants import HYPEROPT_LOSS_BUILTIN, USERPATH_HYPEROPTS
from freqtrade.constants import HYPEROPT_LOSS_BUILTIN, USERPATH_HYPEROPTS, Config
from freqtrade.exceptions import OperationalException
from freqtrade.optimize.hyperopt_loss_interface import IHyperOptLoss
from freqtrade.resolvers import IResolver
@ -26,7 +25,7 @@ class HyperOptLossResolver(IResolver):
initial_search_path = Path(__file__).parent.parent.joinpath('optimize/hyperopt_loss').resolve()
@staticmethod
def load_hyperoptloss(config: Dict) -> IHyperOptLoss:
def load_hyperoptloss(config: Config) -> IHyperOptLoss:
"""
Load the custom class from config parameter
:param config: configuration dictionary

View File

@ -10,6 +10,7 @@ import sys
from pathlib import Path
from typing import Any, Dict, Iterator, List, Optional, Tuple, Type, Union
from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
@ -43,7 +44,7 @@ class IResolver:
initial_search_path: Optional[Path]
@classmethod
def build_search_paths(cls, config: Dict[str, Any], user_subdir: Optional[str] = None,
def build_search_paths(cls, config: Config, user_subdir: Optional[str] = None,
extra_dirs: List[str] = []) -> List[Path]:
abs_paths: List[Path] = []
@ -153,7 +154,7 @@ class IResolver:
return None
@classmethod
def load_object(cls, object_name: str, config: dict, *, kwargs: dict,
def load_object(cls, object_name: str, config: Config, *, kwargs: dict,
extra_dir: Optional[str] = None) -> Any:
"""
Searches and loads the specified object as configured in the child class.

View File

@ -6,6 +6,7 @@ This module load custom pairlists
import logging
from pathlib import Path
from freqtrade.constants import Config
from freqtrade.plugins.pairlist.IPairList import IPairList
from freqtrade.resolvers import IResolver
@ -24,7 +25,7 @@ class PairListResolver(IResolver):
@staticmethod
def load_pairlist(pairlist_name: str, exchange, pairlistmanager,
config: dict, pairlistconfig: dict, pairlist_pos: int) -> IPairList:
config: Config, pairlistconfig: dict, pairlist_pos: int) -> IPairList:
"""
Load the pairlist with pairlist_name
:param pairlist_name: Classname of the pairlist

View File

@ -5,6 +5,7 @@ import logging
from pathlib import Path
from typing import Dict
from freqtrade.constants import Config
from freqtrade.plugins.protections import IProtection
from freqtrade.resolvers import IResolver
@ -22,7 +23,8 @@ class ProtectionResolver(IResolver):
initial_search_path = Path(__file__).parent.parent.joinpath('plugins/protections').resolve()
@staticmethod
def load_protection(protection_name: str, config: Dict, protection_config: Dict) -> IProtection:
def load_protection(protection_name: str, config: Config,
protection_config: Dict) -> IProtection:
"""
Load the protection with protection_name
:param protection_name: Classname of the protection

View File

@ -9,10 +9,10 @@ from base64 import urlsafe_b64decode
from inspect import getfullargspec
from os import walk
from pathlib import Path
from typing import Any, Dict, List, Optional
from typing import Any, List, Optional
from freqtrade.configuration.config_validation import validate_migrated_strategy_settings
from freqtrade.constants import REQUIRED_ORDERTIF, REQUIRED_ORDERTYPES, USERPATH_STRATEGIES
from freqtrade.constants import REQUIRED_ORDERTIF, REQUIRED_ORDERTYPES, USERPATH_STRATEGIES, Config
from freqtrade.enums import TradingMode
from freqtrade.exceptions import OperationalException
from freqtrade.resolvers import IResolver
@ -32,7 +32,7 @@ class StrategyResolver(IResolver):
initial_search_path = None
@staticmethod
def load_strategy(config: Dict[str, Any] = None) -> IStrategy:
def load_strategy(config: Config = None) -> IStrategy:
"""
Load the custom class from config parameter
:param config: configuration dictionary or None
@ -91,8 +91,7 @@ class StrategyResolver(IResolver):
return strategy
@staticmethod
def _override_attribute_helper(strategy, config: Dict[str, Any],
attribute: str, default: Any):
def _override_attribute_helper(strategy, config: Config, attribute: str, default: Any):
"""
Override attributes in the strategy.
Prevalence:
@ -215,7 +214,7 @@ class StrategyResolver(IResolver):
@staticmethod
def _load_strategy(strategy_name: str,
config: dict, extra_dir: Optional[str] = None) -> IStrategy:
config: Config, extra_dir: Optional[str] = None) -> IStrategy:
"""
Searches and loads the specified strategy.
:param strategy_name: name of the module to import

View File

@ -1,8 +1,10 @@
import logging
import secrets
from datetime import datetime, timedelta
from typing import Any, Dict, Union
import jwt
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi import APIRouter, Depends, HTTPException, Query, WebSocket, status
from fastapi.security import OAuth2PasswordBearer
from fastapi.security.http import HTTPBasic, HTTPBasicCredentials
@ -10,6 +12,8 @@ from freqtrade.rpc.api_server.api_schemas import AccessAndRefreshToken, AccessTo
from freqtrade.rpc.api_server.deps import get_api_config
logger = logging.getLogger(__name__)
ALGORITHM = "HS256"
router_login = APIRouter()
@ -25,7 +29,7 @@ httpbasic = HTTPBasic(auto_error=False)
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token", auto_error=False)
def get_user_from_token(token, secret_key: str, token_type: str = "access"):
def get_user_from_token(token, secret_key: str, token_type: str = "access") -> str:
credentials_exception = HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Could not validate credentials",
@ -44,6 +48,45 @@ def get_user_from_token(token, secret_key: str, token_type: str = "access"):
return username
# This should be reimplemented to better realign with the existing tools provided
# by FastAPI regarding API Tokens
# https://github.com/tiangolo/fastapi/blob/master/fastapi/security/api_key.py
async def validate_ws_token(
ws: WebSocket,
ws_token: Union[str, None] = Query(default=None, alias="token"),
api_config: Dict[str, Any] = Depends(get_api_config)
):
secret_ws_token = api_config.get('ws_token', None)
secret_jwt_key = api_config.get('jwt_secret_key', 'super-secret')
# Check if ws_token is/in secret_ws_token
if ws_token and secret_ws_token:
is_valid_ws_token = False
if isinstance(secret_ws_token, str):
is_valid_ws_token = secrets.compare_digest(secret_ws_token, ws_token)
elif isinstance(secret_ws_token, list):
is_valid_ws_token = any([
secrets.compare_digest(potential, ws_token)
for potential in secret_ws_token
])
if is_valid_ws_token:
return ws_token
# Check if ws_token is a JWT
try:
user = get_user_from_token(ws_token, secret_jwt_key)
return user
# If the token is a valid JWT, return the user
except HTTPException:
pass
# No checks passed, deny the connection
logger.debug("Denying websocket request.")
# If it doesn't match, close the websocket connection
await ws.close(code=status.WS_1008_POLICY_VIOLATION)
def create_token(data: dict, secret_key: str, token_type: str = "access") -> str:
to_encode = data.copy()
if token_type == "access":

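A minimal consumer-side sketch of presenting a token to this endpoint. The host, port and token are placeholders, and the lowercase request type strings are assumptions based on the request enums used elsewhere in this diff:

import asyncio
import json

import websockets


async def main():
    url = "ws://127.0.0.1:8080/api/v1/message/ws?token=REPLACE_WITH_WS_TOKEN"
    async with websockets.connect(url) as ws:
        # Subscribe first (no response is sent for subscriptions), then ask for the whitelist
        await ws.send(json.dumps({"type": "subscribe", "data": ["whitelist"]}))
        await ws.send(json.dumps({"type": "whitelist", "data": None}))
        print(json.loads(await ws.recv()))


asyncio.run(main())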
View File

@ -38,7 +38,8 @@ logger = logging.getLogger(__name__)
# 2.15: Add backtest history endpoints
# 2.16: Additional daily metrics
# 2.17: Forceentry - leverage, partial force_exit
API_VERSION = 2.17
# 2.20: Add websocket endpoints
API_VERSION = 2.20
# Public API, requires no auth.
router_public = APIRouter()

View File

@ -0,0 +1,140 @@
import logging
from typing import Any, Dict
from fastapi import APIRouter, Depends, WebSocketDisconnect
from fastapi.websockets import WebSocket, WebSocketState
from pydantic import ValidationError
from freqtrade.enums import RPCMessageType, RPCRequestType
from freqtrade.rpc.api_server.api_auth import validate_ws_token
from freqtrade.rpc.api_server.deps import get_channel_manager, get_rpc
from freqtrade.rpc.api_server.ws import WebSocketChannel
from freqtrade.rpc.api_server.ws_schemas import (WSAnalyzedDFMessage, WSMessageSchema,
WSRequestSchema, WSWhitelistMessage)
from freqtrade.rpc.rpc import RPC
logger = logging.getLogger(__name__)
# Private router, protected by API Key authentication
router = APIRouter()
async def is_websocket_alive(ws: WebSocket) -> bool:
"""
Check if a FastAPI Websocket is still open
"""
if (
ws.application_state == WebSocketState.CONNECTED and
ws.client_state == WebSocketState.CONNECTED
):
return True
return False
async def _process_consumer_request(
request: Dict[str, Any],
channel: WebSocketChannel,
rpc: RPC
):
"""
Validate and handle a request from a websocket consumer
"""
# Validate the request, makes sure it matches the schema
try:
websocket_request = WSRequestSchema.parse_obj(request)
except ValidationError as e:
logger.error(f"Invalid request from {channel}: {e}")
return
type, data = websocket_request.type, websocket_request.data
response: WSMessageSchema
logger.debug(f"Request of type {type} from {channel}")
# If we have a request of type SUBSCRIBE, set the topics in this channel
if type == RPCRequestType.SUBSCRIBE:
# If the request is empty, do nothing
if not data:
return
# If all topics passed are a valid RPCMessageType, set subscriptions on channel
if all([any(x.value == topic for x in RPCMessageType) for topic in data]):
channel.set_subscriptions(data)
# We don't send a response for subscriptions
return
elif type == RPCRequestType.WHITELIST:
# Get whitelist
whitelist = rpc._ws_request_whitelist()
# Format response
response = WSWhitelistMessage(data=whitelist)
# Send it back
await channel.send(response.dict(exclude_none=True))
elif type == RPCRequestType.ANALYZED_DF:
limit = None
if data:
# Limit the amount of candles per dataframe to 'limit', capped at 1500
limit = min(data.get('limit', 1500), 1500)
# They requested the full historical analyzed dataframes
analyzed_df = rpc._ws_request_analyzed_df(limit)
# For every dataframe, send as a separate message
for _, message in analyzed_df.items():
response = WSAnalyzedDFMessage(data=message)
await channel.send(response.dict(exclude_none=True))
@router.websocket("/message/ws")
async def message_endpoint(
ws: WebSocket,
rpc: RPC = Depends(get_rpc),
channel_manager=Depends(get_channel_manager),
token: str = Depends(validate_ws_token)
):
"""
Message WebSocket endpoint, facilitates sending RPC messages
"""
try:
channel = await channel_manager.on_connect(ws)
if await is_websocket_alive(ws):
logger.info(f"Consumer connected - {channel}")
# Keep connection open until explicitly closed, and process requests
try:
while not channel.is_closed():
request = await channel.recv()
# Process the request here
await _process_consumer_request(request, channel, rpc)
except WebSocketDisconnect:
# Handle client disconnects
logger.info(f"Consumer disconnected - {channel}")
await channel_manager.on_disconnect(ws)
except Exception as e:
logger.info(f"Consumer connection failed - {channel}")
logger.exception(e)
# Handle cases like -
# RuntimeError('Cannot call "send" once a closed message has been sent')
await channel_manager.on_disconnect(ws)
else:
await ws.close()
except RuntimeError:
# WebSocket was closed
await channel_manager.on_disconnect(ws)
except Exception as e:
logger.error(f"Failed to serve - {ws.client}")
# Log tracebacks to keep track of what errors are happening
logger.exception(e)
await channel_manager.on_disconnect(ws)

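The subscription branch above accepts the request only if every topic matches a known RPCMessageType value. A standalone sketch of that check; the enum below is a cut-down stand-in, not freqtrade's full enum:

from enum import Enum


class RPCMessageType(str, Enum):
    WHITELIST = 'whitelist'
    ANALYZED_DF = 'analyzed_df'


def valid_subscription(data):
    return all(any(x.value == topic for x in RPCMessageType) for topic in data)


print(valid_subscription(['whitelist', 'analyzed_df']))  # True - subscriptions are set
print(valid_subscription(['whitelist', 'bogus']))        # False - request is ignored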
View File

@ -41,6 +41,10 @@ def get_exchange(config=Depends(get_config)):
return ApiServer._exchange
def get_channel_manager():
return ApiServer._ws_channel_manager
def is_webserver_mode(config=Depends(get_config)):
if config['runmode'] != RunMode.WEBSERVER:
raise RPCException('Bot is not in the correct state')

View File

@ -1,15 +1,21 @@
import asyncio
import logging
from ipaddress import IPv4Address
from threading import Thread
from typing import Any, Dict
import orjson
import uvicorn
from fastapi import Depends, FastAPI
from fastapi.middleware.cors import CORSMiddleware
# Look into alternatives
from janus import Queue as ThreadedQueue
from starlette.responses import JSONResponse
from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
from freqtrade.rpc.api_server.uvicorn_threaded import UvicornServer
from freqtrade.rpc.api_server.ws import ChannelManager
from freqtrade.rpc.rpc import RPC, RPCException, RPCHandler
@ -37,12 +43,16 @@ class ApiServer(RPCHandler):
_bt = None
_bt_data = None
_bt_timerange = None
_bt_last_config: Dict[str, Any] = {}
_bt_last_config: Config = {}
_has_rpc: bool = False
_bgtask_running: bool = False
_config: Dict[str, Any] = {}
_config: Config = {}
# Exchange - only available in webserver mode.
_exchange = None
# websocket message queue stuff
_ws_channel_manager = None
_ws_thread = None
_ws_loop = None
def __new__(cls, *args, **kwargs):
"""
@ -54,23 +64,27 @@ class ApiServer(RPCHandler):
ApiServer.__initialized = False
return ApiServer.__instance
def __init__(self, config: Dict[str, Any], standalone: bool = False) -> None:
def __init__(self, config: Config, standalone: bool = False) -> None:
ApiServer._config = config
if self.__initialized and (standalone or self._standalone):
return
self._standalone: bool = standalone
self._server = None
self._ws_queue = None
self._ws_background_task = None
ApiServer.__initialized = True
api_config = self._config['api_server']
ApiServer._ws_channel_manager = ChannelManager()
self.app = FastAPI(title="Freqtrade API",
docs_url='/docs' if api_config.get('enable_openapi', False) else None,
redoc_url=None,
default_response_class=FTJSONResponse,
)
self.configure_app(self.app, self._config)
self.start_api()
def add_rpc_handler(self, rpc: RPC):
@ -92,6 +106,19 @@ class ApiServer(RPCHandler):
logger.info("Stopping API Server")
self._server.cleanup()
if self._ws_thread and self._ws_loop:
logger.info("Stopping API Server background tasks")
if self._ws_background_task:
# Cancel the queue task
self._ws_background_task.cancel()
self._ws_thread.join()
self._ws_thread = None
self._ws_loop = None
self._ws_background_task = None
@classmethod
def shutdown(cls):
cls.__initialized = False
@ -101,7 +128,9 @@ class ApiServer(RPCHandler):
cls._rpc = None
def send_msg(self, msg: Dict[str, str]) -> None:
pass
if self._ws_queue:
sync_q = self._ws_queue.sync_q
sync_q.put(msg)
def handle_rpc_exception(self, request, exc):
logger.exception(f"API Error calling: {exc}")
@ -115,6 +144,7 @@ class ApiServer(RPCHandler):
from freqtrade.rpc.api_server.api_backtest import router as api_backtest
from freqtrade.rpc.api_server.api_v1 import router as api_v1
from freqtrade.rpc.api_server.api_v1 import router_public as api_v1_public
from freqtrade.rpc.api_server.api_ws import router as ws_router
from freqtrade.rpc.api_server.web_ui import router_ui
app.include_router(api_v1_public, prefix="/api/v1")
@ -125,6 +155,7 @@ class ApiServer(RPCHandler):
app.include_router(api_backtest, prefix="/api/v1",
dependencies=[Depends(http_basic_or_jwt_token)],
)
app.include_router(ws_router, prefix="/api/v1")
app.include_router(router_login, prefix="/api/v1", tags=["auth"])
# UI Router MUST be last!
app.include_router(router_ui, prefix='')
@ -139,6 +170,48 @@ class ApiServer(RPCHandler):
app.add_exception_handler(RPCException, self.handle_rpc_exception)
def start_message_queue(self):
if self._ws_thread:
return
# Create a new loop, as it'll be just for the background thread
self._ws_loop = asyncio.new_event_loop()
# Start the thread
self._ws_thread = Thread(target=self._ws_loop.run_forever)
self._ws_thread.start()
# Finally, submit the coro to the thread
self._ws_background_task = asyncio.run_coroutine_threadsafe(
self._broadcast_queue_data(), loop=self._ws_loop)
async def _broadcast_queue_data(self):
# Instantiate the queue in this coroutine so it's attached to our loop
self._ws_queue = ThreadedQueue()
async_queue = self._ws_queue.async_q
try:
while True:
logger.debug("Getting queue messages...")
# Get data from queue
message = await async_queue.get()
logger.debug(f"Found message of type: {message.get('type')}")
# Broadcast it
await self._ws_channel_manager.broadcast(message)
# Sleep, make this configurable?
await asyncio.sleep(0.1)
except asyncio.CancelledError:
pass
# For testing, shouldn't happen when stable
except Exception as e:
logger.exception(f"Exception happened in background task: {e}")
finally:
# Disconnect channels and stop the loop on cancel
await self._ws_channel_manager.disconnect_all()
self._ws_loop.stop()
def start_api(self):
"""
Start API ... should be run in thread.
@ -176,6 +249,7 @@ class ApiServer(RPCHandler):
if self._standalone:
self._server.run()
else:
self.start_message_queue()
self._server.run_in_thread()
except Exception:
logger.exception("Api server failed to start.")

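A standalone sketch of the janus bridging used above: the synchronous side (send_msg) puts onto sync_q while the background coroutine awaits async_q and broadcasts. Queue handling only; the channel broadcast itself is omitted:

import asyncio

from janus import Queue as ThreadedQueue


async def main():
    queue = ThreadedQueue()                             # must be created inside a running loop
    queue.sync_q.put({'type': 'status', 'data': 'hi'})  # e.g. called from send_msg()
    message = await queue.async_q.get()                 # e.g. awaited in _broadcast_queue_data()
    print(message)
    queue.close()
    await queue.wait_closed()


asyncio.run(main())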
View File

@ -0,0 +1,6 @@
# flake8: noqa: F401
# isort: off
from freqtrade.rpc.api_server.ws.types import WebSocketType
from freqtrade.rpc.api_server.ws.proxy import WebSocketProxy
from freqtrade.rpc.api_server.ws.serializer import HybridJSONWebSocketSerializer
from freqtrade.rpc.api_server.ws.channel import ChannelManager, WebSocketChannel

View File

@ -0,0 +1,178 @@
import logging
from threading import RLock
from typing import List, Optional, Type
from uuid import uuid4
from fastapi import WebSocket as FastAPIWebSocket
from freqtrade.rpc.api_server.ws.proxy import WebSocketProxy
from freqtrade.rpc.api_server.ws.serializer import (HybridJSONWebSocketSerializer,
WebSocketSerializer)
from freqtrade.rpc.api_server.ws.types import WebSocketType
logger = logging.getLogger(__name__)
class WebSocketChannel:
"""
Object to help facilitate managing a websocket connection
"""
def __init__(
self,
websocket: WebSocketType,
channel_id: Optional[str] = None,
serializer_cls: Type[WebSocketSerializer] = HybridJSONWebSocketSerializer
):
self.channel_id = channel_id if channel_id else uuid4().hex[:8]
# The WebSocket object
self._websocket = WebSocketProxy(websocket)
# The Serializing class for the WebSocket object
self._serializer_cls = serializer_cls
self._subscriptions: List[str] = []
# Internal event to signify a closed websocket
self._closed = False
# Wrap the WebSocket in the Serializing class
self._wrapped_ws = self._serializer_cls(self._websocket)
def __repr__(self):
return f"WebSocketChannel({self.channel_id}, {self.remote_addr})"
@property
def remote_addr(self):
return self._websocket.remote_addr
async def send(self, data):
"""
Send data on the wrapped websocket
"""
await self._wrapped_ws.send(data)
async def recv(self):
"""
Receive data on the wrapped websocket
"""
return await self._wrapped_ws.recv()
async def ping(self):
"""
Ping the websocket
"""
return await self._websocket.ping()
async def close(self):
"""
Close the WebSocketChannel
"""
self._closed = True
def is_closed(self) -> bool:
"""
Closed flag
"""
return self._closed
def set_subscriptions(self, subscriptions: List[str] = []) -> None:
"""
Set which subscriptions this channel is subscribed to
:param subscriptions: List of subscriptions, List[str]
"""
self._subscriptions = subscriptions
def subscribed_to(self, message_type: str) -> bool:
"""
Check if this channel is subscribed to the message_type
:param message_type: The message type to check
"""
return message_type in self._subscriptions
class ChannelManager:
def __init__(self):
self.channels = dict()
self._lock = RLock() # Re-entrant Lock
async def on_connect(self, websocket: WebSocketType):
"""
Wrap websocket connection into Channel and add to list
:param websocket: The WebSocket object to attach to the Channel
"""
if isinstance(websocket, FastAPIWebSocket):
try:
await websocket.accept()
except RuntimeError:
# The connection was closed before we could accept it
return
ws_channel = WebSocketChannel(websocket)
with self._lock:
self.channels[websocket] = ws_channel
return ws_channel
async def on_disconnect(self, websocket: WebSocketType):
"""
Call close on the channel if it's not already closed, and remove it from the channel list
:param websocket: The WebSocket object attached to the Channel
"""
with self._lock:
channel = self.channels.get(websocket)
if channel:
if not channel.is_closed():
await channel.close()
del self.channels[websocket]
async def disconnect_all(self):
"""
Disconnect all Channels
"""
with self._lock:
for websocket, channel in self.channels.items():
if not channel.is_closed():
await channel.close()
self.channels = dict()
async def broadcast(self, data):
"""
Broadcast data on all Channels
:param data: The data to send
"""
with self._lock:
message_type = data.get('type')
for websocket, channel in self.channels.items():
try:
if channel.subscribed_to(message_type):
await channel.send(data)
except RuntimeError:
# Handle cannot send after close cases
await self.on_disconnect(websocket)
async def send_direct(self, channel, data):
"""
Send data directly through the given channel only
:param channel: The WebSocketChannel object to send data through
:param data: The data to send
"""
await channel.send(data)
def has_channels(self):
"""
Flag for more than 0 channels
"""
return len(self.channels) > 0

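A standalone sketch of the broadcast contract implemented above: only channels subscribed to a message's type receive it. The Channel class here is an illustrative stand-in for WebSocketChannel:

import asyncio


class Channel:
    def __init__(self, name, topics):
        self.name, self._topics, self.received = name, topics, []

    def subscribed_to(self, message_type):
        return message_type in self._topics

    async def send(self, data):
        self.received.append(data)


async def broadcast(channels, data):
    for channel in channels:
        if channel.subscribed_to(data.get('type')):
            await channel.send(data)


async def main():
    channels = [Channel('a', ['whitelist']), Channel('b', ['analyzed_df'])]
    await broadcast(channels, {'type': 'whitelist', 'data': ['BTC/USDT']})
    print([(c.name, c.received) for c in channels])  # only 'a' received the message


asyncio.run(main())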
View File

@ -0,0 +1,69 @@
from typing import Any, Tuple, Union
from fastapi import WebSocket as FastAPIWebSocket
from websockets.client import WebSocketClientProtocol as WebSocket
from freqtrade.rpc.api_server.ws.types import WebSocketType
class WebSocketProxy:
"""
WebSocketProxy object to bring the FastAPIWebSocket and websockets.WebSocketClientProtocol
under the same API
"""
def __init__(self, websocket: WebSocketType):
self._websocket: Union[FastAPIWebSocket, WebSocket] = websocket
@property
def remote_addr(self) -> Tuple[Any, ...]:
if isinstance(self._websocket, WebSocket):
return self._websocket.remote_address
elif isinstance(self._websocket, FastAPIWebSocket):
if self._websocket.client:
client, port = self._websocket.client.host, self._websocket.client.port
return (client, port)
return ("unknown", 0)
async def send(self, data):
"""
Send data on the wrapped websocket
"""
if hasattr(self._websocket, "send_text"):
await self._websocket.send_text(data)
else:
await self._websocket.send(data)
async def recv(self):
"""
Receive data on the wrapped websocket
"""
if hasattr(self._websocket, "receive_text"):
return await self._websocket.receive_text()
else:
return await self._websocket.recv()
async def ping(self):
"""
Ping the websocket, not supported by FastAPI WebSockets
"""
if hasattr(self._websocket, "ping"):
return await self._websocket.ping()
return False
async def close(self, code: int = 1000):
"""
Close the websocket connection, only supported by FastAPI WebSockets
"""
if hasattr(self._websocket, "close"):
try:
return await self._websocket.close(code)
except RuntimeError:
pass
async def accept(self):
"""
Accept the WebSocket connection, only supported by FastAPI WebSockets
"""
if hasattr(self._websocket, "accept"):
return await self._websocket.accept()

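A standalone sketch of the hasattr-based dispatch above; the two classes are illustrative stand-ins for the FastAPI and websockets connection objects:

import asyncio


class FastAPIStyle:
    async def send_text(self, data):
        print("send_text:", data)


class WebsocketsStyle:
    async def send(self, data):
        print("send:", data)


async def proxy_send(ws, data):
    if hasattr(ws, "send_text"):
        await ws.send_text(data)
    else:
        await ws.send(data)


asyncio.run(proxy_send(FastAPIStyle(), "hello"))     # uses send_text
asyncio.run(proxy_send(WebsocketsStyle(), "hello"))  # falls back to send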
View File

@ -0,0 +1,62 @@
import logging
from abc import ABC, abstractmethod
import orjson
import rapidjson
from pandas import DataFrame
from freqtrade.misc import dataframe_to_json, json_to_dataframe
from freqtrade.rpc.api_server.ws.proxy import WebSocketProxy
logger = logging.getLogger(__name__)
class WebSocketSerializer(ABC):
def __init__(self, websocket: WebSocketProxy):
self._websocket: WebSocketProxy = websocket
@abstractmethod
def _serialize(self, data):
raise NotImplementedError()
@abstractmethod
def _deserialize(self, data):
raise NotImplementedError()
async def send(self, data: bytes):
await self._websocket.send(self._serialize(data))
async def recv(self) -> bytes:
data = await self._websocket.recv()
return self._deserialize(data)
async def close(self, code: int = 1000):
await self._websocket.close(code)
class HybridJSONWebSocketSerializer(WebSocketSerializer):
def _serialize(self, data) -> str:
return str(orjson.dumps(data, default=_json_default), "utf-8")
def _deserialize(self, data: str):
# RapidJSON expects strings
return rapidjson.loads(data, object_hook=_json_object_hook)
# Support serializing pandas DataFrames
def _json_default(z):
if isinstance(z, DataFrame):
return {
'__type__': 'dataframe',
'__value__': dataframe_to_json(z)
}
raise TypeError
# Support deserializing JSON to pandas DataFrames
def _json_object_hook(z):
if z.get('__type__') == 'dataframe':
return json_to_dataframe(z.get('__value__'))
return z

View File
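A standalone sketch of the hybrid pattern above: orjson writes via a default hook and rapidjson reads via an object_hook. The pandas split-orient round trip stands in for freqtrade's dataframe_to_json / json_to_dataframe helpers:

from io import StringIO

import orjson
import rapidjson
from pandas import DataFrame, read_json


def _default(z):
    if isinstance(z, DataFrame):
        return {'__type__': 'dataframe', '__value__': z.to_json(orient='split')}
    raise TypeError


def _object_hook(z):
    if z.get('__type__') == 'dataframe':
        return read_json(StringIO(z['__value__']), orient='split')
    return z


payload = {'type': 'analyzed_df', 'data': DataFrame({'close': [1.0, 2.0]})}
wire = str(orjson.dumps(payload, default=_default), 'utf-8')
print(rapidjson.loads(wire, object_hook=_object_hook))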

@ -0,0 +1,8 @@
from typing import Any, Dict, TypeVar
from fastapi import WebSocket as FastAPIWebSocket
from websockets.client import WebSocketClientProtocol as WebSocket
WebSocketType = TypeVar("WebSocketType", FastAPIWebSocket, WebSocket)
MessageType = Dict[str, Any]

View File

@ -0,0 +1,63 @@
from datetime import datetime
from typing import Any, Dict, List, Optional
from pandas import DataFrame
from pydantic import BaseModel
from freqtrade.constants import PairWithTimeframe
from freqtrade.enums.rpcmessagetype import RPCMessageType, RPCRequestType
class BaseArbitraryModel(BaseModel):
class Config:
arbitrary_types_allowed = True
class WSRequestSchema(BaseArbitraryModel):
type: RPCRequestType
data: Optional[Any] = None
class WSMessageSchema(BaseArbitraryModel):
type: RPCMessageType
data: Optional[Any] = None
class Config:
extra = 'allow'
# ------------------------------ REQUEST SCHEMAS ----------------------------
class WSSubscribeRequest(WSRequestSchema):
type: RPCRequestType = RPCRequestType.SUBSCRIBE
data: List[RPCMessageType]
class WSWhitelistRequest(WSRequestSchema):
type: RPCRequestType = RPCRequestType.WHITELIST
data: None = None
class WSAnalyzedDFRequest(WSRequestSchema):
type: RPCRequestType = RPCRequestType.ANALYZED_DF
data: Dict[str, Any] = {"limit": 1500}
# ------------------------------ MESSAGE SCHEMAS ----------------------------
class WSWhitelistMessage(WSMessageSchema):
type: RPCMessageType = RPCMessageType.WHITELIST
data: List[str]
class WSAnalyzedDFMessage(WSMessageSchema):
class AnalyzedDFData(BaseArbitraryModel):
key: PairWithTimeframe
df: DataFrame
la: datetime
type: RPCMessageType = RPCMessageType.ANALYZED_DF
data: AnalyzedDFData
# --------------------------------------------------------------------------

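A standalone pydantic sketch of the request/message split above, using pydantic v1-style parse_obj and dict() as in the surrounding code; plain strings replace the RPC enums here:

from typing import Any, List, Optional

from pydantic import BaseModel


class WSRequestSchema(BaseModel):
    type: str
    data: Optional[Any] = None


class WSWhitelistMessage(BaseModel):
    type: str = 'whitelist'
    data: List[str]


request = WSRequestSchema.parse_obj({'type': 'whitelist'})
response = WSWhitelistMessage(data=['BTC/USDT', 'ETH/USDT'])
print(request.dict(exclude_none=True), response.dict())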
View File

@ -1,6 +1,6 @@
import logging
from typing import Any, Dict
from freqtrade.constants import Config
from freqtrade.enums import RPCMessageType
from freqtrade.rpc import RPC
from freqtrade.rpc.webhook import Webhook
@ -10,7 +10,7 @@ logger = logging.getLogger(__name__)
class Discord(Webhook):
def __init__(self, rpc: 'RPC', config: Dict[str, Any]):
def __init__(self, rpc: 'RPC', config: Config):
# super().__init__(rpc, config)
self.rpc = rpc
self.config = config

View File

@ -0,0 +1,335 @@
"""
ExternalMessageConsumer module
Main purpose is to connect to an external bot's message websocket and consume
the data it produces
"""
import asyncio
import logging
import socket
from threading import Thread
from typing import TYPE_CHECKING, Any, Callable, Dict, List, TypedDict
import websockets
from pydantic import ValidationError
from freqtrade.data.dataprovider import DataProvider
from freqtrade.enums import RPCMessageType
from freqtrade.misc import remove_entry_exit_signals
from freqtrade.rpc.api_server.ws import WebSocketChannel
from freqtrade.rpc.api_server.ws_schemas import (WSAnalyzedDFMessage, WSAnalyzedDFRequest,
WSMessageSchema, WSRequestSchema,
WSSubscribeRequest, WSWhitelistMessage,
WSWhitelistRequest)
if TYPE_CHECKING:
import websockets.connect
class Producer(TypedDict):
name: str
host: str
port: int
ws_token: str
logger = logging.getLogger(__name__)
class ExternalMessageConsumer:
"""
The main controller class for consuming external messages from
other freqtrade bots
"""
def __init__(
self,
config: Dict[str, Any],
dataprovider: DataProvider
):
self._config = config
self._dp = dataprovider
self._running = False
self._thread = None
self._loop = None
self._main_task = None
self._sub_tasks = None
self._emc_config = self._config.get('external_message_consumer', {})
self.enabled = self._emc_config.get('enabled', False)
self.producers: List[Producer] = self._emc_config.get('producers', [])
self.wait_timeout = self._emc_config.get('wait_timeout', 300) # in seconds
self.ping_timeout = self._emc_config.get('ping_timeout', 10) # in seconds
self.sleep_time = self._emc_config.get('sleep_time', 10) # in seconds
# The amount of candles per dataframe on the initial request
self.initial_candle_limit = self._emc_config.get('initial_candle_limit', 1500)
# Message size limit, in megabytes. Default 8 MB; shifted left by 20 bits to convert
# to bytes, as the websockets client expects.
self.message_size_limit = (self._emc_config.get('message_size_limit', 8) << 20)
# Setting these explicitly as they probably shouldn't be changed by a user
# Unless we somehow integrate this with the strategy to allow creating
# callbacks for the messages
self.topics = [RPCMessageType.WHITELIST, RPCMessageType.ANALYZED_DF]
# Allow setting data for each initial request
self._initial_requests: List[WSRequestSchema] = [
WSSubscribeRequest(data=self.topics),
WSWhitelistRequest(),
WSAnalyzedDFRequest()
]
# Specify which function to use for which RPCMessageType
self._message_handlers: Dict[str, Callable[[str, WSMessageSchema], None]] = {
RPCMessageType.WHITELIST: self._consume_whitelist_message,
RPCMessageType.ANALYZED_DF: self._consume_analyzed_df_message,
}
self.start()
def start(self):
"""
Start the main internal loop in another thread to run coroutines
"""
if self._thread and self._loop:
return
logger.info("Starting ExternalMessageConsumer")
self._loop = asyncio.new_event_loop()
self._thread = Thread(target=self._loop.run_forever)
self._running = True
self._thread.start()
self._main_task = asyncio.run_coroutine_threadsafe(self._main(), loop=self._loop)
def shutdown(self):
"""
Shutdown the loop, thread, and tasks
"""
if self._thread and self._loop:
logger.info("Stopping ExternalMessageConsumer")
self._running = False
if self._sub_tasks:
# Cancel sub tasks
for task in self._sub_tasks:
task.cancel()
if self._main_task:
# Cancel the main task
self._main_task.cancel()
self._thread.join()
self._thread = None
self._loop = None
self._sub_tasks = None
self._main_task = None
async def _main(self):
"""
The main task coroutine
"""
lock = asyncio.Lock()
try:
# Create a connection to each producer
self._sub_tasks = [
self._loop.create_task(self._handle_producer_connection(producer, lock))
for producer in self.producers
]
await asyncio.gather(*self._sub_tasks)
except asyncio.CancelledError:
pass
finally:
# Stop the loop once we are done
self._loop.stop()
async def _handle_producer_connection(self, producer: Producer, lock: asyncio.Lock):
"""
Main connection loop for the consumer
:param producer: Dictionary containing producer info
:param lock: An asyncio Lock
"""
try:
await self._create_connection(producer, lock)
except asyncio.CancelledError:
# Exit silently
pass
async def _create_connection(self, producer: Producer, lock: asyncio.Lock):
"""
Actually creates and handles the websocket connection, pinging on timeout
and handling connection errors.
:param producer: Dictionary containing producer info
:param lock: An asyncio Lock
"""
while self._running:
try:
host, port = producer['host'], producer['port']
token = producer['ws_token']
name = producer['name']
ws_url = f"ws://{host}:{port}/api/v1/message/ws?token={token}"
# This will raise InvalidURI if the url is bad
async with websockets.connect(ws_url, max_size=self.message_size_limit) as ws:
channel = WebSocketChannel(ws, channel_id=name)
logger.info(f"Producer connection success - {channel}")
# Now request the initial data from this Producer
for request in self._initial_requests:
await channel.send(
request.dict(exclude_none=True)
)
# Now receive data; if none arrives within the time limit, ping
await self._receive_messages(channel, producer, lock)
except (websockets.exceptions.InvalidURI, ValueError) as e:
logger.error(f"{ws_url} is an invalid WebSocket URL - {e}")
break
except (
socket.gaierror,
ConnectionRefusedError,
websockets.exceptions.InvalidStatusCode,
websockets.exceptions.InvalidMessage
) as e:
logger.error(f"Connection Refused - {e} retrying in {self.sleep_time}s")
await asyncio.sleep(self.sleep_time)
continue
except (
websockets.exceptions.ConnectionClosedError,
websockets.exceptions.ConnectionClosedOK
):
# Just keep trying to connect again indefinitely
await asyncio.sleep(self.sleep_time)
continue
except Exception as e:
# An unforeseen error has occurred, log and continue
logger.error("Unexpected error has occurred:")
logger.exception(e)
continue
async def _receive_messages(
self,
channel: WebSocketChannel,
producer: Producer,
lock: asyncio.Lock
):
"""
Loop to handle receiving messages from a Producer
:param channel: The WebSocketChannel object for the WebSocket
:param producer: Dictionary containing producer info
:param lock: An asyncio Lock
"""
while self._running:
try:
message = await asyncio.wait_for(
channel.recv(),
timeout=self.wait_timeout
)
try:
async with lock:
# Handle the message
self.handle_producer_message(producer, message)
except Exception as e:
logger.exception(f"Error handling producer message: {e}")
except (asyncio.TimeoutError, websockets.exceptions.ConnectionClosed):
# We haven't received data yet. Check the connection and continue.
try:
# ping
ping = await channel.ping()
await asyncio.wait_for(ping, timeout=self.ping_timeout)
logger.debug(f"Connection to {channel} still alive...")
continue
except Exception as e:
logger.warning(f"Ping error {channel} - retrying in {self.sleep_time}s")
logger.debug(e, exc_info=e)
await asyncio.sleep(self.sleep_time)
break
def handle_producer_message(self, producer: Producer, message: Dict[str, Any]):
"""
Handles external messages from a Producer
"""
producer_name = producer.get('name', 'default')
try:
producer_message = WSMessageSchema.parse_obj(message)
except ValidationError as e:
logger.error(f"Invalid message from `{producer_name}`: {e}")
return
if not producer_message.data:
logger.error(f"Empty message received from `{producer_name}`")
return
logger.info(f"Received message of type `{producer_message.type}` from `{producer_name}`")
message_handler = self._message_handlers.get(producer_message.type)
if not message_handler:
logger.info(f"Received unhandled message: `{producer_message.data}`, ignoring...")
return
message_handler(producer_name, producer_message)
def _consume_whitelist_message(self, producer_name: str, message: WSMessageSchema):
try:
# Validate the message
whitelist_message = WSWhitelistMessage.parse_obj(message)
except ValidationError as e:
logger.error(f"Invalid message from `{producer_name}`: {e}")
return
# Add the pairlist data to the DataProvider
self._dp._set_producer_pairs(whitelist_message.data, producer_name=producer_name)
logger.debug(f"Consumed message from `{producer_name}` of type `RPCMessageType.WHITELIST`")
def _consume_analyzed_df_message(self, producer_name: str, message: WSMessageSchema):
try:
df_message = WSAnalyzedDFMessage.parse_obj(message)
except ValidationError as e:
logger.error(f"Invalid message from `{producer_name}`: {e}")
return
key = df_message.data.key
df = df_message.data.df
la = df_message.data.la
pair, timeframe, candle_type = key
# If set, remove the Entry and Exit signals from the Producer
if self._emc_config.get('remove_entry_exit_signals', False):
df = remove_entry_exit_signals(df)
# Add the dataframe to the dataprovider
self._dp._add_external_df(pair, df,
last_analyzed=la,
timeframe=timeframe,
candle_type=candle_type,
producer_name=producer_name)
logger.debug(
f"Consumed message from `{producer_name}` of type `RPCMessageType.ANALYZED_DF`")

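A standalone sketch of the receive/keep-alive shape used in _receive_messages above: wait for a message with a timeout and ping the peer if nothing arrived. recv and ping are illustrative stand-ins, not the real WebSocketChannel methods:

import asyncio


async def demo():
    async def recv():
        await asyncio.sleep(2)                      # simulate a quiet producer
        return {'type': 'whitelist', 'data': []}

    async def ping():
        return True

    try:
        message = await asyncio.wait_for(recv(), timeout=0.1)
        print("handled", message)
    except asyncio.TimeoutError:
        await asyncio.wait_for(ping(), timeout=10)  # nothing arrived - check the connection
        print("connection still alive, keep waiting")


asyncio.run(demo())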
Some files were not shown because too many files have changed in this diff.