Merge branch 'develop' into pr/Blackhawke/3764

commit 503d5db113

.github/workflows/ci.yml (vendored): 4 changed lines
@@ -88,7 +88,7 @@ jobs:
 run: |
 cp config.json.example config.json
 freqtrade create-userdir --userdir user_data
-freqtrade hyperopt --datadir tests/testdata -e 5 --strategy SampleStrategy --hyperopt SampleHyperOpt
+freqtrade hyperopt --datadir tests/testdata -e 5 --strategy SampleStrategy --hyperopt SampleHyperOpt --print-all

 - name: Flake8
 run: |
@@ -150,7 +150,7 @@ jobs:
 run: |
 cp config.json.example config.json
 freqtrade create-userdir --userdir user_data
-freqtrade hyperopt --datadir tests/testdata -e 5 --strategy SampleStrategy --hyperopt SampleHyperOpt
+freqtrade hyperopt --datadir tests/testdata -e 5 --strategy SampleStrategy --hyperopt SampleHyperOpt --print-all

 - name: Flake8
 run: |

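The only change in both CI jobs is the added `--print-all` flag, which makes hyperopt print every evaluated epoch instead of only the improving ones. A minimal sketch of reproducing the smoke test locally, assuming a freqtrade checkout with the bundled test data:

``` bash
# Sketch: reproduce the CI hyperopt smoke test locally (run from the repository root)
cp config.json.example config.json
freqtrade create-userdir --userdir user_data
freqtrade hyperopt --datadir tests/testdata -e 5 --strategy SampleStrategy --hyperopt SampleHyperOpt --print-all
```
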
@@ -1,7 +1,7 @@
 FROM --platform=linux/arm/v7 python:3.7.7-slim-buster

 RUN apt-get update \
-&& apt-get -y install curl build-essential libssl-dev libatlas3-base libgfortran5 sqlite3 \
+&& apt-get -y install curl build-essential libssl-dev libffi-dev libatlas3-base libgfortran5 sqlite3 \
 && apt-get clean \
 && pip install --upgrade pip \
 && echo "[global]\nextra-index-url=https://www.piwheels.org/simple" > /etc/pip.conf

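The hunk above adds `libffi-dev` to the ARM (linux/arm/v7) image build. A rough check that the extra package resolves against the same base image, assuming a host with ARM emulation set up or an actual ARM device:

``` bash
# Sketch: confirm libffi-dev is installable on the base image used above
docker run --rm --platform linux/arm/v7 python:3.7.7-slim-buster \
  sh -c 'apt-get update && apt-get -y install libffi-dev && echo "libffi-dev ok"'
```
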
@@ -123,7 +123,6 @@ Telegram is not mandatory. However, this is a great way to control your bot. Mor
 - `/help`: Show help message
 - `/version`: Show version

-
 ## Development branches

 The project is currently setup in two main branches:

@@ -7,7 +7,6 @@
 "timeframe": "5m",
 "dry_run": false,
 "cancel_open_orders_on_exit": false,
-"trailing_stop": false,
 "unfilledtimeout": {
 "buy": 10,
 "sell": 30

@@ -7,7 +7,6 @@
 "timeframe": "5m",
 "dry_run": true,
 "cancel_open_orders_on_exit": false,
-"trailing_stop": false,
 "unfilledtimeout": {
 "buy": 10,
 "sell": 30

@@ -7,7 +7,6 @@
 "timeframe": "5m",
 "dry_run": true,
 "cancel_open_orders_on_exit": false,
-"trailing_stop": false,
 "unfilledtimeout": {
 "buy": 10,
 "sell": 30

@@ -157,17 +157,32 @@ A backtesting result will look like that:
 | ADA/BTC | 1 | 0.89 | 0.89 | 0.00004434 | 0.44 | 6:00:00 | 1 | 0 | 0 |
 | LTC/BTC | 1 | 0.68 | 0.68 | 0.00003421 | 0.34 | 2:00:00 | 1 | 0 | 0 |
 | TOTAL | 2 | 0.78 | 1.57 | 0.00007855 | 0.78 | 4:00:00 | 2 | 0 | 0 |
+=============== SUMMARY METRICS ===============
+| Metric                | Value               |
+|-----------------------+---------------------|
+| Backtesting from      | 2019-01-01 00:00:00 |
+| Backtesting to        | 2019-05-01 00:00:00 |
+| Total trades          | 429                 |
+| First trade           | 2019-01-01 18:30:00 |
+| First trade Pair      | EOS/USDT            |
+| Total Profit %        | 152.41%             |
+| Trades per day        | 3.575               |
+| Best day              | 25.27%              |
+| Worst day             | -30.67%             |
+| Avg. Duration Winners | 4:23:00             |
+| Avg. Duration Loser   | 6:55:00             |
+|                       |                     |
+| Max Drawdown          | 50.63%              |
+| Drawdown Start        | 2019-02-15 14:10:00 |
+| Drawdown End          | 2019-04-11 18:15:00 |
+| Market change         | -5.88%              |
+===============================================
 ```

+### Backtesting report table

 The 1st table contains all trades the bot made, including "left open trades".

-The 2nd table contains a recap of sell reasons.
-This table can tell you which area needs some additional work (i.e. all `sell_signal` trades are losses, so we should disable the sell-signal or work on improving that).
-
-The 3rd table contains all trades the bot had to `forcesell` at the end of the backtest period to present a full picture.
-This is necessary to simulate realistic behaviour, since the backtest period has to end at some point, while realistically, you could leave the bot running forever.
-These trades are also included in the first table, but are extracted separately for clarity.

 The last line will give you the overall performance of your strategy,
 here:

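The summary metrics block added above extends the sample backtesting output. A minimal sketch of the invocation that produces such a report, assuming candle data has already been downloaded and using the bundled sample strategy over the date range shown in the table:

``` bash
# Sketch: run a backtest over the range shown in the summary metrics above
freqtrade backtesting --strategy SampleStrategy --timerange 20190101-20190501
```
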
@@ -196,6 +211,58 @@ On the other hand, if you set a too high `minimal_roi` like `"0": 0.55`
 (55%), there is almost no chance that the bot will ever reach this profit.
 Hence, keep in mind that your performance is an integral mix of all different elements of the strategy, your configuration, and the crypto-currency pairs you have set up.

+### Sell reasons table
+
+The 2nd table contains a recap of sell reasons.
+This table can tell you which area needs some additional work (e.g. all or many of the `sell_signal` trades are losses, so you should work on improving the sell signal, or consider disabling it).
+
+### Left open trades table
+
+The 3rd table contains all trades the bot had to `forcesell` at the end of the backtesting period to present you the full picture.
+This is necessary to simulate realistic behavior, since the backtest period has to end at some point, while realistically, you could leave the bot running forever.
+These trades are also included in the first table, but are also shown separately in this table for clarity.
+
+### Summary metrics
+
+The last element of the backtest report is the summary metrics table.
+It contains some useful key metrics about performance of your strategy on backtesting data.
+
+```
+=============== SUMMARY METRICS ===============
+| Metric                | Value               |
+|-----------------------+---------------------|
+| Backtesting from      | 2019-01-01 00:00:00 |
+| Backtesting to        | 2019-05-01 00:00:00 |
+| Total trades          | 429                 |
+| First trade           | 2019-01-01 18:30:00 |
+| First trade Pair      | EOS/USDT            |
+| Total Profit %        | 152.41%             |
+| Trades per day        | 3.575               |
+| Best day              | 25.27%              |
+| Worst day             | -30.67%             |
+| Avg. Duration Winners | 4:23:00             |
+| Avg. Duration Loser   | 6:55:00             |
+|                       |                     |
+| Max Drawdown          | 50.63%              |
+| Drawdown Start        | 2019-02-15 14:10:00 |
+| Drawdown End          | 2019-04-11 18:15:00 |
+| Market change         | -5.88%              |
+===============================================
+
+```
+
+- `Total trades`: Identical to the total trades of the backtest output table.
+- `First trade`: First trade entered.
+- `First trade pair`: Which pair was part of the first trade.
+- `Backtesting from` / `Backtesting to`: Backtesting range (usually defined with the `--timerange` option).
+- `Total Profit %`: Total profit per stake amount. Aligned to the TOTAL column of the first table.
+- `Trades per day`: Total trades divided by the backtesting duration in days (this will give you information about how many trades to expect from the strategy).
+- `Best day` / `Worst day`: Best and worst day based on daily profit.
+- `Avg. Duration Winners` / `Avg. Duration Loser`: Average durations for winning and losing trades.
+- `Max Drawdown`: Maximum drawdown experienced. For example, the value of 50% means that from highest to subsequent lowest point, a 50% drop was experienced).
+- `Drawdown Start` / `Drawdown End`: Start and end datetimes for this largest drawdown (can also be visualized via the `plot-dataframe` sub-command).
+- `Market change`: Change of the market during the backtest period. Calculated as average of all pairs changes from the first to the last candle using the "close" column.

 ### Assumptions made by backtesting

 Since backtesting lacks some detailed information about what happens within a candle, it needs to take a few assumptions:

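The new `Max Drawdown` / `Drawdown Start` / `Drawdown End` rows point to the `plot-dataframe` sub-command for visualization. A sketch of such a call, using the pair and drawdown window from the example table (strategy name assumed):

``` bash
# Sketch: plot the example drawdown window for the first-trade pair from the table above
freqtrade plot-dataframe --strategy SampleStrategy -p EOS/USDT --timerange 20190215-20190412
```
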
@@ -5,6 +5,9 @@ This page explains the different parameters of the bot and how to run it.
 !!! Note
 If you've used `setup.sh`, don't forget to activate your virtual environment (`source .env/bin/activate`) before running freqtrade commands.

+!!! Warning "Up-to-date clock"
+The clock on the system running the bot must be accurate, synchronized to a NTP server frequently enough to avoid problems with communication to the exchanges.
+
 ## Bot commands

 ```

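The new warning concerns clock accuracy. One quick way to confirm the host clock is being kept in sync, assuming a systemd-based Linux host:

``` bash
# Sketch: check NTP synchronization status on a systemd-based host
timedatectl | grep -i "synchronized"
```
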
@@ -55,9 +55,9 @@ Mandatory parameters are marked as **Required**, which means that they are requi
 | `process_only_new_candles` | Enable processing of indicators only when new candles arrive. If false each loop populates the indicators, this will mean the same candle is processed many times creating system load but can be useful of your strategy depends on tick data not only candle. [Strategy Override](#parameters-in-the-strategy). <br>*Defaults to `false`.* <br> **Datatype:** Boolean
 | `minimal_roi` | **Required.** Set the threshold as ratio the bot will use to sell a trade. [More information below](#understand-minimal_roi). [Strategy Override](#parameters-in-the-strategy). <br> **Datatype:** Dict
 | `stoploss` | **Required.** Value as ratio of the stoploss used by the bot. More details in the [stoploss documentation](stoploss.md). [Strategy Override](#parameters-in-the-strategy). <br> **Datatype:** Float (as ratio)
-| `trailing_stop` | Enables trailing stoploss (based on `stoploss` in either configuration or strategy file). More details in the [stoploss documentation](stoploss.md). [Strategy Override](#parameters-in-the-strategy). <br> **Datatype:** Boolean
+| `trailing_stop` | Enables trailing stoploss (based on `stoploss` in either configuration or strategy file). More details in the [stoploss documentation](stoploss.md#trailing-stop-loss). [Strategy Override](#parameters-in-the-strategy). <br> **Datatype:** Boolean
-| `trailing_stop_positive` | Changes stoploss once profit has been reached. More details in the [stoploss documentation](stoploss.md). [Strategy Override](#parameters-in-the-strategy). <br> **Datatype:** Float
+| `trailing_stop_positive` | Changes stoploss once profit has been reached. More details in the [stoploss documentation](stoploss.md#trailing-stop-loss-custom-positive-loss). [Strategy Override](#parameters-in-the-strategy). <br> **Datatype:** Float
-| `trailing_stop_positive_offset` | Offset on when to apply `trailing_stop_positive`. Percentage value which should be positive. More details in the [stoploss documentation](stoploss.md). [Strategy Override](#parameters-in-the-strategy). <br>*Defaults to `0.0` (no offset).* <br> **Datatype:** Float
+| `trailing_stop_positive_offset` | Offset on when to apply `trailing_stop_positive`. Percentage value which should be positive. More details in the [stoploss documentation](stoploss.md#trailing-stop-loss-only-once-the-trade-has-reached-a-certain-offset). [Strategy Override](#parameters-in-the-strategy). <br>*Defaults to `0.0` (no offset).* <br> **Datatype:** Float
 | `trailing_only_offset_is_reached` | Only apply trailing stoploss when the offset is reached. [stoploss documentation](stoploss.md). [Strategy Override](#parameters-in-the-strategy). <br>*Defaults to `false`.* <br> **Datatype:** Boolean
 | `unfilledtimeout.buy` | **Required.** How long (in minutes) the bot will wait for an unfilled buy order to complete, after which the order will be cancelled. [Strategy Override](#parameters-in-the-strategy).<br> **Datatype:** Integer
 | `unfilledtimeout.sell` | **Required.** How long (in minutes) the bot will wait for an unfilled sell order to complete, after which the order will be cancelled. [Strategy Override](#parameters-in-the-strategy).<br> **Datatype:** Integer

@@ -278,24 +278,13 @@ This allows to buy using limit orders, sell using
 limit-orders, and create stoplosses using market orders. It also allows to set the
 stoploss "on exchange" which means stoploss order would be placed immediately once
 the buy order is fulfilled.
-If `stoploss_on_exchange` and `trailing_stop` are both set, then the bot will use `stoploss_on_exchange_interval` to check and update the stoploss on exchange periodically.
-`order_types` can be set in the configuration file or in the strategy.
 `order_types` set in the configuration file overwrites values set in the strategy as a whole, so you need to configure the whole `order_types` dictionary in one place.

 If this is configured, the following 4 values (`buy`, `sell`, `stoploss` and
 `stoploss_on_exchange`) need to be present, otherwise the bot will fail to start.

-`emergencysell` is an optional value, which defaults to `market` and is used when creating stoploss on exchange orders fails.
+For information on (`emergencysell`,`stoploss_on_exchange`,`stoploss_on_exchange_interval`,`stoploss_on_exchange_limit_ratio`) please see stop loss documentation [stop loss on exchange](stoploss.md)
-The below is the default which is used if this is not configured in either strategy or configuration file.

-Not all Exchanges support `stoploss_on_exchange`. If an exchange supports both limit and market stoploss orders, then the value of `stoploss` will be used to determine the stoploss type.
-
-If `stoploss_on_exchange` uses limit orders, the exchange needs 2 prices, the stoploss_price and the Limit price.
-`stoploss` defines the stop-price - and limit should be slightly below this.
-
-This defaults to 0.99 / 1% (configurable via `stoploss_on_exchange_limit_ratio`).
-Calculation example: we bought the asset at 100$.
-Stop-price is 95$, then limit would be `95 * 0.99 = 94.05$` - so the stoploss will happen between 95$ and 94.05$.

 Syntax for Strategy:

@@ -663,24 +652,28 @@ Filters low-value coins which would not allow setting stoplosses.
 #### PriceFilter

 The `PriceFilter` allows filtering of pairs by price. Currently the following price filters are supported:

 * `min_price`
 * `max_price`
 * `low_price_ratio`

 The `min_price` setting removes pairs where the price is below the specified price. This is useful if you wish to avoid trading very low-priced pairs.
-This option is disabled by default, and will only apply if set to <> 0.
+This option is disabled by default, and will only apply if set to > 0.

 The `max_price` setting removes pairs where the price is above the specified price. This is useful if you wish to trade only low-priced pairs.
-This option is disabled by default, and will only apply if set to <> 0.
+This option is disabled by default, and will only apply if set to > 0.

 The `low_price_ratio` setting removes pairs where a raise of 1 price unit (pip) is above the `low_price_ratio` ratio.
-This option is disabled by default, and will only apply if set to <> 0.
+This option is disabled by default, and will only apply if set to > 0.

+For `PriceFiler` at least one of its `min_price`, `max_price` or `low_price_ratio` settings must be applied.
+
 Calculation example:

-Min price precision is 8 decimals. If price is 0.00000011 - one step would be 0.00000012 - which is almost 10% higher than the previous value.
+Min price precision for SHITCOIN/BTC is 8 decimals. If its price is 0.00000011 - one price step above would be 0.00000012, which is ~9% higher than the previous price value. You may filter out this pair by using PriceFilter with `low_price_ratio` set to 0.09 (9%) or with `min_price` set to 0.00000011, correspondingly.

-These pairs are dangerous since it may be impossible to place the desired stoploss - and often result in high losses.
+!!! Warning "Low priced pairs"
+Low priced pairs with high "1 pip movements" are dangerous since they are often illiquid and it may also be impossible to place the desired stoploss, which can often result in high losses since price needs to be rounded to the next tradable price - so instead of having a stoploss of -5%, you could end up with a stoploss of -9% simply due to price rounding.

 #### ShuffleFilter

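The revised `low_price_ratio` example can be checked with a one-liner; using the prices from the paragraph above, one pip works out to roughly a 9% move:

``` bash
# Sketch: one price step from 0.00000011 to 0.00000012, as a ratio of the starting price
python3 -c 'print(round((0.00000012 - 0.00000011) / 0.00000011, 4))'   # ~0.0909, i.e. ~9%
```
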
@@ -15,61 +15,91 @@ Otherwise `--exchange` becomes mandatory.
 ### Usage

 ```
-usage: freqtrade download-data [-h] [-v] [--logfile FILE] [-V] [-c PATH] [-d PATH] [--userdir PATH] [-p PAIRS [PAIRS ...]]
-[--pairs-file FILE] [--days INT] [--dl-trades] [--exchange EXCHANGE]
+usage: freqtrade download-data [-h] [-v] [--logfile FILE] [-V] [-c PATH]
+[-d PATH] [--userdir PATH]
+[-p PAIRS [PAIRS ...]] [--pairs-file FILE]
+[--days INT] [--dl-trades]
+[--exchange EXCHANGE]
 [-t {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w} ...]]
-[--erase] [--data-format-ohlcv {json,jsongz}] [--data-format-trades {json,jsongz}]
+[--erase]
+[--data-format-ohlcv {json,jsongz,hdf5}]
+[--data-format-trades {json,jsongz,hdf5}]

 optional arguments:
 -h, --help show this help message and exit
 -p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
-Show profits for only these pairs. Pairs are space-separated.
+Show profits for only these pairs. Pairs are space-
+separated.
 --pairs-file FILE File containing a list of pairs to download.
 --days INT Download data for given number of days.
---dl-trades Download trades instead of OHLCV data. The bot will resample trades to the desired timeframe as specified as
---timeframes/-t.
---exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no config is provided.
+--dl-trades Download trades instead of OHLCV data. The bot will
+resample trades to the desired timeframe as specified
+as --timeframes/-t.
+--exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no
+config is provided.
 -t {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w} ...], --timeframes {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w} ...]
-Specify which tickers to download. Space-separated list. Default: `1m 5m`.
---erase Clean all existing data for the selected exchange/pairs/timeframes.
---data-format-ohlcv {json,jsongz}
-Storage format for downloaded candle (OHLCV) data. (default: `json`).
---data-format-trades {json,jsongz}
-Storage format for downloaded trades data. (default: `jsongz`).
+Specify which tickers to download. Space-separated
+list. Default: `1m 5m`.
+--erase Clean all existing data for the selected
+exchange/pairs/timeframes.
+--data-format-ohlcv {json,jsongz,hdf5}
+Storage format for downloaded candle (OHLCV) data.
+(default: `json`).
+--data-format-trades {json,jsongz,hdf5}
+Storage format for downloaded trades data. (default:
+`jsongz`).

 Common arguments:
 -v, --verbose Verbose mode (-vv for more, -vvv to get all messages).
---logfile FILE Log to the file specified. Special values are: 'syslog', 'journald'. See the documentation for more details.
+--logfile FILE Log to the file specified. Special values are:
+'syslog', 'journald'. See the documentation for more
+details.
 -V, --version show program's version number and exit
 -c PATH, --config PATH
-Specify configuration file (default: `config.json`). Multiple --config options may be used. Can be set to `-`
-to read config from stdin.
+Specify configuration file (default:
+`userdir/config.json` or `config.json` whichever
+exists). Multiple --config options may be used. Can be
+set to `-` to read config from stdin.
 -d PATH, --datadir PATH
 Path to directory with historical backtesting data.
 --userdir PATH, --user-data-dir PATH
 Path to userdata directory.

 ```

 ### Data format

-Freqtrade currently supports 2 dataformats, `json` (plain "text" json files) and `jsongz` (a gzipped version of json files).
+Freqtrade currently supports 3 data-formats for both OHLCV and trades data:
+
+* `json` (plain "text" json files)
+* `jsongz` (a gzip-zipped version of json files)
+* `hdf5` (a high performance datastore)

 By default, OHLCV data is stored as `json` data, while trades data is stored as `jsongz` data.

-This can be changed via the `--data-format-ohlcv` and `--data-format-trades` parameters respectivly.
-
-If the default dataformat has been changed during download, then the keys `dataformat_ohlcv` and `dataformat_trades` in the configuration file need to be adjusted to the selected dataformat as well.
+This can be changed via the `--data-format-ohlcv` and `--data-format-trades` command line arguments respectively.
+To persist this change, you can should also add the following snippet to your configuration, so you don't have to insert the above arguments each time:
+
+``` jsonc
+// ...
+"dataformat_ohlcv": "hdf5",
+"dataformat_trades": "hdf5",
+// ...
+```
+
+If the default data-format has been changed during download, then the keys `dataformat_ohlcv` and `dataformat_trades` in the configuration file need to be adjusted to the selected dataformat as well.

 !!! Note
-You can convert between data-formats using the [convert-data](#subcommand-convert-data) and [convert-trade-data](#subcommand-convert-trade-data) methods.
+You can convert between data-formats using the [convert-data](#sub-command-convert-data) and [convert-trade-data](#sub-command-convert-trade-data) methods.

-#### Subcommand convert data
+#### Sub-command convert data

 ```
 usage: freqtrade convert-data [-h] [-v] [--logfile FILE] [-V] [-c PATH]
 [-d PATH] [--userdir PATH]
 [-p PAIRS [PAIRS ...]] --format-from
-{json,jsongz} --format-to {json,jsongz}
-[--erase]
+{json,jsongz,hdf5} --format-to
+{json,jsongz,hdf5} [--erase]
 [-t {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w} ...]]

 optional arguments:
@@ -77,9 +107,9 @@ optional arguments:
 -p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
 Show profits for only these pairs. Pairs are space-
 separated.
---format-from {json,jsongz}
+--format-from {json,jsongz,hdf5}
 Source format for data conversion.
---format-to {json,jsongz}
+--format-to {json,jsongz,hdf5}
 Destination format for data conversion.
 --erase Clean all existing data for the selected
 exchange/pairs/timeframes.
@@ -94,9 +124,10 @@ Common arguments:
 details.
 -V, --version show program's version number and exit
 -c PATH, --config PATH
-Specify configuration file (default: `config.json`).
-Multiple --config options may be used. Can be set to
-`-` to read config from stdin.
+Specify configuration file (default:
+`userdir/config.json` or `config.json` whichever
+exists). Multiple --config options may be used. Can be
+set to `-` to read config from stdin.
 -d PATH, --datadir PATH
 Path to directory with historical backtesting data.
 --userdir PATH, --user-data-dir PATH
@@ -112,23 +143,23 @@ It'll also remove original json data files (`--erase` parameter).
 freqtrade convert-data --format-from json --format-to jsongz --datadir ~/.freqtrade/data/binance -t 5m 15m --erase
 ```

-#### Subcommand convert-trade data
+#### Sub-command convert trade data

 ```
 usage: freqtrade convert-trade-data [-h] [-v] [--logfile FILE] [-V] [-c PATH]
 [-d PATH] [--userdir PATH]
 [-p PAIRS [PAIRS ...]] --format-from
-{json,jsongz} --format-to {json,jsongz}
-[--erase]
+{json,jsongz,hdf5} --format-to
+{json,jsongz,hdf5} [--erase]

 optional arguments:
 -h, --help show this help message and exit
 -p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
 Show profits for only these pairs. Pairs are space-
 separated.
---format-from {json,jsongz}
+--format-from {json,jsongz,hdf5}
 Source format for data conversion.
---format-to {json,jsongz}
+--format-to {json,jsongz,hdf5}
 Destination format for data conversion.
 --erase Clean all existing data for the selected
 exchange/pairs/timeframes.
@@ -140,13 +171,15 @@ Common arguments:
 details.
 -V, --version show program's version number and exit
 -c PATH, --config PATH
-Specify configuration file (default: `config.json`).
-Multiple --config options may be used. Can be set to
-`-` to read config from stdin.
+Specify configuration file (default:
+`userdir/config.json` or `config.json` whichever
+exists). Multiple --config options may be used. Can be
+set to `-` to read config from stdin.
 -d PATH, --datadir PATH
 Path to directory with historical backtesting data.
 --userdir PATH, --user-data-dir PATH
 Path to userdata directory.

 ```

 ##### Example converting trades
@@ -158,21 +191,21 @@ It'll also remove original jsongz data files (`--erase` parameter).
 freqtrade convert-trade-data --format-from jsongz --format-to json --datadir ~/.freqtrade/data/kraken --erase
 ```

-### Subcommand list-data
+### Sub-command list-data

-You can get a list of downloaded data using the `list-data` subcommand.
+You can get a list of downloaded data using the `list-data` sub-command.

 ```
 usage: freqtrade list-data [-h] [-v] [--logfile FILE] [-V] [-c PATH] [-d PATH]
 [--userdir PATH] [--exchange EXCHANGE]
-[--data-format-ohlcv {json,jsongz}]
+[--data-format-ohlcv {json,jsongz,hdf5}]
 [-p PAIRS [PAIRS ...]]

 optional arguments:
 -h, --help show this help message and exit
 --exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no
 config is provided.
---data-format-ohlcv {json,jsongz}
+--data-format-ohlcv {json,jsongz,hdf5}
 Storage format for downloaded candle (OHLCV) data.
 (default: `json`).
 -p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
@@ -194,6 +227,7 @@ Common arguments:
 Path to directory with historical backtesting data.
 --userdir PATH, --user-data-dir PATH
 Path to userdata directory.

 ```

 #### Example list-data
@@ -249,7 +283,7 @@ This will download historical candle (OHLCV) data for all the currency pairs you
 ### Other Notes

 - To use a different directory than the exchange specific default, use `--datadir user_data/data/some_directory`.
-- To change the exchange used to download the historical data from, please use a different configuration file (you'll probably need to adjust ratelimits etc.)
+- To change the exchange used to download the historical data from, please use a different configuration file (you'll probably need to adjust rate limits etc.)
 - To use `pairs.json` from some other directory, use `--pairs-file some_other_dir/pairs.json`.
 - To download historical candle (OHLCV) data for only 10 days, use `--days 10` (defaults to 30 days).
 - Use `--timeframes` to specify what timeframe download the historical candle (OHLCV) data for. Default is `--timeframes 1m 5m` which will download 1-minute and 5-minute data.

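A sketch combining several of the options listed above into a single `download-data` call (exchange, pairs and directory are illustrative):

``` bash
# Sketch: 10 days of 1m/5m candles for two pairs, stored under a custom data directory
freqtrade download-data --exchange binance -p ETH/BTC XRP/BTC --days 10 -t 1m 5m --datadir user_data/data/some_directory
```
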
@@ -257,7 +291,7 @@ This will download historical candle (OHLCV) data for all the currency pairs you

 ### Trades (tick) data

-By default, `download-data` subcommand downloads Candles (OHLCV) data. Some exchanges also provide historic trade-data via their API.
+By default, `download-data` sub-command downloads Candles (OHLCV) data. Some exchanges also provide historic trade-data via their API.
 This data can be useful if you need many different timeframes, since it is only downloaded once, and then resampled locally to the desired timeframes.

 Since this data is large by default, the files use gzip by default. They are stored in your data-directory with the naming convention of `<pair>-trades.json.gz` (`ETH_BTC-trades.json.gz`). Incremental mode is also supported, as for historic OHLCV data, so downloading the data once per week with `--days 8` will create an incremental data-repository.

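Following the paragraph above, a weekly incremental trades download might look roughly like this (exchange and pair are illustrative):

``` bash
# Sketch: fetch the last 8 days of raw trades and let freqtrade resample them locally
freqtrade download-data --exchange kraken -p ETH/BTC --days 8 --dl-trades
```
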
@@ -9,21 +9,20 @@ and are no longer supported. Please avoid their usage in your configuration.
 ### the `--refresh-pairs-cached` command line option

 `--refresh-pairs-cached` in the context of backtesting, hyperopt and edge allows to refresh candle data for backtesting.
-Since this leads to much confusion, and slows down backtesting (while not being part of backtesting) this has been singled out
-as a seperate freqtrade subcommand `freqtrade download-data`.
+Since this leads to much confusion, and slows down backtesting (while not being part of backtesting) this has been singled out as a separate freqtrade sub-command `freqtrade download-data`.

-This command line option was deprecated in 2019.7-dev (develop branch) and removed in 2019.9 (master branch).
+This command line option was deprecated in 2019.7-dev (develop branch) and removed in 2019.9.

 ### The **--dynamic-whitelist** command line option

 This command line option was deprecated in 2018 and removed freqtrade 2019.6-dev (develop branch)
-and in freqtrade 2019.7 (master branch).
+and in freqtrade 2019.7.

 ### the `--live` command line option

 `--live` in the context of backtesting allowed to download the latest tick data for backtesting.
 Did only download the latest 500 candles, so was ineffective in getting good backtest data.
-Removed in 2019-7-dev (develop branch) and in freqtrade 2019-8 (master branch)
+Removed in 2019-7-dev (develop branch) and in freqtrade 2019.8.

 ### Allow running multiple pairlists in sequence

@@ -31,6 +30,6 @@ The former `"pairlist"` section in the configuration has been removed, and is re

 The old section of configuration parameters (`"pairlist"`) has been deprecated in 2019.11 and has been removed in 2020.4.

-### deprecation of bidVolume and askVolume from volumepairlist
+### deprecation of bidVolume and askVolume from volume-pairlist

-Since only quoteVolume can be compared between assets, the other options (bidVolume, askVolume) have been deprecated in 2020.4.
+Since only quoteVolume can be compared between assets, the other options (bidVolume, askVolume) have been deprecated in 2020.4, and have been removed in 2020.9.

@@ -10,6 +10,15 @@ Documentation is available at [https://freqtrade.io](https://www.freqtrade.io/)

 Special fields for the documentation (like Note boxes, ...) can be found [here](https://squidfunk.github.io/mkdocs-material/extensions/admonition/).

+To test the documentation locally use the following commands.
+
+``` bash
+pip install -r docs/requirements-docs.txt
+mkdocs serve
+```
+
+This will spin up a local server (usually on port 8000) so you can see if everything looks as you'd like it to.
+
 ## Developer setup

 To configure a development environment, best use the `setup.sh` script and answer "y" when asked "Do you want to install dependencies for dev [y/N]? ".
@@ -52,6 +61,7 @@ The fastest and easiest way to start up is to use docker-compose.develop which g
 * [docker-compose](https://docs.docker.com/compose/install/)

 #### Starting the bot

 ##### Use the develop dockerfile

 ``` bash

@@ -74,7 +84,7 @@ docker-compose up
 docker-compose build
 ```

-##### Execing (effectively SSH into the container)
+##### Executing (effectively SSH into the container)

 The `exec` command requires that the container already be running, if you want to start it
 that can be effected by `docker-compose up` or `docker-compose run freqtrade_develop`
@@ -85,6 +95,35 @@ docker-compose exec freqtrade_develop /bin/bash

 ![image](https://user-images.githubusercontent.com/419355/65456522-ba671a80-de06-11e9-9598-df9ca0d8dcac.png)

+## ErrorHandling
+
+Freqtrade Exceptions all inherit from `FreqtradeException`.
+This general class of error should however not be used directly. Instead, multiple specialized sub-Exceptions exist.
+
+Below is an outline of exception inheritance hierarchy:
+
+```
++ FreqtradeException
+|
++---+ OperationalException
+|
++---+ DependencyException
+|   |
+|   +---+ PricingError
+|   |
+|   +---+ ExchangeError
+|   |
+|   +---+ TemporaryError
+|   |
+|   +---+ DDosProtection
+|   |
+|   +---+ InvalidOrderException
+|   |
+|   +---+ RetryableOrderError
+|
++---+ StrategyError
+```
+
 ## Modules

 ### Dynamic Pairlist
@@ -98,7 +137,7 @@ First of all, have a look at the [VolumePairList](https://github.com/freqtrade/f

 This is a simple Handler, which however serves as a good example on how to start developing.

-Next, modify the classname of the Handler (ideally align this with the module filename).
+Next, modify the class-name of the Handler (ideally align this with the module filename).

 The base-class provides an instance of the exchange (`self._exchange`) the pairlist manager (`self._pairlistmanager`), as well as the main configuration (`self._config`), the pairlist dedicated configuration (`self._pairlistconfig`) and the absolute position within the list of pairlists.

@@ -118,7 +157,7 @@ Configuration for the chain of Pairlist Handlers is done in the bot configuratio

 By convention, `"number_assets"` is used to specify the maximum number of pairs to keep in the pairlist. Please follow this to ensure a consistent user experience.

-Additional parameters can be configured as needed. For instance, `VolumePairList` uses `"sort_key"` to specify the sorting value - however feel free to specify whatever is necessary for your great algorithm to be successfull and dynamic.
+Additional parameters can be configured as needed. For instance, `VolumePairList` uses `"sort_key"` to specify the sorting value - however feel free to specify whatever is necessary for your great algorithm to be successful and dynamic.

 #### short_desc

@@ -134,7 +173,7 @@ This is called with each iteration of the bot (only if the Pairlist Handler is a

 It must return the resulting pairlist (which may then be passed into the chain of Pairlist Handlers).

-Validations are optional, the parent class exposes a `_verify_blacklist(pairlist)` and `_whitelist_for_active_markets(pairlist)` to do default filtering. Use this if you limit your result to a certain number of pairs - so the endresult is not shorter than expected.
+Validations are optional, the parent class exposes a `_verify_blacklist(pairlist)` and `_whitelist_for_active_markets(pairlist)` to do default filtering. Use this if you limit your result to a certain number of pairs - so the end-result is not shorter than expected.

 #### filter_pairlist

@@ -142,13 +181,13 @@ This method is called for each Pairlist Handler in the chain by the pairlist man

 This is called with each iteration of the bot - so consider implementing caching for compute/network heavy calculations.

-It get's passed a pairlist (which can be the result of previous pairlists) as well as `tickers`, a pre-fetched version of `get_tickers()`.
+It gets passed a pairlist (which can be the result of previous pairlists) as well as `tickers`, a pre-fetched version of `get_tickers()`.

 The default implementation in the base class simply calls the `_validate_pair()` method for each pair in the pairlist, but you may override it. So you should either implement the `_validate_pair()` in your Pairlist Handler or override `filter_pairlist()` to do something else.

 If overridden, it must return the resulting pairlist (which may then be passed into the next Pairlist Handler in the chain).

-Validations are optional, the parent class exposes a `_verify_blacklist(pairlist)` and `_whitelist_for_active_markets(pairlist)` to do default filters. Use this if you limit your result to a certain number of pairs - so the endresult is not shorter than expected.
+Validations are optional, the parent class exposes a `_verify_blacklist(pairlist)` and `_whitelist_for_active_markets(pairlist)` to do default filters. Use this if you limit your result to a certain number of pairs - so the end result is not shorter than expected.

 In `VolumePairList`, this implements different methods of sorting, does early validation so only the expected number of pairs is returned.

@@ -172,7 +211,7 @@ Most exchanges supported by CCXT should work out of the box.

 Check if the new exchange supports Stoploss on Exchange orders through their API.

-Since CCXT does not provide unification for Stoploss On Exchange yet, we'll need to implement the exchange-specific parameters ourselfs. Best look at `binance.py` for an example implementation of this. You'll need to dig through the documentation of the Exchange's API on how exactly this can be done. [CCXT Issues](https://github.com/ccxt/ccxt/issues) may also provide great help, since others may have implemented something similar for their projects.
+Since CCXT does not provide unification for Stoploss On Exchange yet, we'll need to implement the exchange-specific parameters ourselves. Best look at `binance.py` for an example implementation of this. You'll need to dig through the documentation of the Exchange's API on how exactly this can be done. [CCXT Issues](https://github.com/ccxt/ccxt/issues) may also provide great help, since others may have implemented something similar for their projects.

 ### Incomplete candles

@@ -245,6 +284,7 @@ git checkout -b new_release <commitid>

 Determine if crucial bugfixes have been made between this commit and the current state, and eventually cherry-pick these.

+* Merge the release branch (master) into this branch.
 * Edit `freqtrade/__init__.py` and add the version matching the current date (for example `2019.7` for July 2019). Minor versions can be `2019.7.1` should we need to do a second release that month. Version numbers must follow allowed versions from PEP0440 to avoid failures pushing to pypi.
 * Commit this part
 * push that branch to the remote and create a PR against the master branch

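A rough sketch of the branch-and-bump sequence described in the list above (the version string, remote name and commit id are placeholders):

``` bash
# Sketch: cut a release branch, bump the version, and push it for review
git checkout -b new_release <commitid>   # <commitid> as chosen per the surrounding docs
# edit freqtrade/__init__.py so __version__ matches the current date, e.g. '2019.7'
git commit -am "Version bump 2019.7"
git push origin new_release
```
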
@@ -252,14 +292,14 @@ Determine if crucial bugfixes have been made between this commit and the current
 ### Create changelog from git commits

 !!! Note
-Make sure that the master branch is uptodate!
+Make sure that the master branch is up-to-date!

 ``` bash
 # Needs to be done before merging / pulling that branch.
 git log --oneline --no-decorate --no-merges master..new_release
 ```

-To keep the release-log short, best wrap the full git changelog into a collapsible details secction.
+To keep the release-log short, best wrap the full git changelog into a collapsible details section.

 ```markdown
 <details>

@@ -283,6 +323,9 @@ Once the PR against master is merged (best right after merging):

 ### pypi

+!!! Note
+This process is now automated as part of Github Actions.
+
 To create a pypi release, please run the following commands:

 Additional requirement: `wheel`, `twine` (for uploading), account on pypi with proper permissions.

docs/docker.md

@ -1,145 +1,7 @@

## Freqtrade with docker without docker-compose

!!! Warning
    The below documentation is provided for completeness and assumes that you are familiar with running docker containers. If you're just starting out with Docker, we recommend following the [Quickstart](docker.md) instructions.

### Download the official Freqtrade docker image
@ -148,9 +10,9 @@ Pull the image from docker hub.

Branches / tags available can be checked out on the [Dockerhub tags page](https://hub.docker.com/r/freqtradeorg/freqtrade/tags/).

```bash
docker pull freqtradeorg/freqtrade:master
# Optionally tag the repository so the run-commands remain shorter
docker tag freqtradeorg/freqtrade:master freqtrade
```

To update the image, simply run the above commands again and restart your running container.
@ -190,20 +52,19 @@ cp -n config.json.example config.json

#### Create your database file

=== "Dry-Run"
    ``` bash
    touch tradesv3.dryrun.sqlite
    ```

=== "Production"
    ``` bash
    touch tradesv3.sqlite
    ```

!!! Warning "Database File Path"
    Make sure to use the path to the correct database file when starting the bot in Docker.
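For illustration only, this is roughly how the database file could be handed to the container when starting the bot - the exact mount point and the `--db-url` argument used here are assumptions, not taken from this page:

``` bash
# Bind mount the previously created database file and point the bot at it (illustrative sketch)
docker run --rm \
  -v `pwd`/config.json:/freqtrade/config.json \
  -v `pwd`/tradesv3.sqlite:/freqtrade/tradesv3.sqlite \
  -it freqtrade trade --db-url sqlite:///tradesv3.sqlite
```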
### Build your own Docker image

@ -221,8 +82,8 @@ If you are developing using Docker, use `Dockerfile.develop` to build a dev Docker image.

docker build -f Dockerfile.develop -t freqtrade-dev .
```

!!! Warning "Include your config file manually"
    For security reasons, your configuration file will not be included in the image; you will need to bind mount it. It is also advised to bind mount an SQLite database file (see [5. Run a restartable docker image](#run-a-restartable-docker-image)) to keep it between updates.

#### Verify the Docker image

@ -243,37 +104,36 @@ docker run --rm -v `pwd`/config.json:/freqtrade/config.json -it freqtrade
```

!!! Warning
    In this example, the database will be created inside the docker instance and will be lost when you refresh your image.
#### Adjust timezone

By default, the container will use UTC timezone.
If you would like to change the timezone, use the following commands:

=== "Linux"
    ``` bash
    -v /etc/timezone:/etc/timezone:ro

    # Complete command:
    docker run --rm -v /etc/timezone:/etc/timezone:ro -v `pwd`/config.json:/freqtrade/config.json -it freqtrade
    ```

=== "MacOS"
    ```bash
    docker run --rm -e TZ=`ls -la /etc/localtime | cut -d/ -f8-9` -v `pwd`/config.json:/freqtrade/config.json -it freqtrade
    ```

!!! Note "MacOS Issues"
    The OSX Docker versions after 17.09.1 have a known issue whereby `/etc/localtime` cannot be shared, causing Docker to not start.<br>
    A work-around for this is to start with the MacOS command above.<br>
    More information on this docker issue and work-around can be read [here](https://github.com/docker/for-mac/issues/2396).
### Run a restartable docker image

To run a restartable instance in the background, follow the steps below (feel free to place your configuration and database files wherever it feels comfortable on your filesystem).

#### 1. Move your config file and database

The following will assume that you place your configuration / database files in `~/.freqtrade`, which is a hidden directory in your home directory. Feel free to use a different directory and replace the directory in the upcoming commands.

@ -283,7 +143,7 @@ mv config.json ~/.freqtrade
mv tradesv3.sqlite ~/.freqtrade
```

#### 2. Run the docker image

```bash
docker run -d \
docs/docker_quickstart.md
@ -0,0 +1,162 @@
# Using Freqtrade with Docker

## Install Docker

Start by downloading and installing Docker CE for your platform:

* [Mac](https://docs.docker.com/docker-for-mac/install/)
* [Windows](https://docs.docker.com/docker-for-windows/install/)
* [Linux](https://docs.docker.com/install/)

Optionally, [`docker-compose`](https://docs.docker.com/compose/install/) should be installed and available to follow the [docker quick start guide](#docker-quick-start).

Once you have Docker installed, simply prepare the config file (e.g. `config.json`) and run the image for `freqtrade` as explained below.

## Freqtrade with docker-compose

Freqtrade provides an official Docker image on [Dockerhub](https://hub.docker.com/r/freqtradeorg/freqtrade/), as well as a [docker-compose file](https://github.com/freqtrade/freqtrade/blob/develop/docker-compose.yml) ready for usage.

!!! Note
    - The following section assumes that `docker` and `docker-compose` are installed and available to the logged in user.
    - All below commands use relative directories and will have to be executed from the directory containing the `docker-compose.yml` file.

### Docker quick start

Create a new directory and place the [docker-compose file](https://github.com/freqtrade/freqtrade/blob/develop/docker-compose.yml) in this directory.
=== "PC/MAC/Linux"
    ``` bash
    mkdir ft_userdata
    cd ft_userdata/
    # Download the docker-compose file from the repository
    curl https://raw.githubusercontent.com/freqtrade/freqtrade/master/docker-compose.yml -o docker-compose.yml

    # Pull the freqtrade image
    docker-compose pull

    # Create user directory structure
    docker-compose run --rm freqtrade create-userdir --userdir user_data

    # Create configuration - Requires answering interactive questions
    docker-compose run --rm freqtrade new-config --config user_data/config.json
    ```

=== "RaspberryPi"
    ``` bash
    mkdir ft_userdata
    cd ft_userdata/
    # Download the docker-compose file from the repository
    curl https://raw.githubusercontent.com/freqtrade/freqtrade/master/docker-compose.yml -o docker-compose.yml

    # Pull the freqtrade image
    docker-compose pull

    # Create user directory structure
    docker-compose run --rm freqtrade create-userdir --userdir user_data

    # Create configuration - Requires answering interactive questions
    docker-compose run --rm freqtrade new-config --config user_data/config.json
    ```

    !!! Note "Change your docker Image"
        You have to change the docker image in the docker-compose file for your Raspberry build to work properly.
        ``` yml
        image: freqtradeorg/freqtrade:master_pi
        # image: freqtradeorg/freqtrade:develop_pi
        ```
The above snippet creates a new directory called `ft_userdata`, downloads the latest compose file and pulls the freqtrade image.
The last 2 steps in the snippet create the directory with `user_data`, as well as (interactively) the default configuration based on your selections.

!!! Question "How to edit the bot configuration?"
    You can edit the configuration at any time, which is available as `user_data/config.json` (within the directory `ft_userdata`) when using the above configuration.

    You can also change both the Strategy and the commands by editing the `docker-compose.yml` file.

#### Adding a custom strategy

1. The configuration is now available as `user_data/config.json`
2. Copy a custom strategy to the directory `user_data/strategies/`
3. Add the Strategy's class name to the `docker-compose.yml` file

The `SampleStrategy` is run by default.
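A minimal sketch of steps 2 and 3 from the host shell might look like the following - the file name `AwesomeStrategy.py` and the class name `AwesomeStrategy` are placeholders, and it is assumed that the compose file's `command:` line contains `--strategy SampleStrategy`:

``` bash
# Copy a custom strategy into the strategies folder (placeholder file name)
cp ~/my_strategies/AwesomeStrategy.py user_data/strategies/

# Then edit docker-compose.yml and replace "--strategy SampleStrategy"
# with "--strategy AwesomeStrategy" in the command: line; verify with:
grep -- "--strategy" docker-compose.yml
```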
!!! Warning "`SampleStrategy` is just a demo!"
    The `SampleStrategy` is there for your reference and to give you ideas for your own strategy.
    Please always backtest the strategy and use dry-run for some time before risking real money!

Once this is done, you're ready to launch the bot in trading mode (Dry-run or Live-trading, depending on how you answered the corresponding question above).

``` bash
docker-compose up -d
```

#### Docker-compose logs

Logs will be located at: `user_data/logs/freqtrade.log`.
You can check the latest log with the command `docker-compose logs -f`.

#### Database

The database will be at: `user_data/tradesv3.sqlite`

#### Updating freqtrade with docker-compose

Updating freqtrade when using `docker-compose` is as simple as running the following 2 commands:

``` bash
# Download the latest image
docker-compose pull
# Restart the image
docker-compose up -d
```

This will first pull the latest image, and will then restart the container with the just pulled version.

!!! Warning "Check the Changelog"
    You should always check the changelog for breaking changes / manual interventions required and make sure the bot starts correctly after the update.

### Editing the docker-compose file

Advanced users may edit the docker-compose file further to include all possible options or arguments.

All possible freqtrade arguments will be available by running `docker-compose run --rm freqtrade <command> <optional arguments>`.

!!! Note "`docker-compose run --rm`"
    Including `--rm` will clean up the container after completion, and is highly recommended for all modes except trading mode (running with `freqtrade trade` command).

#### Example: Download data with docker-compose

Download backtesting data for 5 days for the pair ETH/BTC and 1h timeframe from Binance. The data will be stored in the directory `user_data/data/` on the host.

``` bash
docker-compose run --rm freqtrade download-data --pairs ETH/BTC --exchange binance --days 5 -t 1h
```

Head over to the [Data Downloading Documentation](data-download.md) for more details on downloading data.

#### Example: Backtest with docker-compose

Run backtesting in docker containers for SampleStrategy and a specified timerange of historical data, on the 5m timeframe:

``` bash
docker-compose run --rm freqtrade backtesting --config user_data/config.json --strategy SampleStrategy --timerange 20190801-20191001 -i 5m
```

Head over to the [Backtesting Documentation](backtesting.md) to learn more.

### Additional dependencies with docker-compose

If your strategy requires dependencies not included in the default image (like [technical](https://github.com/freqtrade/technical)) - it will be necessary to build the image on your host.
For this, please create a Dockerfile containing installation steps for the additional dependencies (have a look at [Dockerfile.technical](https://github.com/freqtrade/freqtrade/blob/develop/Dockerfile.technical) for an example).

You'll then also need to modify the `docker-compose.yml` file and uncomment the build step, as well as rename the image to avoid naming collisions.

``` yaml
image: freqtrade_custom
build:
  context: .
  dockerfile: "./Dockerfile.<yourextension>"
```

You can then run `docker-compose build` to build the docker image, and run it using the commands described above.
docs/edge.md
205
docs/edge.md
@ -1,92 +1,142 @@
|
|||||||
# Edge positioning
|
# Edge positioning
|
||||||
|
|
||||||
This page explains how to use Edge Positioning module in your bot in order to enter into a trade only if the trade has a reasonable win rate and risk reward ratio, and consequently adjust your position size and stoploss.
|
The `Edge Positioning` module uses probability to calculate your win rate and risk reward ration. It will use these statistics to control your strategy trade entry points, position side and, stoploss.
|
||||||
|
|
||||||
!!! Warning
|
!!! Warning
|
||||||
Edge positioning is not compatible with dynamic (volume-based) whitelist.
|
`Edge positioning` is not compatible with dynamic (volume-based) whitelist.
|
||||||
|
|
||||||
!!! Note
|
!!! Note
|
||||||
Edge does not consider anything other than *its own* buy/sell/stoploss signals. It ignores the stoploss, trailing stoploss, and ROI settings in the strategy configuration file.
|
`Edge Positioning` only considers *its own* buy/sell/stoploss signals. It ignores the stoploss, trailing stoploss, and ROI settings in the strategy configuration file.
|
||||||
Therefore, it is important to understand that Edge can improve the performance of some trading strategies but *decrease* the performance of others.
|
`Edge Positioning` improves the performance of some trading strategies and *decreases* the performance of others.
|
||||||
|
|
||||||
## Introduction
|
## Introduction
|
||||||
|
|
||||||
Trading is all about probability. No one can claim that he has a strategy working all the time. You have to assume that sometimes you lose.
|
Trading strategies are not perfect. They are frameworks that are susceptible to the market and its indicators. Because the market is not at all predictable, sometimes a strategy will win and sometimes the same strategy will lose.
|
||||||
|
|
||||||
But it doesn't mean there is no rule, it only means rules should work "most of the time". Let's play a game: we toss a coin, heads: I give you 10$, tails: you give me 10$. Is it an interesting game? No, it's quite boring, isn't it?
|
To obtain an edge in the market, a strategy has to make more money than it loses. Making money in trading is not only about *how often* the strategy makes or loses money.
|
||||||
|
|
||||||
But let's say the probability that we have heads is 80% (because our coin has the displaced distribution of mass or other defect), and the probability that we have tails is 20%. Now it is becoming interesting...
|
!!! tip "It doesn't matter how often, but how much!"
|
||||||
|
A bad strategy might make 1 penny in *ten* transactions but lose 1 dollar in *one* transaction. If one only checks the number of winning trades, it would be misleading to think that the strategy is actually making a profit.
|
||||||
|
|
||||||
That means 10$ X 80% versus 10$ X 20%. 8$ versus 2$. That means over time you will win 8$ risking only 2$ on each toss of coin.
|
The Edge Positioning module seeks to improve a strategy's winning probability and the money that the strategy will make *on the long run*.
|
||||||
|
|
||||||
Let's complicate it more: you win 80% of the time but only 2$, I win 20% of the time but 8$. The calculation is: 80% X 2$ versus 20% X 8$. It is becoming boring again because overtime you win $1.6$ (80% X 2$) and me $1.6 (20% X 8$) too.
|
We raise the following question[^1]:
|
||||||
|
|
||||||
The question is: How do you calculate that? How do you know if you wanna play?
|
!!! Question "Which trade is a better option?"
|
||||||
|
a) A trade with 80% of chance of losing $100 and 20% chance of winning $200<br/>
|
||||||
|
b) A trade with 100% of chance of losing $30
|
||||||
|
|
||||||
The answer comes to two factors:
|
???+ Info "Answer"
|
||||||
|
The expected value of *a)* is smaller than the expected value of *b)*.<br/>
|
||||||
|
Hence, *b*) represents a smaller loss in the long run.<br/>
|
||||||
|
However, the answer is: *it depends*
|
||||||
|
|
||||||
- Win Rate
|
Another way to look at it is to ask a similar question:
|
||||||
- Risk Reward Ratio
|
|
||||||
|
|
||||||
### Win Rate
|
!!! Question "Which trade is a better option?"
|
||||||
|
a) A trade with 80% of chance of winning 100 and 20% chance of losing $200<br/>
|
||||||
|
b) A trade with 100% of chance of winning $30
|
||||||
|
|
||||||
Win Rate (*W*) is is the mean over some amount of trades (*N*) what is the percentage of winning trades to total number of trades (note that we don't consider how much you gained but only if you won or not).
|
Edge positioning tries to answer the hard questions about risk/reward and position size automatically, seeking to minimizes the chances of losing of a given strategy.
|
||||||
|
|
||||||
### Trading, winning and losing

Let's call $o$ the return of a single transaction, where $o \in \mathbb{R}$. The collection $O = \{o_1, o_2, ..., o_N\}$ is the set of all returns of transactions made during a trading session. We say that $N$ is the cardinality of $O$, or, in lay terms, it is the number of transactions made in a trading session.

!!! Example
    In a session where a strategy made three transactions we can say that $O = \{3.5, -1, 15\}$. That means that $N = 3$ and $o_1 = 3.5$, $o_2 = -1$, $o_3 = 15$.

A winning trade is a trade where a strategy *made* money. Making money means that the strategy closed the position at a value that returned a profit after all fees are deducted. Formally, a winning trade will have a return $o_i > 0$. Similarly, a losing trade will have a return $o_j \leq 0$. With that, we can discover the set of all winning trades, $T_{win}$, as follows:

$$ T_{win} = \{ o \in O | o > 0 \} $$

Similarly, we can discover the set of losing trades $T_{lose}$ as follows:

$$ T_{lose} = \{o \in O | o \leq 0\} $$

!!! Example
    In a session where a strategy made four transactions $O = \{3.5, -1, 15, 0\}$:<br>
    $T_{win} = \{3.5, 15\}$<br>
    $T_{lose} = \{-1, 0\}$<br>

### Win Rate and Lose Rate

The win rate $W$ is the proportion of winning trades with respect to all the trades made by a strategy. We use the following function to compute the win rate:

$$W = \frac{|T_{win}|}{N}$$

Where $W$ is the win rate, $N$ is the number of trades, and $T_{win}$ is the set of all trades where the strategy made money.

Similarly, we can compute the rate of losing trades:

$$
L = \frac{|T_{lose}|}{N}
$$

Where $L$ is the lose rate, $N$ is the number of trades made, and $T_{lose}$ is the set of all trades where the strategy lost money. Note that the above formula is the same as calculating $L = 1 - W$ or $W = 1 - L$.
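Applying these two definitions to the example session above, $O = \{3.5, -1, 15, 0\}$, gives:

$$ W = \frac{|T_{win}|}{N} = \frac{|\{3.5, 15\}|}{4} = 0.5, \qquad L = \frac{|T_{lose}|}{N} = \frac{|\{-1, 0\}|}{4} = 0.5 = 1 - W $$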
### Risk Reward Ratio

Risk Reward Ratio ($R$) is a formula used to measure the expected gains of a given investment against the risk of loss. It is basically what you potentially win divided by what you potentially lose. Formally:

$$ R = \frac{\text{potential_profit}}{\text{potential_loss}} $$

???+ Example "Worked example of $R$ calculation"
    Let's say that you think that the price of *stonecoin* today is $10.0. You believe that, because they will start mining stonecoin, it will go up to $15.0 tomorrow. There is the risk that the stone is too hard, and the GPUs can't mine it, so the price might go to $0 tomorrow. You are planning to invest $100.<br>
    Your potential profit is calculated as:<br>
    $\begin{aligned}
    \text{potential_profit} &= (\text{potential_price} - \text{cost_per_unit}) * \frac{\text{investment}}{\text{cost_per_unit}} \\
    &= (15 - 10) * \frac{100}{10}\\
    &= 50
    \end{aligned}$<br>
    Since the price might go to $0, the $100 invested could turn into $0. We can compute the Risk Reward Ratio as follows:<br>
    $\begin{aligned}
    R &= \frac{\text{potential_profit}}{\text{potential_loss}}\\
    &= \frac{50}{100}\\
    &= 0.5
    \end{aligned}$<br>
    What it effectively means is that the strategy has the potential to make $0.50 for each $1 invested.

On a long horizon, that is, over many trades, we can calculate the risk reward by dividing the strategy's average profit on winning trades by the strategy's average loss on losing trades. We can calculate the average profit, $\mu_{win}$, as follows:

$$ \text{average_profit} = \mu_{win} = \frac{\text{sum_of_profits}}{\text{count_winning_trades}} = \frac{\sum^{o \in T_{win}} o}{|T_{win}|} $$

Similarly, we can calculate the average loss, $\mu_{lose}$, as follows:

$$ \text{average_loss} = \mu_{lose} = \frac{\text{sum_of_losses}}{\text{count_losing_trades}} = \frac{\sum^{o \in T_{lose}} o}{|T_{lose}|} $$

Finally, we can calculate the Risk Reward ratio, $R$, as follows:

$$ R = \frac{\text{average_profit}}{\text{average_loss}} = \frac{\mu_{win}}{\mu_{lose}} $$

???+ Example "Worked example of $R$ calculation using mean profit/loss"
    Let's say the strategy that we are using makes an average win $\mu_{win} = 2.06$ and an average loss $\mu_{lose} = 4.11$.<br>
    We calculate the risk reward ratio as follows:<br>
    $R = \frac{\mu_{win}}{\mu_{lose}} = \frac{2.06}{4.11} = 0.5012...$
### Expectancy

By combining the win rate $W$ and the risk reward ratio $R$ we can create an expectancy ratio $E$. An expectancy ratio is the expected return of the investment made in a trade. We can compute the value of $E$ as follows:

$$E = R * W - L$$

!!! Example "Calculating $E$"
    Let's say that a strategy has a win rate $W = 0.28$ and a risk reward ratio $R = 5$. What this means is that the strategy is expected to make 5 times the investment on around 28% of the trades it makes. Working out the example:<br>
    $E = R * W - L = 5 * 0.28 - 0.72 = 0.68$<br>

The expectancy worked out in the example above means that, on average, this strategy's trades will return 1.68 times the size of its losses. Said another way, the strategy makes $1.68 for every $1 it loses, on average.

This is important for two reasons: First, it may seem obvious, but you know right away that you have a positive return. Second, you now have a number you can compare to other candidate systems to make decisions about which ones you employ.

It is important to remember that any system with an expectancy greater than 0 is profitable using past data. The key is finding one that will be profitable in the future.

You can also use this value to evaluate the effectiveness of modifications to this system.

!!! Note
    It's important to keep in mind that Edge is testing your expectancy using historical data; there's no guarantee that you will have a similar edge in the future. It's still vital to do this testing in order to build confidence in your methodology, but be wary of "curve-fitting" your approach to the historical data as things are unlikely to play out the exact same way for future trades.

## How does it work?
@ -99,13 +149,13 @@ Edge combines dynamic stoploss, dynamic positions, and whitelist generation into

| XZC/ETH | -0.03 | 0.52 | 1.359670 | 0.228 |
| XZC/ETH | -0.04 | 0.51 | 1.234539 | 0.117 |

The goal here is to find the best stoploss for the strategy in order to have the maximum expectancy. In the above example a stoploss at $3\%$ leads to the maximum expectancy according to historical data.

The Edge module then dynamically enforces the stoploss value it evaluated for your strategy.

### Position size

Edge dictates the amount at stake for each trade to the bot according to the following factors:

- Allowed capital at risk
- Stoploss

@ -116,9 +166,9 @@ Allowed capital at risk is calculated as follows:

```
Allowed capital at risk = (Capital available_percentage) X (Allowed risk per trade)
```

Stoploss is calculated as described above with respect to historical data.

The position size is calculated as follows:

```
Position size = (Allowed capital at risk) / Stoploss
```

Example:

Let's say the stake currency is **ETH** and there is $10$ **ETH** in the wallet. The capital available percentage is $50\%$ and the allowed risk per trade is $1\%$. Thus, the available capital for trading is $10 * 0.5 = 5$ **ETH** and the allowed capital at risk would be $5 * 0.01 = 0.05$ **ETH**.

- **Trade 1:** The strategy detects a new buy signal in the **XLM/ETH** market. `Edge Positioning` calculates a stoploss of $2\%$ and a position of $0.05 / 0.02 = 2.5$ **ETH**. The bot takes a position of $2.5$ **ETH** in the **XLM/ETH** market.

- **Trade 2:** The strategy detects a buy signal on the **BTC/ETH** market while **Trade 1** is still open. `Edge Positioning` calculates the stoploss of $4\%$ on this market. Thus, **Trade 2** position size is $0.05 / 0.04 = 1.25$ **ETH**.

!!! Tip "Available Capital $\neq$ Available in wallet"
    The available capital for trading didn't change in **Trade 2** even with **Trade 1** still open. The available capital **is not** the free amount in the wallet.

- **Trade 3:** The strategy detects a buy signal in the **ADA/ETH** market. `Edge Positioning` calculates a stoploss of $1\%$ and a position of $0.05 / 0.01 = 5$ **ETH**. Since **Trade 1** has $2.5$ **ETH** blocked and **Trade 2** has $1.25$ **ETH** blocked, there is only $5 - 1.25 - 2.5 = 1.25$ **ETH** available. Hence, the position size of **Trade 3** is $1.25$ **ETH**.

!!! Tip "Available Capital Updates"
    The available capital does not change before a position is sold. After a trade is closed the available capital goes up if the trade was profitable or goes down if the trade was a loss.

- The strategy detects a sell signal in the **XLM/ETH** market. The bot exits **Trade 1** for a profit of $1$ **ETH**. The total capital in the wallet becomes $11$ **ETH** and the available capital for trading becomes $5.5$ **ETH**.

- **Trade 4:** The strategy detects a new buy signal in the **XLM/ETH** market. `Edge Positioning` calculates a stoploss of $2\%$, and a position size of $0.055 / 0.02 = 2.75$ **ETH**.

## Configurations
@ -169,23 +223,23 @@ freqtrade edge

An example of its output:

| **pair** | **stoploss** | **win rate** | **risk reward ratio** | **required risk reward** | **expectancy** | **total number of trades** | **average duration (min)** |
|:----------|-----------:|-----------:|--------------------:|-----------------------:|-------------:|-----------------:|---------------:|
| **AGI/BTC** | -0.02 | 0.64 | 5.86 | 0.56 | 3.41 | 14 | 54 |
| **NXS/BTC** | -0.03 | 0.64 | 2.99 | 0.57 | 1.54 | 11 | 26 |
| **LEND/BTC** | -0.02 | 0.82 | 2.05 | 0.22 | 1.50 | 11 | 36 |
| **VIA/BTC** | -0.01 | 0.55 | 3.01 | 0.83 | 1.19 | 11 | 48 |
| **MTH/BTC** | -0.09 | 0.56 | 2.82 | 0.80 | 1.12 | 18 | 52 |
| **ARDR/BTC** | -0.04 | 0.42 | 3.14 | 1.40 | 0.73 | 12 | 42 |
| **BCPT/BTC** | -0.01 | 0.71 | 1.34 | 0.40 | 0.67 | 14 | 30 |
| **WINGS/BTC** | -0.02 | 0.56 | 1.97 | 0.80 | 0.65 | 27 | 42 |
| **VIBE/BTC** | -0.02 | 0.83 | 0.91 | 0.20 | 0.59 | 12 | 35 |
| **MCO/BTC** | -0.02 | 0.79 | 0.97 | 0.27 | 0.55 | 14 | 31 |
| **GNT/BTC** | -0.02 | 0.50 | 2.06 | 1.00 | 0.53 | 18 | 24 |
| **HOT/BTC** | -0.01 | 0.17 | 7.72 | 4.81 | 0.50 | 209 | 7 |
| **SNM/BTC** | -0.03 | 0.71 | 1.06 | 0.42 | 0.45 | 17 | 38 |
| **APPC/BTC** | -0.02 | 0.44 | 2.28 | 1.27 | 0.44 | 25 | 43 |
| **NEBL/BTC** | -0.03 | 0.63 | 1.29 | 0.58 | 0.44 | 19 | 59 |

Edge produced the above table by comparing `calculate_since_number_of_days` to `minimum_expectancy` to find `min_trade_number` historical information based on the config file. The timerange Edge uses for its comparisons can be further limited by using the `--timerange` switch.

@ -218,3 +272,6 @@ The full timerange specification:

* Use tickframes since 2018/01/31: `--timerange=20180131-`
* Use tickframes since 2018/01/31 till 2018/03/01: `--timerange=20180131-20180301`
* Use tickframes between POSIX timestamps 1527595200 and 1527618600: `--timerange=1527595200-1527618600`
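For example, limiting the Edge analysis to that last date range would look roughly as follows (a sketch - adjust the config path and dates to your own setup):

``` bash
# Run the Edge analysis only on data between 2018/01/31 and 2018/03/01
freqtrade edge --config config.json --timerange=20180131-20180301
```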
[^1]: Question extracted from MIT OpenCourseWare 18.S096 - Topics in Mathematics with Applications in Finance: https://ocw.mit.edu/courses/mathematics/18-s096-topics-in-mathematics-with-applications-in-finance-fall-2013/
docs/faq.md
@ -1,5 +1,9 @@
# Freqtrade FAQ

## Beginner Tips & Tricks

* When you work with your strategy & hyperopt file you should use a proper code editor like VSCode or PyCharm. A good code editor will provide syntax highlighting as well as line numbers, making it easy to find syntax errors (which are most likely pointed out by Freqtrade during startup).

## Freqtrade common issues

### The bot does not start

@ -15,10 +19,12 @@ This could have the following reasons:

### I have waited 5 minutes, why hasn't the bot made any trades yet?!

* Depending on the buy strategy, the amount of whitelisted coins, the situation of the market etc., it can take up to hours to find a good entry position for a trade. Be patient!

* Or it may be because of a configuration error. It's best to check the logs; they usually tell you if the bot is simply not getting buy signals (only heartbeat messages), or if there is something wrong (errors / exceptions in the log).

### I have made 12 trades already, why is my total profit negative?!

I understand your disappointment but unfortunately 12 trades is just
@ -129,25 +135,27 @@ to find a great result (unless if you are very lucky), so you probably
have to run it for 10.000 or more. But it will take an eternity to
compute.

Since hyperopt uses Bayesian search, running for too many epochs may not produce greater results.

It's therefore recommended to run between 500-1000 epochs over and over until you hit at least 10.000 epochs in total (or are satisfied with the result). You can best judge by looking at the results - if the bot keeps discovering better strategies, it's best to keep on going.

```bash
freqtrade hyperopt -e 1000
```

or, if you want to see intermediate results:

```bash
for i in {1..100}; do freqtrade hyperopt -e 1000; done
```

### Why does it take a long time to run hyperopt?

* Discovering a great strategy with Hyperopt takes time. Study www.freqtrade.io, the Freqtrade Documentation page, and join the Freqtrade [Slack community](https://join.slack.com/t/highfrequencybot/shared_invite/enQtNjU5ODcwNjI1MDU3LTU1MTgxMjkzNmYxNWE1MDEzYzQ3YmU4N2MwZjUyNjJjODRkMDVkNjg4YTAyZGYzYzlhOTZiMTE4ZjQ4YzM0OGE) or the Freqtrade [discord community](https://discord.gg/X89cVG) while you patiently wait for the most advanced, free crypto bot in the world to hand you a possible golden strategy specially designed just for you.

* If you wonder why it can take from 20 minutes to days to do 1000 epochs, here are some answers:

This answer was written during the release 0.15.1, when we had:

- 8 triggers
- 9 guards: let's say we evaluate even 10 values from each
@ -157,7 +165,14 @@ The following calculation is still very rough and not very precise
but it will give the idea. With only these triggers and guards there are
already 8\*10^9\*10 evaluations. A rough total of 80 billion evals.
Did you run 100 000 evals? Congrats, you've done roughly 1 / 100 000 th
of the search space, assuming that the bot never tests the same parameters more than once.

* The time it takes to run 1000 hyperopt epochs depends on things like: the available CPU, hard disk, RAM, timeframe, timerange, indicator settings, indicator count, the amount of coins that hyperopt tests strategies on, and the resulting trade count - which can be 650 trades in a year or 10.000 trades, depending on whether the strategy aims for big profits by trading rarely or for many low-profit trades.

    Example: 4% profit 650 times vs 0.3% profit a trade 10.000 times in a year, assuming you set the `--timerange` to 365 days.

    Example:
    `freqtrade hyperopt --config config.json --strategy SampleStrategy --hyperopt SampleHyperopt -e 1000 --timerange 20190601-20200601`

## Edge module
@ -370,6 +370,9 @@ By default, hyperopt prints colorized results -- epochs with positive profit are

You can use the `--print-all` command line option if you would like to see all results in the hyperopt output, not only the best ones. When `--print-all` is used, current best results are also colorized by default -- they are printed in bold (bright) style. This can also be switched off with the `--no-color` command line option.

!!! Note "Windows and color output"
    Windows does not support color output natively, therefore it is automatically disabled. To have color output for hyperopt running under Windows, please consider using WSL.
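Putting those flags together, a run that prints every epoch without colorized output could look roughly like this (a sketch; the strategy and hyperopt class names are the samples used elsewhere in these docs):

```bash
# Print all epochs (not only the best ones) and disable color output
freqtrade hyperopt --strategy SampleStrategy --hyperopt SampleHyperOpt -e 100 --print-all --no-color
```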
### Understand Hyperopt ROI results

If you are optimizing ROI (i.e. if optimization search-space contains 'all', 'default' or 'roi'), your result will look as follows and include a ROI table:
@ -37,13 +37,9 @@ Freqtrade is a crypto-currency algorithmic trading software developed in python

## Requirements

### Hardware requirements

To run this bot we recommend a linux cloud instance with a minimum of:

- 2GB RAM
- 1GB disk space

@ -18,6 +18,9 @@ Click each one for install guide:

We also recommend setting up a [Telegram bot](telegram-usage.md#setup-your-telegram-bot); it is optional but recommended.

!!! Warning "Up-to-date clock"
    The clock on the system running the bot must be accurate, synchronized to an NTP server frequently enough to avoid problems with communication to the exchanges.
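On most systemd-based Linux distributions, clock synchronization can be checked and enabled roughly as follows (an illustrative sketch; other platforms use different tooling):

```bash
# Check whether NTP synchronization is currently active
timedatectl status

# Enable NTP-based time synchronization
sudo timedatectl set-ntp true
```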
## Quick start

Freqtrade provides the Linux/MacOS Easy Installation script to install all dependencies and help you configure the bot.

@ -38,7 +41,7 @@ This can be achieved with the following commands:

```bash
git clone https://github.com/freqtrade/freqtrade.git
cd freqtrade
# git checkout master # Optional, see (1)
./setup.sh --install
```

@ -79,11 +82,9 @@ This option will hard reset your branch (only if you are on either `master` or `

DEPRECATED - use `freqtrade new-config -c config.json` instead.

### Activate your virtual environment

Each time you open a new terminal, you must run `source .env/bin/activate`.

------
@ -95,36 +96,34 @@ OS Specific steps are listed first, the [Common](#common) section below is neces
|
|||||||
!!! Note
|
!!! Note
|
||||||
Python3.6 or higher and the corresponding pip are assumed to be available.
|
Python3.6 or higher and the corresponding pip are assumed to be available.
|
||||||
|
|
||||||
### Linux - Ubuntu 16.04
|
=== "Ubuntu 16.04"
|
||||||
|
#### Install necessary dependencies
|
||||||
|
|
||||||
#### Install necessary dependencies
|
```bash
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install build-essential git
|
||||||
|
```
|
||||||
|
|
||||||
```bash
|
=== "RaspberryPi/Raspbian"
|
||||||
sudo apt-get update
|
The following assumes the latest [Raspbian Buster lite image](https://www.raspberrypi.org/downloads/raspbian/) from at least September 2019.
|
||||||
sudo apt-get install build-essential git
|
This image comes with python3.7 preinstalled, making it easy to get freqtrade up and running.
|
||||||
```
|
|
||||||
|
|
||||||
### Raspberry Pi / Raspbian
|
Tested using a Raspberry Pi 3 with the Raspbian Buster lite image, all updates applied.
|
||||||
|
|
||||||
The following assumes the latest [Raspbian Buster lite image](https://www.raspberrypi.org/downloads/raspbian/) from at least September 2019.
|
``` bash
|
||||||
This image comes with python3.7 preinstalled, making it easy to get freqtrade up and running.
|
sudo apt-get install python3-venv libatlas-base-dev
|
||||||
|
git clone https://github.com/freqtrade/freqtrade.git
|
||||||
|
cd freqtrade
|
||||||
|
|
||||||
Tested using a Raspberry Pi 3 with the Raspbian Buster lite image, all updates applied.
|
bash setup.sh -i
|
||||||
|
```
|
||||||
|
|
||||||
``` bash
|
!!! Note "Installation duration"
|
||||||
sudo apt-get install python3-venv libatlas-base-dev
|
Depending on your internet speed and the Raspberry Pi version, installation can take multiple hours to complete.
|
||||||
git clone https://github.com/freqtrade/freqtrade.git
|
|
||||||
cd freqtrade
|
|
||||||
|
|
||||||
bash setup.sh -i
|
!!! Note
|
||||||
```
|
The above does not install hyperopt dependencies. To install these, please use `python3 -m pip install -e .[hyperopt]`.
|
||||||
|
We do not advise running hyperopt on a Raspberry Pi, since this is a very resource-heavy operation, which should be done on a powerful machine.
|
||||||
!!! Note "Installation duration"
|
|
||||||
Depending on your internet speed and the Raspberry Pi version, installation can take multiple hours to complete.
|
|
||||||
|
|
||||||
!!! Note
|
|
||||||
The above does not install hyperopt dependencies. To install these, please use `python3 -m pip install -e .[hyperopt]`.
|
|
||||||
We do not advise to run hyperopt on a Raspberry Pi, since this is a very resource-heavy operation, which should be done on powerful machine.
|
|
||||||
|
|
||||||
### Common
|
### Common
|
||||||
|
|
||||||
@ -175,11 +174,6 @@ Clone the git repository:
|
|||||||
```bash
|
```bash
|
||||||
git clone https://github.com/freqtrade/freqtrade.git
|
git clone https://github.com/freqtrade/freqtrade.git
|
||||||
cd freqtrade
|
cd freqtrade
|
||||||
```
|
|
||||||
|
|
||||||
Optionally checkout the master branch to get the latest stable release:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
git checkout master
|
git checkout master
|
||||||
```
|
```
|
||||||
|
|
||||||
@ -218,72 +212,18 @@ On Linux, as an optional post-installation task, you may wish to setup the bot t
|
|||||||
|
|
||||||
------
|
------
|
||||||
|
|
||||||
## Using Conda
|
### Anaconda
|
||||||
|
|
||||||
Freqtrade can also be installed using Anaconda (or Miniconda).
|
Freqtrade can also be installed using Anaconda (or Miniconda).
|
||||||
|
|
||||||
|
!!! Note
|
||||||
|
This requires the [ta-lib](#1-install-ta-lib) C-library to be installed first. See below.
|
||||||
|
|
||||||
``` bash
|
``` bash
|
||||||
conda env create -f environment.yml
|
conda env create -f environment.yml
|
||||||
```
|
```
|
||||||
|
|
||||||
!!! Note
|
-----
|
||||||
This requires the [ta-lib](#1-install-ta-lib) C-library to be installed first.
|
|
||||||
|
|
||||||
## Windows
|
|
||||||
|
|
||||||
We recommend that Windows users use [Docker](docker.md) as this will work much easier and smoother (also more secure).
|
|
||||||
|
|
||||||
If that is not possible, try using the Windows Linux subsystem (WSL) - for which the Ubuntu instructions should work.
|
|
||||||
If that is not available on your system, feel free to try the instructions below, which led to success for some.
|
|
||||||
|
|
||||||
### Install freqtrade manually
|
|
||||||
|
|
||||||
!!! Note
|
|
||||||
Make sure to use 64bit Windows and 64bit Python to avoid problems with backtesting or hyperopt due to the memory constraints 32bit applications have under Windows.
|
|
||||||
|
|
||||||
!!! Hint
|
|
||||||
Using the [Anaconda Distribution](https://www.anaconda.com/distribution/) under Windows can greatly help with installation problems. Check out the [Conda section](#using-conda) in this document for more information.
|
|
||||||
|
|
||||||
#### Clone the git repository
|
|
||||||
|
|
||||||
```bash
|
|
||||||
git clone https://github.com/freqtrade/freqtrade.git
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Install ta-lib
|
|
||||||
|
|
||||||
Install ta-lib according to the [ta-lib documentation](https://github.com/mrjbq7/ta-lib#windows).
|
|
||||||
|
|
||||||
As compiling from source on windows has heavy dependencies (requires a partial visual studio installation), there is also a repository of unofficial precompiled windows Wheels [here](https://www.lfd.uci.edu/~gohlke/pythonlibs/#ta-lib), which needs to be downloaded and installed using `pip install TA_Lib‑0.4.18‑cp38‑cp38‑win_amd64.whl` (make sure to use the version matching your python version)
|
|
||||||
|
|
||||||
```cmd
|
|
||||||
>cd \path\freqtrade-develop
|
|
||||||
>python -m venv .env
|
|
||||||
>.env\Scripts\activate.bat
|
|
||||||
REM optionally install ta-lib from wheel
|
|
||||||
REM >pip install TA_Lib‑0.4.18‑cp38‑cp38‑win_amd64.whl
|
|
||||||
>pip install -r requirements.txt
|
|
||||||
>pip install -e .
|
|
||||||
>freqtrade
|
|
||||||
```
|
|
||||||
|
|
||||||
> Thanks [Owdr](https://github.com/Owdr) for the commands. Source: [Issue #222](https://github.com/freqtrade/freqtrade/issues/222)
|
|
||||||
|
|
||||||
#### Error during installation under Windows
|
|
||||||
|
|
||||||
``` bash
|
|
||||||
error: Microsoft Visual C++ 14.0 is required. Get it with "Microsoft Visual C++ Build Tools": http://landinghub.visualstudio.com/visual-cpp-build-tools
|
|
||||||
```
|
|
||||||
|
|
||||||
Unfortunately, many packages requiring compilation don't provide a pre-build wheel. It is therefore mandatory to have a C/C++ compiler installed and available for your python environment to use.
|
|
||||||
|
|
||||||
The easiest way is to download install Microsoft Visual Studio Community [here](https://visualstudio.microsoft.com/downloads/) and make sure to install "Common Tools for Visual C++" to enable building c code on Windows. Unfortunately, this is a heavy download / dependency (~4Gb) so you might want to consider WSL or [docker](docker.md) first.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
Now you have an environment ready, the next step is
|
|
||||||
[Bot Configuration](configuration.md).
|
|
||||||
|
|
||||||
## Troubleshooting
|
## Troubleshooting
|
||||||
|
|
||||||
### MacOS installation error
|
### MacOS installation error
|
||||||
@ -297,4 +237,9 @@ For MacOS 10.14, this can be accomplished with the below command.
|
|||||||
open /Library/Developer/CommandLineTools/Packages/macOS_SDK_headers_for_macOS_10.14.pkg
|
open /Library/Developer/CommandLineTools/Packages/macOS_SDK_headers_for_macOS_10.14.pkg
|
||||||
```
|
```
|
||||||
|
|
||||||
If this file is inexistant, then you're probably on a different version of MacOS, so you may need to consult the internet for specific resolution details.
|
If this file does not exist, you're probably on a different version of MacOS, so you may need to consult the internet for specific resolution details.
|
||||||
|
|
||||||
|
-----
|
||||||
|
|
||||||
|
Now you have an environment ready, the next step is
|
||||||
|
[Bot Configuration](configuration.md).
|
||||||
|
12
docs/javascripts/config.js
Normal file
12
docs/javascripts/config.js
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
window.MathJax = {
|
||||||
|
tex: {
|
||||||
|
inlineMath: [["\\(", "\\)"]],
|
||||||
|
displayMath: [["\\[", "\\]"]],
|
||||||
|
processEscapes: true,
|
||||||
|
processEnvironments: true
|
||||||
|
},
|
||||||
|
options: {
|
||||||
|
ignoreHtmlClass: ".*|",
|
||||||
|
processHtmlClass: "arithmatex"
|
||||||
|
}
|
||||||
|
};
|
@ -224,7 +224,8 @@ Possible options for the `freqtrade plot-profit` subcommand:
|
|||||||
|
|
||||||
```
|
```
|
||||||
usage: freqtrade plot-profit [-h] [-v] [--logfile FILE] [-V] [-c PATH]
|
usage: freqtrade plot-profit [-h] [-v] [--logfile FILE] [-V] [-c PATH]
|
||||||
[-d PATH] [--userdir PATH] [-p PAIRS [PAIRS ...]]
|
[-d PATH] [--userdir PATH] [-s NAME]
|
||||||
|
[--strategy-path PATH] [-p PAIRS [PAIRS ...]]
|
||||||
[--timerange TIMERANGE] [--export EXPORT]
|
[--timerange TIMERANGE] [--export EXPORT]
|
||||||
[--export-filename PATH] [--db-url PATH]
|
[--export-filename PATH] [--db-url PATH]
|
||||||
[--trade-source {DB,file}] [-i TIMEFRAME]
|
[--trade-source {DB,file}] [-i TIMEFRAME]
|
||||||
@ -270,6 +271,11 @@ Common arguments:
|
|||||||
--userdir PATH, --user-data-dir PATH
|
--userdir PATH, --user-data-dir PATH
|
||||||
Path to userdata directory.
|
Path to userdata directory.
|
||||||
|
|
||||||
|
Strategy arguments:
|
||||||
|
-s NAME, --strategy NAME
|
||||||
|
Specify strategy class name which will be used by the
|
||||||
|
bot.
|
||||||
|
--strategy-path PATH Specify additional strategy lookup path.
|
||||||
```
|
```
|
||||||
|
|
||||||
The `-p/--pairs` argument, can be used to limit the pairs that are considered for this calculation.
|
The `-p/--pairs` argument, can be used to limit the pairs that are considered for this calculation.
|
||||||
@ -279,7 +285,7 @@ Examples:
|
|||||||
Use custom backtest-export file
|
Use custom backtest-export file
|
||||||
|
|
||||||
``` bash
|
``` bash
|
||||||
freqtrade plot-profit -p LTC/BTC --export-filename user_data/backtest_results/backtest-result-Strategy005.json
|
freqtrade plot-profit -p LTC/BTC --export-filename user_data/backtest_results/backtest-result.json
|
||||||
```
|
```
|
||||||
|
|
||||||
Use custom database
|
Use custom database
|
||||||
|
@ -1,2 +1,2 @@
|
|||||||
mkdocs-material==5.5.3
|
mkdocs-material==5.5.12
|
||||||
mdx_truly_sane_lists==1.2
|
mdx_truly_sane_lists==1.2
|
||||||
|
@ -116,6 +116,7 @@ python3 scripts/rest_client.py --config rest_config.json <command> [optional par
|
|||||||
| `trades` | List last trades.
|
| `trades` | List last trades.
|
||||||
| `delete_trade <trade_id>` | Remove trade from the database. Tries to close open orders. Requires manual handling of this trade on the exchange.
|
| `delete_trade <trade_id>` | Remove trade from the database. Tries to close open orders. Requires manual handling of this trade on the exchange.
|
||||||
| `show_config` | Shows part of the current configuration with settings relevant for operation
|
| `show_config` | Shows part of the current configuration with settings relevant for operation
|
||||||
|
| `logs` | Shows last log messages
|
||||||
| `status` | Lists all open trades
|
| `status` | Lists all open trades
|
||||||
| `count` | Displays number of trades used and available
|
| `count` | Displays number of trades used and available
|
||||||
| `profit` | Display a summary of your profit/loss from close trades and some stats about your performance
|
| `profit` | Display a summary of your profit/loss from close trades and some stats about your performance
|
||||||
@ -138,78 +139,83 @@ python3 scripts/rest_client.py help
|
|||||||
|
|
||||||
``` output
|
``` output
|
||||||
Possible commands:
|
Possible commands:
|
||||||
|
|
||||||
balance
|
balance
|
||||||
Get the account balance
|
Get the account balance.
|
||||||
:returns: json object
|
|
||||||
|
|
||||||
blacklist
|
blacklist
|
||||||
Show the current blacklist
|
Show the current blacklist.
|
||||||
|
|
||||||
:param add: List of coins to add (example: "BNB/BTC")
|
:param add: List of coins to add (example: "BNB/BTC")
|
||||||
:returns: json object
|
|
||||||
|
|
||||||
count
|
count
|
||||||
Returns the amount of open trades
|
Return the amount of open trades.
|
||||||
:returns: json object
|
|
||||||
|
|
||||||
daily
|
daily
|
||||||
Returns the amount of open trades
|
Return the daily profit statistics.
|
||||||
:returns: json object
|
|
||||||
|
delete_trade
|
||||||
|
Delete trade from the database.
|
||||||
|
Tries to close open orders. Requires manual handling of this asset on the exchange.
|
||||||
|
|
||||||
|
:param trade_id: Deletes the trade with this ID from the database.
|
||||||
|
|
||||||
edge
|
edge
|
||||||
Returns information about edge
|
Return information about edge.
|
||||||
:returns: json object
|
|
||||||
|
|
||||||
forcebuy
|
forcebuy
|
||||||
Buy an asset
|
Buy an asset.
|
||||||
|
|
||||||
:param pair: Pair to buy (ETH/BTC)
|
:param pair: Pair to buy (ETH/BTC)
|
||||||
:param price: Optional - price to buy
|
:param price: Optional - price to buy
|
||||||
:returns: json object of the trade
|
|
||||||
|
|
||||||
forcesell
|
forcesell
|
||||||
Force-sell a trade
|
Force-sell a trade.
|
||||||
|
|
||||||
:param tradeid: Id of the trade (can be received via status command)
|
:param tradeid: Id of the trade (can be received via status command)
|
||||||
:returns: json object
|
|
||||||
|
logs
|
||||||
|
Show latest logs.
|
||||||
|
|
||||||
|
:param limit: Limits log messages to the last <limit> logs. No limit to get all the logs.
|
||||||
|
|
||||||
performance
|
performance
|
||||||
Returns the performance of the different coins
|
Return the performance of the different coins.
|
||||||
:returns: json object
|
|
||||||
|
|
||||||
profit
|
profit
|
||||||
Returns the profit summary
|
Return the profit summary.
|
||||||
:returns: json object
|
|
||||||
|
|
||||||
reload_config
|
reload_config
|
||||||
Reload configuration
|
Reload configuration.
|
||||||
:returns: json object
|
|
||||||
|
|
||||||
show_config
|
show_config
|
||||||
|
|
||||||
Returns part of the configuration, relevant for trading operations.
|
Returns part of the configuration, relevant for trading operations.
|
||||||
:return: json object containing the version
|
|
||||||
|
|
||||||
start
|
start
|
||||||
Start the bot if it's in stopped state.
|
Start the bot if it's in the stopped state.
|
||||||
:returns: json object
|
|
||||||
|
|
||||||
status
|
status
|
||||||
Get the status of open trades
|
Get the status of open trades.
|
||||||
:returns: json object
|
|
||||||
|
|
||||||
stop
|
stop
|
||||||
Stop the bot. Use start to restart
|
Stop the bot. Use `start` to restart.
|
||||||
:returns: json object
|
|
||||||
|
|
||||||
stopbuy
|
stopbuy
|
||||||
Stop buying (but handle sells gracefully).
|
Stop buying (but handle sells gracefully). Use `reload_config` to reset.
|
||||||
use reload_config to reset
|
|
||||||
:returns: json object
|
trades
|
||||||
|
Return trades history.
|
||||||
|
|
||||||
|
:param limit: Limits trades to the X last trades. No limit to get all the trades.
|
||||||
|
|
||||||
version
|
version
|
||||||
Returns the version of the bot
|
Return the version of the bot.
|
||||||
:returns: json object containing the version
|
|
||||||
|
|
||||||
whitelist
|
whitelist
|
||||||
Show the current whitelist
|
Show the current whitelist.
|
||||||
:returns: json object
|
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Advanced API usage using JWT tokens
|
## Advanced API usage using JWT tokens
|
||||||
|
@ -1,104 +1,59 @@
|
|||||||
# Sandbox API testing
|
# Sandbox API testing
|
||||||
|
|
||||||
Where an exchange provides a sandbox for risk-free integration, or end-to-end, testing CCXT provides access to these.
|
Some exchanges provide sandboxes or testbeds that allow risk-free testing while still running the bot against a real exchange's API.
|
||||||
|
With some configuration, freqtrade (in combination with ccxt) provides access to these.
|
||||||
|
|
||||||
This document is a *light overview of configuring Freqtrade and GDAX sandbox.
|
This document is an overview of how to configure Freqtrade to use such sandboxes.
|
||||||
This can be useful to developers and trader alike as Freqtrade is quite customisable.
|
This can be useful to developers and traders alike.
|
||||||
|
|
||||||
When testing your API connectivity, make sure to use the following URLs.
|
## Exchanges known to have a sandbox / testnet
|
||||||
***Website**
|
|
||||||
https://public.sandbox.gdax.com
|
* [binance](https://testnet.binance.vision/)
|
||||||
***REST API**
|
* [coinbasepro](https://public.sandbox.pro.coinbase.com)
|
||||||
https://api-public.sandbox.gdax.com
|
* [gemini](https://exchange.sandbox.gemini.com/)
|
||||||
|
* [huobipro](https://www.testnet.huobi.pro/)
|
||||||
|
* [kucoin](https://sandbox.kucoin.com/)
|
||||||
|
* [phemex](https://testnet.phemex.com/)
|
||||||
|
|
||||||
|
!!! Note
|
||||||
|
We have not verified that all of the above testnets work correctly. Please report your experiences with each sandbox.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
# Configure a Sandbox account on Gdax
|
## Configure a Sandbox account
|
||||||
|
|
||||||
Aim of this document section
|
When testing your API connectivity, make sure to use the appropriate sandbox / testnet URL.
|
||||||
|
|
||||||
- An sanbox account
|
In general, you should follow these steps to enable an exchange's sandbox:
|
||||||
- create 2FA (needed to create an API)
|
|
||||||
- Add test 50BTC to account
|
|
||||||
- Create :
|
|
||||||
- - API-KEY
|
|
||||||
- - API-Secret
|
|
||||||
- - API Password
|
|
||||||
|
|
||||||
## Acccount
|
* Figure out if an exchange has a sandbox (most likely by using Google or the exchange's support documents)
|
||||||
|
* Create a sandbox account (often the sandbox-account requires separate registration)
|
||||||
|
* [Add some test assets to your account](#add-test-funds)
|
||||||
|
* Create API keys
|
||||||
|
|
||||||
This link will redirect to the sandbox main page to login / create account dialogues:
|
### Add test funds
|
||||||
https://public.sandbox.pro.coinbase.com/orders/
|
|
||||||
|
|
||||||
After registration and Email confimation you wil be redirected into your sanbox account. It is easy to verify you're in sandbox by checking the URL bar.
|
Usually, sandbox exchanges allow depositing funds directly via the web interface.
|
||||||
> https://public.sandbox.pro.coinbase.com/
|
You should make sure to have a realistic amount of funds available in your test account, so that results are representative of your real account.
|
||||||
|
|
||||||
## Enable 2Fa (a prerequisite to creating sandbox API Keys)
|
!!! Warning
|
||||||
|
Test exchanges will **NEVER** require your real credit card or banking details!
|
||||||
|
|
||||||
From within sand box site select your profile, top right.
|
## Configure freqtrade to use an exchange's sandbox
|
||||||
>Or as a direct link: https://public.sandbox.pro.coinbase.com/profile
|
|
||||||
|
|
||||||
From the menu panel to the left of the screen select
|
### Sandbox URLs
|
||||||
|
|
||||||
> Security: "*View or Update*"
|
|
||||||
|
|
||||||
In the new site select "enable authenticator" as typical google Authenticator.
|
|
||||||
|
|
||||||
- open Google Authenticator on your phone
|
|
||||||
- scan barcode
|
|
||||||
- enter your generated 2fa
|
|
||||||
|
|
||||||
## Enable API Access
|
|
||||||
|
|
||||||
From within sandbox select profile>api>create api-keys
|
|
||||||
>or as a direct link: https://public.sandbox.pro.coinbase.com/profile/api
|
|
||||||
|
|
||||||
Click on "create one" and ensure **view** and **trade** are "checked" and sumbit your 2FA
|
|
||||||
|
|
||||||
- **Copy and paste the Passphase** into a notepade this will be needed later
|
|
||||||
- **Copy and paste the API Secret** popup into a notepad this will needed later
|
|
||||||
- **Copy and paste the API Key** into a notepad this will needed later
|
|
||||||
|
|
||||||
## Add 50 BTC test funds
|
|
||||||
|
|
||||||
To add funds, use the web interface deposit and withdraw buttons.
|
|
||||||
|
|
||||||
To begin select 'Wallets' from the top menu.
|
|
||||||
> Or as a direct link: https://public.sandbox.pro.coinbase.com/wallets
|
|
||||||
|
|
||||||
- Deposits (bottom left of screen)
|
|
||||||
- - Deposit Funds Bitcoin
|
|
||||||
- - - Coinbase BTC Wallet
|
|
||||||
- - - - Max (50 BTC)
|
|
||||||
- - - - - Deposit
|
|
||||||
|
|
||||||
*This process may be repeated for other currencies, ETH as example*
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
# Configure Freqtrade to use Gax Sandbox
|
|
||||||
|
|
||||||
The aim of this document section
|
|
||||||
|
|
||||||
- Enable sandbox URLs in Freqtrade
|
|
||||||
- Configure API
|
|
||||||
- - secret
|
|
||||||
- - key
|
|
||||||
- - passphrase
|
|
||||||
|
|
||||||
## Sandbox URLs
|
|
||||||
|
|
||||||
Freqtrade makes use of CCXT which in turn provides a list of URLs to Freqtrade.
|
Freqtrade makes use of CCXT which in turn provides a list of URLs to Freqtrade.
|
||||||
These include `['test']` and `['api']`.
|
These include `['test']` and `['api']`.
|
||||||
|
|
||||||
- `[Test]` if available will point to an Exchanges sandbox.
|
* `[Test]`, if available, points to the exchange's sandbox.
|
||||||
- `[Api]` normally used, and resolves to live API target on the exchange
|
* `[Api]` is normally used, and resolves to the live API target on the exchange (see the sketch below).
|
||||||
|
|
||||||
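As a rough illustration of what that switch means at the ccxt level, the following minimal sketch (not freqtrade code; the exchange name and credentials are placeholders) swaps the live `api` URL for the `test` URL when the exchange provides one:

``` python
import ccxt

# Hypothetical, standalone example - freqtrade does the equivalent for you
# when "sandbox": true is set in the exchange section of the configuration.
exchange = ccxt.coinbasepro({
    "apiKey": "your-sandbox-key",          # sandbox credentials, not live ones
    "secret": "your-sandbox-secret",
    "password": "your-sandbox-passphrase",
})

# Swaps urls['api'] for urls['test'] if the exchange defines a sandbox URL;
# exchanges without a sandbox raise an error here.
exchange.set_sandbox_mode(True)
```

In freqtrade itself you only set `"sandbox": true` as shown below; the sketch is just to make the mechanism visible.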
To make use of sandbox / test add "sandbox": true, to your config.json
|
To make use of sandbox / test add "sandbox": true, to your config.json
|
||||||
|
|
||||||
```json
|
```json
|
||||||
"exchange": {
|
"exchange": {
|
||||||
"name": "gdax",
|
"name": "coinbasepro",
|
||||||
"sandbox": true,
|
"sandbox": true,
|
||||||
"key": "5wowfxemogxeowo;heiohgmd",
|
"key": "5wowfxemogxeowo;heiohgmd",
|
||||||
"secret": "/ZMH1P62rCVmwefewrgcewX8nh4gob+lywxfwfxwwfxwfNsH1ySgvWCUR/w==",
|
"secret": "/ZMH1P62rCVmwefewrgcewX8nh4gob+lywxfwfxwwfxwfNsH1ySgvWCUR/w==",
|
||||||
@ -106,36 +61,57 @@ To make use of sandbox / test add "sandbox": true, to your config.json
|
|||||||
"outdated_offset": 5
|
"outdated_offset": 5
|
||||||
"pair_whitelist": [
|
"pair_whitelist": [
|
||||||
"BTC/USD"
|
"BTC/USD"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"datadir": "user_data/data/coinbasepro_sandbox"
|
||||||
```
|
```
|
||||||
|
|
||||||
Also insert your
|
Also insert the following information:
|
||||||
|
|
||||||
- api-key (noted earlier)
|
* api-key (created on the sandbox webpage)
|
||||||
- api-secret (noted earlier)
|
* api-secret (noted earlier)
|
||||||
- password (the passphrase - noted earlier)
|
* password (the passphrase - noted earlier)
|
||||||
|
|
||||||
|
!!! Tip "Different data directory"
|
||||||
|
We also recommend setting `datadir` to something that identifies downloaded data as sandbox data, to avoid mixing sandbox data with data from the real exchange.
|
||||||
|
This can be done by adding the `"datadir"` key to the configuration.
|
||||||
|
Now, whenever you use this configuration, your data directory will be set to this directory.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## You should now be ready to test your sandbox
|
## You should now be ready to test your sandbox
|
||||||
|
|
||||||
Ensure Freqtrade logs show the sandbox URL, and trades made are shown in sandbox.
|
Ensure the Freqtrade logs show the sandbox URL, and that trades made are shown in the sandbox. Also make sure to select a pair which shows at least some decent value (which very often is BTC/<somestablecoin>).
|
||||||
** Typically the BTC/USD has the most activity in sandbox to test against.
|
|
||||||
|
|
||||||
## GDAX - Old Candles problem
|
## Common problems with sandbox exchanges
|
||||||
|
|
||||||
It is my experience that GDAX sandbox candles may be 20+- minutes out of date. This can cause trades to fail as one of Freqtrades safety checks.
|
Sandbox exchange instances often have very low volume, which can cause problems that are usually not seen on a real exchange instance.
|
||||||
|
|
||||||
To disable this check, add / change the `"outdated_offset"` parameter in the exchange section of your configuration to adjust for this delay.
|
### Old Candles problem
|
||||||
Example based on the above configuration:
|
|
||||||
|
|
||||||
```json
|
Since sandboxes often have low volume, candles can be quite old and show no volume.
|
||||||
"exchange": {
|
To avoid the error "Outdated history for pair ...", it is best to increase the `"outdated_offset"` parameter to a number that seems realistic for the sandbox you're using.
|
||||||
"name": "gdax",
|
|
||||||
"sandbox": true,
|
### Unfilled orders
|
||||||
"key": "5wowfxemogxeowo;heiohgmd",
|
|
||||||
"secret": "/ZMH1P62rCVmwefewrgcewX8nh4gob+lywxfwfxwwfxwfNsH1ySgvWCUR/w==",
|
Sandboxes often have very low volumes - which means that many trades can go unfilled, or can go unfilled for a very long time.
|
||||||
"password": "1bkjfkhfhfu6sr",
|
|
||||||
"outdated_offset": 30
|
To mitigate this, you can try to match the first order on the opposite orderbook side using the following configuration:
|
||||||
"pair_whitelist": [
|
|
||||||
"BTC/USD"
|
``` jsonc
|
||||||
```
|
"order_types": {
|
||||||
|
"buy": "limit",
|
||||||
|
"sell": "limit"
|
||||||
|
// ...
|
||||||
|
},
|
||||||
|
"bid_strategy": {
|
||||||
|
"price_side": "ask",
|
||||||
|
// ...
|
||||||
|
},
|
||||||
|
"ask_strategy":{
|
||||||
|
"price_side": "bid",
|
||||||
|
// ...
|
||||||
|
},
|
||||||
|
```
|
||||||
|
|
||||||
|
The configuration is similar to the suggested configuration for market orders - however by using limit-orders you can avoid moving the price too much, and you can set the worst price you might get.
|
||||||
|
@ -46,7 +46,7 @@ sqlite3
|
|||||||
### Trade table structure
|
### Trade table structure
|
||||||
|
|
||||||
```sql
|
```sql
|
||||||
CREATE TABLE trades
|
CREATE TABLE trades(
|
||||||
id INTEGER NOT NULL,
|
id INTEGER NOT NULL,
|
||||||
exchange VARCHAR NOT NULL,
|
exchange VARCHAR NOT NULL,
|
||||||
pair VARCHAR NOT NULL,
|
pair VARCHAR NOT NULL,
|
||||||
@ -110,7 +110,7 @@ SET is_open=0,
|
|||||||
close_date=<close_date>,
|
close_date=<close_date>,
|
||||||
close_rate=<close_rate>,
|
close_rate=<close_rate>,
|
||||||
close_profit = close_rate / open_rate - 1,
|
close_profit = close_rate / open_rate - 1,
|
||||||
close_profit_abs = (amount * <close_rate> * (1 - fee_close) - (amount * (open_rate * 1 - fee_open))),
|
close_profit_abs = (amount * <close_rate> * (1 - fee_close) - (amount * (open_rate * (1 - fee_open)))),
|
||||||
sell_reason=<sell_reason>
|
sell_reason=<sell_reason>
|
||||||
WHERE id=<trade_ID_to_update>;
|
WHERE id=<trade_ID_to_update>;
|
||||||
```
|
```
|
||||||
@ -123,7 +123,7 @@ SET is_open=0,
|
|||||||
close_date='2020-06-20 03:08:45.103418',
|
close_date='2020-06-20 03:08:45.103418',
|
||||||
close_rate=0.19638016,
|
close_rate=0.19638016,
|
||||||
close_profit=0.0496,
|
close_profit=0.0496,
|
||||||
close_profit_abs = (amount * 0.19638016 * (1 - fee_close) - (amount * open_rate * (1 - fee_open))),
|
close_profit_abs = (amount * 0.19638016 * (1 - fee_close) - (amount * (open_rate * (1 - fee_open)))),
|
||||||
sell_reason='force_sell'
|
sell_reason='force_sell'
|
||||||
WHERE id=31;
|
WHERE id=31;
|
||||||
```
|
```
|
||||||
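If you want to double-check the corrected `close_profit_abs` expression before running the `UPDATE`, a small Python sketch mirrors the same arithmetic. The amount, rates and fees below are purely illustrative; substitute the values of the trade you are fixing.

``` python
# Purely illustrative numbers - not taken from any real trade.
amount = 100.0
open_rate = 0.10
close_rate = 0.11
fee_open = 0.0025
fee_close = 0.0025

close_profit = close_rate / open_rate - 1
close_profit_abs = (amount * close_rate * (1 - fee_close)
                    - amount * open_rate * (1 - fee_open))

print(close_profit)       # ~0.1    -> 10% relative profit
print(close_profit_abs)   # 0.9975  -> absolute profit in stake currency
```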
|
151
docs/stoploss.md
151
docs/stoploss.md
@ -6,7 +6,63 @@ For example, value `-0.10` will cause immediate sell if the profit dips below -1
|
|||||||
Most of the strategy files already include the optimal `stoploss` value.
|
Most of the strategy files already include the optimal `stoploss` value.
|
||||||
|
|
||||||
!!! Info
|
!!! Info
|
||||||
All stoploss properties mentioned in this file can be set in the Strategy, or in the configuration. Configuration values will override the strategy values.
|
All stoploss properties mentioned in this file can be set in the Strategy, or in the configuration.
|
||||||
|
<ins>Configuration values will override the strategy values.</ins>
|
||||||
|
|
||||||
|
## Stop Loss On-Exchange/Freqtrade
|
||||||
|
|
||||||
|
Those stoploss modes can be *on exchange* or *off exchange*.
|
||||||
|
|
||||||
|
These modes can be configured with these values:
|
||||||
|
|
||||||
|
``` python
|
||||||
|
'emergencysell': 'market',
|
||||||
|
'stoploss_on_exchange': False,
|
||||||
|
'stoploss_on_exchange_interval': 60,
|
||||||
|
'stoploss_on_exchange_limit_ratio': 0.99
|
||||||
|
```
|
||||||
|
|
||||||
|
!!! Note
|
||||||
|
Stoploss on exchange is only supported for Binance (stop-loss-limit), Kraken (stop-loss-market) and FTX (stop limit and stop-market) as of now.
|
||||||
|
<ins>Do not set too low a stoploss value if using stop loss on exchange!</ins>
|
||||||
|
If set too low/tight, you have a greater risk of the order not being filled, in which case the stoploss will not work.
|
||||||
|
|
||||||
|
### stoploss_on_exchange and stoploss_on_exchange_limit_ratio
|
||||||
|
Enable or disable stop loss on exchange.
|
||||||
|
If the stoploss is *on exchange*, it means a stoploss limit order is placed on the exchange immediately after the buy order succeeds. This will protect you against sudden crashes in the market, as the order is already in the exchange's queue and therefore has a better chance of being filled if the market goes down.
|
||||||
|
|
||||||
|
If `stoploss_on_exchange` uses limit orders, the exchange needs two prices: the stoploss price and the limit price.
|
||||||
|
`stoploss` defines the stop-price where the limit order is placed - and limit should be slightly below this.
|
||||||
|
If an exchange supports both limit and market stoploss orders, then the value of `stoploss` will be used to determine the stoploss type.
|
||||||
|
|
||||||
|
Calculation example: we bought the asset at 100$.
|
||||||
|
Stop-price is 95$, then limit would be `95 * 0.99 = 94.05$` - so the limit order fill can happen between 95$ and 94.05$.
|
||||||
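The same arithmetic as a short Python sketch (the numbers are the illustrative values from the example above; the variable name simply mirrors the configuration key):

``` python
stop_price = 95.0                        # stop-price from the example above
stoploss_on_exchange_limit_ratio = 0.99

limit_price = stop_price * stoploss_on_exchange_limit_ratio
print(limit_price)  # 94.05 - the limit order can fill between 95$ and 94.05$
```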
|
|
||||||
|
For example, assuming the stoploss is on exchange, and trailing stoploss is enabled, and the market is going up, then the bot automatically cancels the previous stoploss order and puts a new one with a stop value higher than the previous stoploss order.
|
||||||
|
|
||||||
|
### stoploss_on_exchange_interval
|
||||||
|
In case of stoploss on exchange there is another parameter called `stoploss_on_exchange_interval`. This configures the interval in seconds at which the bot will check the stoploss and update it if necessary.
|
||||||
|
The bot cannot do this every 5 seconds (at each iteration), otherwise it would get banned by the exchange.
|
||||||
|
So this parameter will tell the bot how often it should update the stoploss order. The default value is 60 (1 minute).
|
||||||
|
This same logic will reapply a stoploss order on the exchange should you cancel it accidentally.
|
||||||
|
|
||||||
|
### emergencysell
|
||||||
|
`emergencysell` is an optional value, which defaults to `market` and is used when creating stoploss-on-exchange orders fails.
|
||||||
|
The example below shows the defaults, which are used if not changed in the strategy or configuration file.
|
||||||
|
|
||||||
|
Example from strategy file:
|
||||||
|
|
||||||
|
``` python
|
||||||
|
order_types = {
|
||||||
|
'buy': 'limit',
|
||||||
|
'sell': 'limit',
|
||||||
|
'emergencysell': 'market',
|
||||||
|
'stoploss': 'market',
|
||||||
|
'stoploss_on_exchange': True,
|
||||||
|
'stoploss_on_exchange_interval': 60,
|
||||||
|
'stoploss_on_exchange_limit_ratio': 0.99
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
## Stop Loss Types
|
## Stop Loss Types
|
||||||
|
|
||||||
@ -17,29 +73,29 @@ At this stage the bot contains the following stoploss support modes:
|
|||||||
3. Trailing stop loss, custom positive loss.
|
3. Trailing stop loss, custom positive loss.
|
||||||
4. Trailing stop loss only once the trade has reached a certain offset.
|
4. Trailing stop loss only once the trade has reached a certain offset.
|
||||||
|
|
||||||
Those stoploss modes can be *on exchange* or *off exchange*. If the stoploss is *on exchange* it means a stoploss limit order is placed on the exchange immediately after buy order happens successfully. This will protect you against sudden crashes in market as the order will be in the queue immediately and if market goes down then the order has more chance of being fulfilled.
|
### Static Stop Loss
|
||||||
|
|
||||||
In case of stoploss on exchange there is another parameter called `stoploss_on_exchange_interval`. This configures the interval in seconds at which the bot will check the stoploss and update it if necessary.
|
|
||||||
|
|
||||||
For example, assuming the stoploss is on exchange, and trailing stoploss is enabled, and the market is going up, then the bot automatically cancels the previous stoploss order and puts a new one with a stop value higher than the previous stoploss order.
|
|
||||||
The bot cannot do this every 5 seconds (at each iteration), otherwise it would get banned by the exchange.
|
|
||||||
So this parameter will tell the bot how often it should update the stoploss order. The default value is 60 (1 minute).
|
|
||||||
This same logic will reapply a stoploss order on the exchange should you cancel it accidentally.
|
|
||||||
|
|
||||||
!!! Note
|
|
||||||
Stoploss on exchange is only supported for Binance (stop-loss-limit), Kraken (stop-loss-market) and FTX (stop limit and stop-market) as of now.
|
|
||||||
|
|
||||||
## Static Stop Loss
|
|
||||||
|
|
||||||
This is very simple: you define a stop loss of x (as a ratio of price, i.e. x * 100% of price). This will try to sell the asset once the loss exceeds the defined loss.
|
This is very simple: you define a stop loss of x (as a ratio of price, i.e. x * 100% of price). This will try to sell the asset once the loss exceeds the defined loss.
|
||||||
|
|
||||||
## Trailing Stop Loss
|
Example of stop loss:
|
||||||
|
|
||||||
|
``` python
|
||||||
|
stoploss = -0.10
|
||||||
|
```
|
||||||
|
|
||||||
|
For example, simplified math:
|
||||||
|
* the bot buys an asset at a price of 100$
|
||||||
|
* the stop loss is defined at -10%
|
||||||
|
* the stop loss would get triggered once the asset drops below 90$
|
||||||
|
|
||||||
|
### Trailing Stop Loss
|
||||||
|
|
||||||
The initial value for this is `stoploss`, just as you would define your static Stop loss.
|
The initial value for this is `stoploss`, just as you would define your static Stop loss.
|
||||||
To enable trailing stoploss:
|
To enable trailing stoploss:
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
trailing_stop = True
|
stoploss = -0.10
|
||||||
|
trailing_stop = True
|
||||||
```
|
```
|
||||||
|
|
||||||
This will now activate an algorithm, which automatically moves the stop loss up every time the price of your asset increases.
|
This will now activate an algorithm, which automatically moves the stop loss up every time the price of your asset increases.
|
||||||
@ -47,35 +103,43 @@ This will now activate an algorithm, which automatically moves the stop loss up
|
|||||||
For example, simplified math:
|
For example, simplified math:
|
||||||
|
|
||||||
* the bot buys an asset at a price of 100$
|
* the bot buys an asset at a price of 100$
|
||||||
* the stop loss is defined at 2%
|
* the stop loss is defined at -10%
|
||||||
* the stop loss would get triggered once the asset dropps below 98$
|
* the stop loss would get triggered once the asset drops below 90$
|
||||||
* assuming the asset now increases to 102$
|
* assuming the asset now increases to 102$
|
||||||
* the stop loss will now be 2% of 102$ or 99.96$
|
* the stop loss will now be -10% of 102$ = 91.8$
|
||||||
* now the asset drops in value to 101$, the stop loss will still be 99.96$ and would trigger at 99.96$.
|
* now the asset drops in value to 101$, the stop loss will still be 91.8$ and would trigger at 91.8$.
|
||||||
|
|
||||||
In summary: The stoploss will be adjusted to be always be 2% of the highest observed price.
|
In summary: The stoploss will be adjusted to always be -10% of the highest observed price.
|
||||||
|
|
||||||
### Custom positive stoploss
|
### Trailing stop loss, custom positive loss
|
||||||
|
|
||||||
It is also possible to have a default stop loss, when you are in the red with your buy, but once your profit surpasses a certain percentage, the system will utilize a new stop loss, which can have a different value.
|
It is also possible to have a default stop loss while you are in the red with your buy (buy - fee), but once you hit a positive result, the system will utilize a new stop loss, which can have a different value.
|
||||||
For example your default stop loss is 5%, but once you have 1.1% profit, it will be changed to be only a 1% stop loss, which trails the green candles until it goes below them.
|
For example, your default stop loss is -10%, but once you have more than 0% profit (e.g. 0.1%), a different trailing stoploss will be used.
|
||||||
|
|
||||||
Both values require `trailing_stop` to be set to true.
|
!!! Note
|
||||||
|
If you want the stoploss to only be changed when you break even or make a profit (what most users want), please refer to the next section with [offset enabled](#Trailing-stop-loss-only-once-the-trade-has-reached-a-certain-offset).
|
||||||
|
|
||||||
|
Both values require `trailing_stop` to be set to true and `trailing_stop_positive` to be set to a value.
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
trailing_stop_positive = 0.01
|
stoploss = -0.10
|
||||||
trailing_stop_positive_offset = 0.011
|
trailing_stop = True
|
||||||
|
trailing_stop_positive = 0.02
|
||||||
```
|
```
|
||||||
|
|
||||||
The 0.01 would translate to a 1% stop loss, once you hit 1.1% profit.
|
For example, simplified math:
|
||||||
|
|
||||||
|
* the bot buys an asset at a price of 100$
|
||||||
|
* the stop loss is defined at -10%
|
||||||
|
* the stop loss would get triggered once the asset drops below 90$
|
||||||
|
* assuming the asset now increases to 102$
|
||||||
|
* the stop loss will now be -2% of 102$ = 99.96$ (the 99.96$ stop loss will be locked in and will follow asset price increases at -2%)
|
||||||
|
* now the asset drops in value to 101$, the stop loss will still be 99.96$ and would trigger at 99.96$
|
||||||
|
|
||||||
|
The 0.02 would translate to a -2% stop loss.
|
||||||
Before this, `stoploss` is used for the trailing stoploss.
|
Before this, `stoploss` is used for the trailing stoploss.
|
||||||
|
|
||||||
Read the [next section](#trailing-only-once-offset-is-reached) to keep stoploss at 5% of the entry point.
|
### Trailing stop loss only once the trade has reached a certain offset
|
||||||
|
|
||||||
!!! Tip
|
|
||||||
Make sure to have this value (`trailing_stop_positive_offset`) lower than minimal ROI, otherwise minimal ROI will apply first and sell the trade.
|
|
||||||
|
|
||||||
### Trailing only once offset is reached
|
|
||||||
|
|
||||||
It is also possible to use a static stoploss until the offset is reached, and then trail the trade to take profits once the market turns.
|
It is also possible to use a static stoploss until the offset is reached, and then trail the trade to take profits once the market turns.
|
||||||
|
|
||||||
@ -87,17 +151,28 @@ This option can be used with or without `trailing_stop_positive`, but uses `trai
|
|||||||
trailing_only_offset_is_reached = True
|
trailing_only_offset_is_reached = True
|
||||||
```
|
```
|
||||||
|
|
||||||
Simplified example:
|
Configuration (offset is buy price + 3%):
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
stoploss = 0.05
|
stoploss = -0.10
|
||||||
|
trailing_stop = True
|
||||||
|
trailing_stop_positive = 0.02
|
||||||
trailing_stop_positive_offset = 0.03
|
trailing_stop_positive_offset = 0.03
|
||||||
trailing_only_offset_is_reached = True
|
trailing_only_offset_is_reached = True
|
||||||
```
|
```
|
||||||
|
|
||||||
|
For example, simplified math:
|
||||||
|
|
||||||
* the bot buys an asset at a price of 100$
|
* the bot buys an asset at a price of 100$
|
||||||
* the stop loss is defined at 5%
|
* the stop loss is defined at -10%
|
||||||
* the stop loss will remain at 95% until profit reaches +3%
|
* the stop loss would get triggered once the asset drops below 90$
|
||||||
|
* stoploss will remain at 90$ unless asset increases to or above our configured offset
|
||||||
|
* assuming the asset now increases to 103$ (where we have the offset configured)
|
||||||
|
* the stop loss will now be -2% of 103$ = 100.94$
|
||||||
|
* now the asset drops in value to 101$, the stop loss will still be 100.94$ and would trigger at 100.94$
|
||||||
|
|
||||||
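The same walk-through as a small Python sketch. The helper function below is purely illustrative (it is not a freqtrade API), but it reproduces the numbers from the example:

``` python
def current_stop(highest_price, open_rate=100.0, stoploss=-0.10,
                 trailing_stop_positive=0.02,
                 trailing_stop_positive_offset=0.03):
    """Illustrative only: stop price for the worked example above."""
    if highest_price < open_rate * (1 + trailing_stop_positive_offset):
        # offset (buy price + 3%) not reached yet -> the static stoploss applies
        return open_rate * (1 + stoploss)
    # offset reached -> trail 2% below the highest observed price
    return highest_price * (1 - trailing_stop_positive)


print(current_stop(102.0))  # 90.0   - 102$ is still below the 103$ offset
print(current_stop(103.0))  # 100.94 - the trailing stop takes over at the offset
```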
|
!!! Tip
|
||||||
|
Make sure to have this value (`trailing_stop_positive_offset`) lower than minimal ROI, otherwise minimal ROI will apply first and sell the trade.
|
||||||
|
|
||||||
## Changing stoploss on open trades
|
## Changing stoploss on open trades
|
||||||
|
|
||||||
|
@ -199,3 +199,24 @@ class Awesomestrategy(IStrategy):
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Derived strategies
|
||||||
|
|
||||||
|
The strategies can be derived from other strategies. This avoids duplication of your custom strategy code. You can use this technique to override small parts of your main strategy, leaving the rest untouched:
|
||||||
|
|
||||||
|
``` python
|
||||||
|
class MyAwesomeStrategy(IStrategy):
|
||||||
|
...
|
||||||
|
stoploss = -0.13
|
||||||
|
trailing_stop = False
|
||||||
|
# All other attributes and methods are here as they
|
||||||
|
# should be in any custom strategy...
|
||||||
|
...
|
||||||
|
|
||||||
|
class MyAwesomeStrategy2(MyAwesomeStrategy):
|
||||||
|
# Override something
|
||||||
|
stoploss = -0.08
|
||||||
|
trailing_stop = True
|
||||||
|
```
|
||||||
|
|
||||||
|
Both attributes and methods may be overridden, altering the behavior of the original strategy in the way you need.
|
||||||
|
@ -58,12 +58,12 @@ file as reference.**
|
|||||||
|
|
||||||
!!! Note "Strategies and Backtesting"
|
!!! Note "Strategies and Backtesting"
|
||||||
To avoid problems and unexpected differences between Backtesting and dry/live modes, please be aware
|
To avoid problems and unexpected differences between Backtesting and dry/live modes, please be aware
|
||||||
that during backtesting the full time-interval is passed to the `populate_*()` methods at once.
|
that during backtesting the full time range is passed to the `populate_*()` methods at once.
|
||||||
It is therefore best to use vectorized operations (across the whole dataframe, not loops) and
|
It is therefore best to use vectorized operations (across the whole dataframe, not loops) and
|
||||||
avoid index referencing (`df.iloc[-1]`), but instead use `df.shift()` to get to the previous candle.
|
avoid index referencing (`df.iloc[-1]`), but instead use `df.shift()` to get to the previous candle.
|
||||||
|
|
||||||
!!! Warning "Warning: Using future data"
|
!!! Warning "Warning: Using future data"
|
||||||
Since backtesting passes the full time interval to the `populate_*()` methods, the strategy author
|
Since backtesting passes the full time range to the `populate_*()` methods, the strategy author
|
||||||
needs to take care to avoid having the strategy utilize data from the future.
|
needs to take care to avoid having the strategy utilize data from the future.
|
||||||
Some common patterns for this are listed in the [Common Mistakes](#common-mistakes-when-developing-strategies) section of this document.
|
Some common patterns for this are listed in the [Common Mistakes](#common-mistakes-when-developing-strategies) section of this document.
|
||||||
|
|
||||||
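As a minimal, self-contained illustration of the vectorized, shift-based pattern described above (the buy condition itself is only an example, not a recommendation):

``` python
import pandas as pd

# Compare each candle to the previous one with .shift() instead of
# looping over rows or indexing with .iloc[-1].
dataframe = pd.DataFrame({'close': [100.0, 101.0, 100.5, 102.0]})
dataframe.loc[
    dataframe['close'] > dataframe['close'].shift(1),  # close rose vs. previous candle
    'buy'] = 1
print(dataframe)
```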
@ -251,7 +251,7 @@ minimal_roi = {
|
|||||||
While technically not completely disabled, this would sell once the trade reaches 10000% Profit.
|
While technically not completely disabled, this would sell once the trade reaches 10000% Profit.
|
||||||
|
|
||||||
To use times based on candle duration (timeframe), the following snippet can be handy.
|
To use times based on candle duration (timeframe), the following snippet can be handy.
|
||||||
This will allow you to change the ticket_interval for the strategy, and ROI times will still be set as candles (e.g. after 3 candles ...)
|
This will allow you to change the timeframe for the strategy, and ROI times will still be set as candles (e.g. after 3 candles ...)
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
from freqtrade.exchange import timeframe_to_minutes
|
from freqtrade.exchange import timeframe_to_minutes
|
||||||
@ -285,7 +285,7 @@ If your exchange supports it, it's recommended to also set `"stoploss_on_exchang
|
|||||||
|
|
||||||
For more information on order_types please look [here](configuration.md#understand-order_types).
|
For more information on order_types please look [here](configuration.md#understand-order_types).
|
||||||
|
|
||||||
### Timeframe (ticker interval)
|
### Timeframe (formerly ticker interval)
|
||||||
|
|
||||||
This is the set of candles the bot should download and use for the analysis.
|
This is the set of candles the bot should download and use for the analysis.
|
||||||
Common values are `"1m"`, `"5m"`, `"15m"`, `"1h"`, however all values supported by your exchange should work.
|
Common values are `"1m"`, `"5m"`, `"15m"`, `"1h"`, however all values supported by your exchange should work.
|
||||||
@ -328,15 +328,15 @@ class Awesomestrategy(IStrategy):
|
|||||||
|
|
||||||
***
|
***
|
||||||
|
|
||||||
### Additional data (informative_pairs)
|
## Additional data (informative_pairs)
|
||||||
|
|
||||||
#### Get data for non-tradeable pairs
|
### Get data for non-tradeable pairs
|
||||||
|
|
||||||
Data for additional, informative pairs (reference pairs) can be beneficial for some strategies.
|
Data for additional, informative pairs (reference pairs) can be beneficial for some strategies.
|
||||||
Ohlcv data for these pairs will be downloaded as part of the regular whitelist refresh process and is available via `DataProvider` just as other pairs (see below).
|
OHLCV data for these pairs will be downloaded as part of the regular whitelist refresh process and is available via `DataProvider` just as other pairs (see below).
|
||||||
These pairs will **not** be traded unless they are also specified in the pair whitelist, or have been selected by Dynamic Whitelisting.
|
These pairs will **not** be traded unless they are also specified in the pair whitelist, or have been selected by Dynamic Whitelisting.
|
||||||
|
|
||||||
The pairs need to be specified as tuples in the format `("pair", "interval")`, with pair as the first and time interval as the second argument.
|
The pairs need to be specified as tuples in the format `("pair", "timeframe")`, with pair as the first and timeframe as the second argument.
|
||||||
|
|
||||||
Sample:
|
Sample:
|
||||||
|
|
||||||
@ -347,15 +347,17 @@ def informative_pairs(self):
|
|||||||
]
|
]
|
||||||
```
|
```
|
||||||
|
|
||||||
|
A full sample can be found [in the DataProvider section](#complete-data-provider-sample).
|
||||||
|
|
||||||
!!! Warning
|
!!! Warning
|
||||||
As these pairs will be refreshed as part of the regular whitelist refresh, it's best to keep this list short.
|
As these pairs will be refreshed as part of the regular whitelist refresh, it's best to keep this list short.
|
||||||
All intervals and all pairs can be specified as long as they are available (and active) on the used exchange.
|
All timeframes and all pairs can be specified as long as they are available (and active) on the used exchange.
|
||||||
It is however better to use resampling to longer time-intervals when possible
|
It is however better to use resampling to longer timeframes whenever possible
|
||||||
to avoid hammering the exchange with too many requests and risk being blocked.
|
to avoid hammering the exchange with too many requests and risk being blocked.
|
||||||
|
|
||||||
***
|
***
|
||||||
|
|
||||||
### Additional data (DataProvider)
|
## Additional data (DataProvider)
|
||||||
|
|
||||||
The strategy provides access to the `DataProvider`. This allows you to get additional data to use in your strategy.
|
The strategy provides access to the `DataProvider`. This allows you to get additional data to use in your strategy.
|
||||||
|
|
||||||
@ -363,10 +365,14 @@ All methods return `None` in case of failure (do not raise an exception).
|
|||||||
|
|
||||||
Please always check the mode of operation to select the correct method to get data (samples see below).
|
Please always check the mode of operation to select the correct method to get data (samples see below).
|
||||||
|
|
||||||
#### Possible options for DataProvider
|
!!! Warning "Hyperopt"
|
||||||
|
Dataprovider is available during hyperopt, however it can only be used in `populate_indicators()` within a strategy.
|
||||||
|
It is not available in the `populate_buy()` and `populate_sell()` methods, nor in `populate_indicators()`, if this method is located in the hyperopt file.
|
||||||
|
|
||||||
- [`available_pairs`](#available_pairs) - Property with tuples listing cached pairs with their intervals (pair, interval).
|
### Possible options for DataProvider
|
||||||
- [`current_whitelist()`](#current_whitelist) - Returns a current list of whitelisted pairs. Useful for accessing dynamic whitelists (ie. VolumePairlist)
|
|
||||||
|
- [`available_pairs`](#available_pairs) - Property with tuples listing cached pairs with their timeframe (pair, timeframe).
|
||||||
|
- [`current_whitelist()`](#current_whitelist) - Returns a current list of whitelisted pairs. Useful for accessing dynamic whitelists (i.e. VolumePairlist)
|
||||||
- [`get_pair_dataframe(pair, timeframe)`](#get_pair_dataframepair-timeframe) - This is a universal method, which returns either historical data (for backtesting) or cached live data (for the Dry-Run and Live-Run modes).
|
- [`get_pair_dataframe(pair, timeframe)`](#get_pair_dataframepair-timeframe) - This is a universal method, which returns either historical data (for backtesting) or cached live data (for the Dry-Run and Live-Run modes).
|
||||||
- [`get_analyzed_dataframe(pair, timeframe)`](#get_analyzed_dataframepair-timeframe) - Returns the analyzed dataframe (after calling `populate_indicators()`, `populate_buy()`, `populate_sell()`) and the time of the latest analysis.
|
- [`get_analyzed_dataframe(pair, timeframe)`](#get_analyzed_dataframepair-timeframe) - Returns the analyzed dataframe (after calling `populate_indicators()`, `populate_buy()`, `populate_sell()`) and the time of the latest analysis.
|
||||||
- `historic_ohlcv(pair, timeframe)` - Returns historical data stored on disk.
|
- `historic_ohlcv(pair, timeframe)` - Returns historical data stored on disk.
|
||||||
@ -376,9 +382,9 @@ Please always check the mode of operation to select the correct method to get da
|
|||||||
- [`ticker(pair)`](#tickerpair) - Returns current ticker data for the pair. See [ccxt documentation](https://github.com/ccxt/ccxt/wiki/Manual#price-tickers) for more details on the Ticker data structure.
|
- [`ticker(pair)`](#tickerpair) - Returns current ticker data for the pair. See [ccxt documentation](https://github.com/ccxt/ccxt/wiki/Manual#price-tickers) for more details on the Ticker data structure.
|
||||||
- `runmode` - Property containing the current runmode.
|
- `runmode` - Property containing the current runmode.
|
||||||
|
|
||||||
#### Example Usages:
|
### Example Usages
|
||||||
|
|
||||||
#### *available_pairs*
|
### *available_pairs*
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
if self.dp:
|
if self.dp:
|
||||||
@ -386,7 +392,7 @@ if self.dp:
|
|||||||
print(f"available {pair}, {timeframe}")
|
print(f"available {pair}, {timeframe}")
|
||||||
```
|
```
|
||||||
|
|
||||||
#### *current_whitelist()*
|
### *current_whitelist()*
|
||||||
|
|
||||||
Imagine you've developed a strategy that trades the `5m` timeframe using signals generated from a `1d` timeframe on the top 10 pairs by volume.
|
Imagine you've developed a strategy that trades the `5m` timeframe using signals generated from a `1d` timeframe on the top 10 pairs by volume.
|
||||||
|
|
||||||
@ -401,6 +407,85 @@ Since we can't resample our data we will have to use an informative pair; and si
|
|||||||
This is where calling `self.dp.current_whitelist()` comes in handy.
|
This is where calling `self.dp.current_whitelist()` comes in handy.
|
||||||
|
|
||||||
```python
|
```python
|
||||||
|
def informative_pairs(self):
|
||||||
|
|
||||||
|
# get access to all pairs available in whitelist.
|
||||||
|
pairs = self.dp.current_whitelist()
|
||||||
|
# Assign tf to each pair so they can be downloaded and cached for strategy.
|
||||||
|
informative_pairs = [(pair, '1d') for pair in pairs]
|
||||||
|
return informative_pairs
|
||||||
|
```
|
||||||
|
|
||||||
|
### *get_pair_dataframe(pair, timeframe)*
|
||||||
|
|
||||||
|
``` python
|
||||||
|
# fetch live / historical candle (OHLCV) data for the first informative pair
|
||||||
|
if self.dp:
|
||||||
|
inf_pair, inf_timeframe = self.informative_pairs()[0]
|
||||||
|
informative = self.dp.get_pair_dataframe(pair=inf_pair,
|
||||||
|
timeframe=inf_timeframe)
|
||||||
|
```
|
||||||
|
|
||||||
|
!!! Warning "Warning about backtesting"
|
||||||
|
Be careful when using dataprovider in backtesting. `historic_ohlcv()` (and `get_pair_dataframe()`
|
||||||
|
for the backtesting runmode) provides the full time-range in one go,
|
||||||
|
so please be aware of it and make sure to not "look into the future" to avoid surprises when running in dry/live mode.
|
||||||
|
|
||||||
|
### *get_analyzed_dataframe(pair, timeframe)*
|
||||||
|
|
||||||
|
This method is used by freqtrade internally to determine the last signal.
|
||||||
|
It can also be used in specific callbacks to get the signal that caused the action (see [Advanced Strategy Documentation](strategy-advanced.md) for more details on available callbacks).
|
||||||
|
|
||||||
|
``` python
|
||||||
|
# fetch current dataframe
|
||||||
|
if self.dp:
|
||||||
|
dataframe, last_updated = self.dp.get_analyzed_dataframe(pair=metadata['pair'],
|
||||||
|
timeframe=self.timeframe)
|
||||||
|
```
|
||||||
|
|
||||||
|
!!! Note "No data available"
|
||||||
|
Returns an empty dataframe if the requested pair was not cached.
|
||||||
|
This should not happen when using whitelisted pairs.
|
||||||
|
|
||||||
|
### *orderbook(pair, maximum)*
|
||||||
|
|
||||||
|
``` python
|
||||||
|
if self.dp:
|
||||||
|
if self.dp.runmode.value in ('live', 'dry_run'):
|
||||||
|
ob = self.dp.orderbook(metadata['pair'], 1)
|
||||||
|
dataframe['best_bid'] = ob['bids'][0][0]
|
||||||
|
dataframe['best_ask'] = ob['asks'][0][0]
|
||||||
|
```
|
||||||
|
|
||||||
|
!!! Warning
|
||||||
|
The order book is not part of the historic data which means backtesting and hyperopt will not work correctly if this method is used.
|
||||||
|
|
||||||
|
### *ticker(pair)*
|
||||||
|
|
||||||
|
``` python
|
||||||
|
if self.dp:
|
||||||
|
if self.dp.runmode.value in ('live', 'dry_run'):
|
||||||
|
ticker = self.dp.ticker(metadata['pair'])
|
||||||
|
dataframe['last_price'] = ticker['last']
|
||||||
|
dataframe['volume24h'] = ticker['quoteVolume']
|
||||||
|
dataframe['vwap'] = ticker['vwap']
|
||||||
|
```
|
||||||
|
|
||||||
|
!!! Warning
|
||||||
|
Although the ticker data structure is a part of the ccxt Unified Interface, the values returned by this method can
|
||||||
|
vary for different exchanges. For instance, many exchanges do not return `vwap` values, the FTX exchange
|
||||||
|
does not always fill in the `last` field (so it can be None), etc. So you need to carefully verify the ticker
|
||||||
|
data returned from the exchange and add appropriate error handling / defaults.
|
||||||
|
|
||||||
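A slightly more defensive variant of the ticker snippet above, hedged for exchanges that leave fields empty (the fallback to `close` is just one possible choice, not a freqtrade recommendation):

``` python
if self.dp:
    if self.dp.runmode.value in ('live', 'dry_run'):
        ticker = self.dp.ticker(metadata['pair'])
        if ticker:
            # 'last' and 'vwap' may be missing or None depending on the exchange
            dataframe['last_price'] = ticker.get('last') or ticker.get('close')
```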
|
!!! Warning "Warning about backtesting"
|
||||||
|
This method will always return up-to-date values - so usage during backtesting / hyperopt will lead to wrong results.
|
||||||
|
|
||||||
|
### Complete Data-provider sample

```python
from freqtrade.strategy import IStrategy, merge_informative_pair
from pandas import DataFrame

class SampleStrategy(IStrategy):
    # strategy init stuff...

@ -414,27 +499,30 @@ class SampleStrategy(IStrategy):
        pairs = self.dp.current_whitelist()
        # Assign tf to each pair so they can be downloaded and cached for strategy.
        informative_pairs = [(pair, '1d') for pair in pairs]
        # Optionally Add additional "static" pairs
        informative_pairs += [("ETH/USDT", "5m"),
                              ("BTC/TUSD", "15m"),
                              ]
        return informative_pairs

    def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        if not self.dp:
            # Don't do anything if DataProvider is not available.
            return dataframe

        inf_tf = '1d'
        # Get the informative pair
        informative = self.dp.get_pair_dataframe(pair=metadata['pair'], timeframe='1d')
        informative = self.dp.get_pair_dataframe(pair=metadata['pair'], timeframe=inf_tf)
        # Get the 14 day rsi
        informative['rsi'] = ta.RSI(informative, timeperiod=14)

        # Rename columns to be unique
        informative.columns = [f"{col}_{inf_tf}" for col in informative.columns]
        # Assuming inf_tf = '1d' - then the columns will now be:
        # date_1d, open_1d, high_1d, low_1d, close_1d, rsi_1d

        # Combine the 2 dataframes
        # all indicators on the informative sample MUST be calculated before this point
        dataframe = pd.merge(dataframe, informative, left_on='date', right_on=f'date_{inf_tf}', how='left')
        # FFill to have the 1d value available in every row throughout the day.
        # Without this, comparisons would only work once per day.
        dataframe = dataframe.ffill()

        # Use the helper function merge_informative_pair to safely merge the pair
        # Automatically renames the columns and merges a shorter timeframe dataframe and a longer timeframe informative pair
        # use ffill to have the 1d value available in every row throughout the day.
        # Without this, comparisons between columns of the original and the informative pair would only work once per day.
        # Full documentation of this method, see below
        dataframe = merge_informative_pair(dataframe, informative, self.timeframe, inf_tf, ffill=True)

        # Calculate rsi of the original dataframe (5m timeframe)
        dataframe['rsi'] = ta.RSI(dataframe, timeperiod=14)
@ -455,77 +543,72 @@ class SampleStrategy(IStrategy):
```

#### *get_pair_dataframe(pair, timeframe)*

``` python
# fetch live / historical candle (OHLCV) data for the first informative pair
if self.dp:
    inf_pair, inf_timeframe = self.informative_pairs()[0]
    informative = self.dp.get_pair_dataframe(pair=inf_pair,
                                             timeframe=inf_timeframe)
```

!!! Warning "Warning about backtesting"
    Be careful when using dataprovider in backtesting. `historic_ohlcv()` (and `get_pair_dataframe()`
    for the backtesting runmode) provides the full time-range in one go,
    so please be aware of it and make sure to not "look into the future" to avoid surprises when running in dry/live mode.

!!! Warning "Warning in hyperopt"
    This option cannot currently be used during hyperopt.

#### *get_analyzed_dataframe(pair, timeframe)*

This method is used by freqtrade internally to determine the last signal.
It can also be used in specific callbacks to get the signal that caused the action (see [Advanced Strategy Documentation](strategy-advanced.md) for more details on available callbacks).

``` python
# fetch current dataframe
if self.dp:
    dataframe, last_updated = self.dp.get_analyzed_dataframe(pair=metadata['pair'],
                                                              timeframe=self.ticker_interval)
```

!!! Note "No data available"
    Returns an empty dataframe if the requested pair was not cached.
    This should not happen when using whitelisted pairs.

!!! Warning "Warning in hyperopt"
    This option cannot currently be used during hyperopt.

#### *orderbook(pair, maximum)*

``` python
if self.dp:
    if self.dp.runmode.value in ('live', 'dry_run'):
        ob = self.dp.orderbook(metadata['pair'], 1)
        dataframe['best_bid'] = ob['bids'][0][0]
        dataframe['best_ask'] = ob['asks'][0][0]
```

!!! Warning
    The order book is not part of the historic data which means backtesting and hyperopt will not work if this
    method is used.

#### *ticker(pair)*

``` python
if self.dp:
    if self.dp.runmode.value in ('live', 'dry_run'):
        ticker = self.dp.ticker(metadata['pair'])
        dataframe['last_price'] = ticker['last']
        dataframe['volume24h'] = ticker['quoteVolume']
        dataframe['vwap'] = ticker['vwap']
```

!!! Warning
    Although the ticker data structure is a part of the ccxt Unified Interface, the values returned by this method can
    vary for different exchanges. For instance, many exchanges do not return `vwap` values, the FTX exchange
    does not always fill in the `last` field (so it can be None), etc. So you need to carefully verify the ticker
    data returned from the exchange and add appropriate error handling / defaults.

***

## Helper functions

### *merge_informative_pair()*

This method helps you merge an informative pair to a regular dataframe without lookahead bias.
It's there to help you merge the dataframe in a safe and consistent way.

Options:

- Rename the columns for you to create unique columns
- Merge the dataframe without lookahead bias
- Forward-fill (optional)

All columns of the informative dataframe will be available on the returning dataframe in a renamed fashion:

!!! Example "Column renaming"
    Assuming `inf_tf = '1d'` the resulting columns will be:

    ``` python
    'date', 'open', 'high', 'low', 'close', 'rsi'                     # from the original dataframe
    'date_1d', 'open_1d', 'high_1d', 'low_1d', 'close_1d', 'rsi_1d'   # from the informative dataframe
    ```

??? Example "Column renaming - 1h"
    Assuming `inf_tf = '1h'` the resulting columns will be:

    ``` python
    'date', 'open', 'high', 'low', 'close', 'rsi'                     # from the original dataframe
    'date_1h', 'open_1h', 'high_1h', 'low_1h', 'close_1h', 'rsi_1h'   # from the informative dataframe
    ```

??? Example "Custom implementation"
    A custom implementation for this is possible, and can be done as follows:

    ``` python
    # Shift date by 1 candle
    # This is necessary since the data is always the "open date"
    # and a 15m candle starting at 12:15 should not know the close of the 1h candle from 12:00 to 13:00
    minutes = timeframe_to_minutes(inf_tf)
    # Only do this if the timeframes are different:
    informative['date_merge'] = informative["date"] + pd.to_timedelta(minutes, 'm')

    # Rename columns to be unique
    informative.columns = [f"{col}_{inf_tf}" for col in informative.columns]
    # Assuming inf_tf = '1d' - then the columns will now be:
    # date_1d, open_1d, high_1d, low_1d, close_1d, rsi_1d

    # Combine the 2 dataframes
    # all indicators on the informative sample MUST be calculated before this point
    dataframe = pd.merge(dataframe, informative, left_on='date', right_on=f'date_merge_{inf_tf}', how='left')
    # FFill to have the 1d value available in every row throughout the day.
    # Without this, comparisons would only work once per day.
    dataframe = dataframe.ffill()
    ```

!!! Warning "Informative timeframe < timeframe"
    Using informative timeframes smaller than the dataframe timeframe is not recommended with this method, as it will not use any of the additional information this would provide.
    To use the more detailed information properly, more advanced methods should be applied (which are out of scope for freqtrade documentation, as it'll depend on the respective need).

***

### Additional data (Wallets)
## Additional data (Wallets)

The strategy provides access to the `Wallets` object. This contains the current balances on the exchange.

@ -541,7 +624,7 @@ if self.wallets:
    total_eth = self.wallets.get_total('ETH')
```

#### Possible options for Wallets
### Possible options for Wallets

- `get_free(asset)` - currently available balance to trade
- `get_used(asset)` - currently tied up balance (open orders)
@ -549,7 +632,7 @@ if self.wallets:

***

### Additional data (Trades)
## Additional data (Trades)

A history of Trades can be retrieved in the strategy by querying the database.

@ -595,13 +678,13 @@ Sample return value: ETH/BTC had 5 trades, with a total profit of 1.5% (ratio of
!!! Warning
    Trade history is not available during backtesting or hyperopt.

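For illustration, a minimal sketch of such a query from within a strategy callback is shown below. It assumes a live / dry-run context (the trade database is not available during backtesting); `Trade.get_trades()` and the attribute names are taken from the persistence layer touched elsewhere in this commit, and the two-trade lookback is an arbitrary example value.

``` python
from freqtrade.persistence import Trade

if self.config['runmode'].value in ('live', 'dry_run'):
    # Closed trades for the current pair, oldest first
    trades = Trade.get_trades([Trade.pair == metadata['pair'],
                               Trade.is_open.is_(False),
                               ]).order_by(Trade.close_date).all()
    # Example: combined profit ratio of the two most recent closed trades
    recent_profit = sum(t.close_profit or 0 for t in trades[-2:])
```
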
### Prevent trades from happening for a specific pair
## Prevent trades from happening for a specific pair

Freqtrade locks pairs automatically for the current candle (until that candle is over) when a pair is sold, preventing an immediate re-buy of that pair.

Locked pairs will show the message `Pair <pair> is currently locked.`.

#### Locking pairs from within the strategy
### Locking pairs from within the strategy

Sometimes it may be desired to lock a pair after certain events happen (e.g. multiple losing trades in a row).

@ -618,7 +701,7 @@ To verify if a pair is currently locked, use `self.is_pair_locked(pair)`.
!!! Warning
    Locking pairs is not functioning during backtesting.

##### Pair locking example
#### Pair locking example

``` python
from freqtrade.persistence import Trade
@ -640,7 +723,7 @@ if self.config['runmode'].value in ('live', 'dry_run'):
    self.lock_pair(metadata['pair'], until=datetime.now(timezone.utc) + timedelta(hours=12))
```

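As a complement to the example above, a short sketch of honouring an existing lock when generating signals; the early-return pattern shown here is an assumption for illustration, not part of the original documentation:

``` python
def populate_buy_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
    if self.is_pair_locked(metadata['pair']):
        # Pair is currently locked - don't generate new buy signals for it.
        dataframe['buy'] = 0
        return dataframe
    # ... regular buy logic ...
    return dataframe
```
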
### Print created dataframe
## Print created dataframe

To inspect the created dataframe, you can issue a print-statement in either `populate_buy_trend()` or `populate_sell_trend()`.
You may also want to print the pair so it's clear what data is currently shown.

@ -664,36 +747,7 @@ def populate_buy_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:

Printing more than a few rows is also possible (simply use `print(dataframe)` instead of `print(dataframe.tail())`), however not recommended, as that will be very verbose (~500 lines per pair every 5 seconds).

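The full example code was truncated in this diff; the sketch below only illustrates the pattern described above (the pair filter is an arbitrary placeholder):

``` python
def populate_buy_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
    # ... signal logic above ...
    if metadata['pair'] in ('ETH/BTC', 'LTC/BTC'):
        # Print the pair first, so it's clear which dataframe is shown
        print(f"Dataframe for {metadata['pair']}:")
        print(dataframe.tail())
    return dataframe
```
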
### Specify custom strategy location
## Common mistakes when developing strategies

If you want to use a strategy from a different directory you can pass `--strategy-path`

```bash
freqtrade trade --strategy AwesomeStrategy --strategy-path /some/directory
```

### Derived strategies

The strategies can be derived from other strategies. This avoids duplication of your custom strategy code. You can use this technique to override small parts of your main strategy, leaving the rest untouched:

``` python
class MyAwesomeStrategy(IStrategy):
    ...
    stoploss = 0.13
    trailing_stop = False
    # All other attributes and methods are here as they
    # should be in any custom strategy...
    ...

class MyAwesomeStrategy2(MyAwesomeStrategy):
    # Override something
    stoploss = 0.08
    trailing_stop = True
```

Both attributes and methods may be overridden, altering the behavior of the original strategy in any way you need.

### Common mistakes when developing strategies

Backtesting analyzes the whole time-range at once for performance reasons. Because of this, strategy authors need to make sure that strategies do not look-ahead into the future.
This is a common pain-point, which can cause huge differences between backtesting and dry/live runs: such strategies use data which is not available during dry/live runs, so they will perform well during backtesting but will fail / perform badly in real conditions.

@ -705,7 +759,7 @@ The following lists some common patterns which should be avoided to prevent frus
- don't use `dataframe['volume'].mean()`. This uses the full DataFrame for backtesting, including data from the future. Use `dataframe['volume'].rolling(<window>).mean()` instead (see the sketch after this list)
- don't use `.resample('1h')`. This uses the left border of the interval, so moves data from an hour to the start of the hour. Use `.resample('1h', label='right')` instead.
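A minimal illustration of the first point (the 24-candle window is an arbitrary example value):

``` python
# Look-ahead: the mean is computed over the entire backtest dataframe, including future candles.
dataframe['volume_mean'] = dataframe['volume'].mean()

# Safe: at each candle, only the previous 24 candles contribute to the value.
dataframe['volume_mean'] = dataframe['volume'].rolling(24).mean()
```
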
### Further strategy ideas
## Further strategy ideas

To get additional ideas for strategies, head over to our [strategy repository](https://github.com/freqtrade/freqtrade-strategies). Feel free to use them as they are - but results will depend on the current market situation, pairs used, etc. - therefore please backtest the strategy for your exchange/desired pairs first, evaluate carefully, and use at your own risk.
Feel free to use any of them as inspiration for your own strategies.

@ -85,10 +85,44 @@ Analyze a trades dataframe (also used below for plotting)

```python
from freqtrade.data.btanalysis import load_backtest_data
from freqtrade.data.btanalysis import load_backtest_data, load_backtest_stats

# Load backtest results
# if backtest_dir points to a directory, it'll automatically load the last backtest file.
trades = load_backtest_data(config["user_data_dir"] / "backtest_results/backtest-result.json")
backtest_dir = config["user_data_dir"] / "backtest_results"
# backtest_dir can also point to a specific file
# backtest_dir = config["user_data_dir"] / "backtest_results/backtest-result-2020-07-01_20-04-22.json"
```

```python
# You can get the full backtest statistics by using the following command.
# This contains all information used to generate the backtest result.
stats = load_backtest_stats(backtest_dir)

strategy = 'SampleStrategy'
# All statistics are available per strategy, so if `--strategy-list` was used during backtest, this will be reflected here as well.
# Example usages:
print(stats['strategy'][strategy]['results_per_pair'])
# Get pairlist used for this backtest
print(stats['strategy'][strategy]['pairlist'])
# Get market change (average change of all pairs from start to end of the backtest period)
print(stats['strategy'][strategy]['market_change'])
# Maximum drawdown
print(stats['strategy'][strategy]['max_drawdown'])
# Maximum drawdown start and end
print(stats['strategy'][strategy]['drawdown_start'])
print(stats['strategy'][strategy]['drawdown_end'])

# Get strategy comparison (only relevant if multiple strategies were compared)
print(stats['strategy_comparison'])

```

```python
# Load backtested trades as dataframe
trades = load_backtest_data(backtest_dir)

# Show value-counts per pair
trades.groupby("pair")["sell_reason"].value_counts()
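# Illustrative addition (not part of the original notebook): a quick profit summary per pair.
# Assumes the `trades` dataframe loaded above, which contains a `profit_percent` column.
trades.groupby("pair")["profit_percent"].sum().sort_values(ascending=False)
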
@ -54,6 +54,7 @@ official commands. You can ask at any moment for help with `/help`.
| `/stopbuy` | Stops the trader from opening new trades. Gracefully closes open trades according to their rules.
| `/reload_config` | Reloads the configuration file
| `/show_config` | Shows part of the current configuration with relevant settings to operation
| `/logs [limit]` | Show last log messages.
| `/status` | Lists all open trades
| `/status table` | List all open trades in a table format. Pending buy orders are marked with an asterisk (*). Pending sell orders are marked with a double asterisk (**).
| `/trades [limit]` | List all recently closed trades in a table format.

57
docs/windows_installation.md
Normal file
@ -0,0 +1,57 @@
We **strongly** recommend that Windows users use [Docker](docker.md) as this will work much easier and smoother (also more secure).

If that is not possible, try using the Windows Linux subsystem (WSL) - for which the Ubuntu instructions should work.
Otherwise, try the instructions below.

## Install freqtrade manually

!!! Note
    Make sure to use 64bit Windows and 64bit Python to avoid problems with backtesting or hyperopt due to the memory constraints 32bit applications have under Windows.

!!! Hint
    Using the [Anaconda Distribution](https://www.anaconda.com/distribution/) under Windows can greatly help with installation problems. Check out the [Anaconda installation section](installation.md#Anaconda) in this document for more information.

### 1. Clone the git repository

```bash
git clone https://github.com/freqtrade/freqtrade.git
```

### 2. Install ta-lib

Install ta-lib according to the [ta-lib documentation](https://github.com/mrjbq7/ta-lib#windows).

As compiling from source on windows has heavy dependencies (requires a partial visual studio installation), there is also a repository of unofficial precompiled windows Wheels [here](https://www.lfd.uci.edu/~gohlke/pythonlibs/#ta-lib), which needs to be downloaded and installed using `pip install TA_Lib‑0.4.18‑cp38‑cp38‑win_amd64.whl` (make sure to use the version matching your python version).

Freqtrade provides these dependencies for the latest 2 Python versions (3.7 and 3.8) and for 64bit Windows.
Other versions must be downloaded from the above link.

``` powershell
cd \path\freqtrade
python -m venv .env
.env\Scripts\activate.ps1
# optionally install ta-lib from wheel
# If necessary, adjust the filename below to match the downloaded wheel
pip install build_helpers/TA_Lib‑0.4.18‑cp38‑cp38‑win_amd64.whl
pip install -r requirements.txt
pip install -e .
freqtrade
```

!!! Note "Use Powershell"
    The above installation script assumes you're using powershell on a 64bit windows.
    Commands for the legacy CMD windows console may differ.

> Thanks [Owdr](https://github.com/Owdr) for the commands. Source: [Issue #222](https://github.com/freqtrade/freqtrade/issues/222)

### Error during installation on Windows

``` bash
error: Microsoft Visual C++ 14.0 is required. Get it with "Microsoft Visual C++ Build Tools": http://landinghub.visualstudio.com/visual-cpp-build-tools
```

Unfortunately, many packages requiring compilation don't provide a pre-built wheel. It is therefore mandatory to have a C/C++ compiler installed and available for your python environment to use.

The easiest way is to download and install Microsoft Visual Studio Community [here](https://visualstudio.microsoft.com/downloads/) and make sure to install "Common Tools for Visual C++" to enable building C code on Windows. Unfortunately, this is a heavy download / dependency (~4Gb), so you might want to consider WSL or [docker](docker.md) first.
---
|
@ -15,7 +15,7 @@ ARGS_STRATEGY = ["strategy", "strategy_path"]

ARGS_TRADE = ["db_url", "sd_notify", "dry_run"]

ARGS_COMMON_OPTIMIZE = ["timeframe", "timerange",
ARGS_COMMON_OPTIMIZE = ["timeframe", "timerange", "dataformat_ohlcv",
                        "max_open_trades", "stake_amount", "fee"]

ARGS_BACKTEST = ARGS_COMMON_OPTIMIZE + ["position_stacking", "use_max_market_positions",
@ -366,7 +366,7 @@ class Arguments:
        plot_profit_cmd = subparsers.add_parser(
            'plot-profit',
            help='Generate plot showing profits.',
            parents=[_common_parser],
            parents=[_common_parser, _strategy_parser],
        )
        plot_profit_cmd.set_defaults(func=start_plot_profit)
        self._build_args(optionlist=ARGS_PLOT_PROFIT, parser=plot_profit_cmd)
@ -35,8 +35,8 @@ def start_download_data(args: Dict[str, Any]) -> None:
            "Downloading data requires a list of pairs. "
            "Please check the documentation on how to configure this.")

    logger.info(f'About to download pairs: {config["pairs"]}, '
    logger.info(f"About to download pairs: {config['pairs']}, "
                f'intervals: {config["timeframes"]} to {config["datadir"]}')
                f"intervals: {config['timeframes']} to {config['datadir']}")

    pairs_not_available: List[str] = []

@ -51,21 +51,21 @@ def start_download_data(args: Dict[str, Any]) -> None:

        if config.get('download_trades'):
            pairs_not_available = refresh_backtest_trades_data(
                exchange, pairs=config["pairs"], datadir=config['datadir'],
                exchange, pairs=config['pairs'], datadir=config['datadir'],
                timerange=timerange, erase=bool(config.get("erase")),
                timerange=timerange, erase=bool(config.get('erase')),
                data_format=config['dataformat_trades'])

            # Convert downloaded trade data to different timeframes
            convert_trades_to_ohlcv(
                pairs=config["pairs"], timeframes=config["timeframes"],
                pairs=config['pairs'], timeframes=config['timeframes'],
                datadir=config['datadir'], timerange=timerange, erase=bool(config.get("erase")),
                datadir=config['datadir'], timerange=timerange, erase=bool(config.get('erase')),
                data_format_ohlcv=config['dataformat_ohlcv'],
                data_format_trades=config['dataformat_trades'],
            )
        else:
            pairs_not_available = refresh_backtest_ohlcv_data(
                exchange, pairs=config["pairs"], timeframes=config["timeframes"],
                exchange, pairs=config['pairs'], timeframes=config['timeframes'],
                datadir=config['datadir'], timerange=timerange, erase=bool(config.get("erase")),
                datadir=config['datadir'], timerange=timerange, erase=bool(config.get('erase')),
                data_format=config['dataformat_ohlcv'])

    except KeyboardInterrupt:
@ -75,7 +75,7 @@ def start_new_strategy(args: Dict[str, Any]) -> None:
    if args["strategy"] == "DefaultStrategy":
        raise OperationalException("DefaultStrategy is not allowed as name.")

    new_path = config['user_data_dir'] / USERPATH_STRATEGIES / (args["strategy"] + ".py")
    new_path = config['user_data_dir'] / USERPATH_STRATEGIES / (args['strategy'] + '.py')

    if new_path.exists():
        raise OperationalException(f"`{new_path}` already exists. "
@ -125,11 +125,11 @@ def start_new_hyperopt(args: Dict[str, Any]) -> None:

    config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

    if "hyperopt" in args and args["hyperopt"]:
    if 'hyperopt' in args and args['hyperopt']:
        if args["hyperopt"] == "DefaultHyperopt":
        if args['hyperopt'] == 'DefaultHyperopt':
            raise OperationalException("DefaultHyperopt is not allowed as name.")

        new_path = config['user_data_dir'] / USERPATH_HYPEROPTS / (args["hyperopt"] + ".py")
        new_path = config['user_data_dir'] / USERPATH_HYPEROPTS / (args['hyperopt'] + '.py')

        if new_path.exists():
            raise OperationalException(f"`{new_path}` already exists. "
@ -14,7 +14,7 @@ from freqtrade.configuration import setup_utils_configuration
from freqtrade.constants import USERPATH_HYPEROPTS, USERPATH_STRATEGIES
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import (available_exchanges, ccxt_exchanges,
                                market_is_active, symbol_is_pair)
                                market_is_active)
from freqtrade.misc import plural
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
from freqtrade.state import RunMode
@ -163,7 +163,7 @@ def start_list_markets(args: Dict[str, Any], pairs_only: bool = False) -> None:
            tabular_data.append({'Id': v['id'], 'Symbol': v['symbol'],
                                 'Base': v['base'], 'Quote': v['quote'],
                                 'Active': market_is_active(v),
                                 **({'Is pair': symbol_is_pair(v['symbol'])}
                                 **({'Is pair': exchange.market_is_tradable(v)}
                                    if not pairs_only else {})})

    if (args.get('print_one_column', False) or
@ -54,7 +54,7 @@ class Configuration:
        :param files: List of file paths
        :return: configuration dictionary
        """
        c = Configuration({"config": files}, RunMode.OTHER)
        c = Configuration({'config': files}, RunMode.OTHER)
        return c.get_config()

    def load_from_files(self, files: List[str]) -> Dict[str, Any]:
@ -123,10 +123,10 @@ class Configuration:
        the -v/--verbose, --logfile options
        """
        # Log level
        config.update({'verbosity': self.args.get("verbosity", 0)})
        config.update({'verbosity': self.args.get('verbosity', 0)})

        if 'logfile' in self.args and self.args["logfile"]:
        if 'logfile' in self.args and self.args['logfile']:
            config.update({'logfile': self.args["logfile"]})
            config.update({'logfile': self.args['logfile']})

        setup_logging(config)

@ -149,22 +149,22 @@ class Configuration:
    def _process_common_options(self, config: Dict[str, Any]) -> None:

        # Set strategy if not specified in config and or if it's non default
        if self.args.get("strategy") or not config.get('strategy'):
        if self.args.get('strategy') or not config.get('strategy'):
            config.update({'strategy': self.args.get("strategy")})
            config.update({'strategy': self.args.get('strategy')})

        self._args_to_config(config, argname='strategy_path',
                             logstring='Using additional Strategy lookup path: {}')

        if ('db_url' in self.args and self.args["db_url"] and
        if ('db_url' in self.args and self.args['db_url'] and
                self.args["db_url"] != constants.DEFAULT_DB_PROD_URL):
                self.args['db_url'] != constants.DEFAULT_DB_PROD_URL):
            config.update({'db_url': self.args["db_url"]})
            config.update({'db_url': self.args['db_url']})
            logger.info('Parameter --db-url detected ...')

        if config.get('forcebuy_enable', False):
            logger.warning('`forcebuy` RPC message enabled.')

        # Support for sd_notify
        if 'sd_notify' in self.args and self.args["sd_notify"]:
        if 'sd_notify' in self.args and self.args['sd_notify']:
            config['internals'].update({'sd_notify': True})

    def _process_datadir_options(self, config: Dict[str, Any]) -> None:
@ -173,24 +173,24 @@ class Configuration:
        --user-data, --datadir
        """
        # Check exchange parameter here - otherwise `datadir` might be wrong.
        if "exchange" in self.args and self.args["exchange"]:
        if 'exchange' in self.args and self.args['exchange']:
            config['exchange']['name'] = self.args["exchange"]
            config['exchange']['name'] = self.args['exchange']
            logger.info(f"Using exchange {config['exchange']['name']}")

        if 'pair_whitelist' not in config['exchange']:
            config['exchange']['pair_whitelist'] = []

        if 'user_data_dir' in self.args and self.args["user_data_dir"]:
        if 'user_data_dir' in self.args and self.args['user_data_dir']:
            config.update({'user_data_dir': self.args["user_data_dir"]})
            config.update({'user_data_dir': self.args['user_data_dir']})
        elif 'user_data_dir' not in config:
            # Default to cwd/user_data (legacy option ...)
            config.update({'user_data_dir': str(Path.cwd() / "user_data")})
            config.update({'user_data_dir': str(Path.cwd() / 'user_data')})

        # reset to user_data_dir so this contains the absolute path.
        config['user_data_dir'] = create_userdata_dir(config['user_data_dir'], create_dir=False)
        logger.info('Using user-data directory: %s ...', config['user_data_dir'])

        config.update({'datadir': create_datadir(config, self.args.get("datadir", None))})
        config.update({'datadir': create_datadir(config, self.args.get('datadir', None))})
        logger.info('Using data directory: %s ...', config.get('datadir'))

        if self.args.get('exportfilename'):
@ -199,7 +199,7 @@ class Configuration:
            config['exportfilename'] = Path(config['exportfilename'])
        else:
            config['exportfilename'] = (config['user_data_dir']
                                        / 'backtest_results/backtest-result.json')
                                        / 'backtest_results')

    def _process_optimize_options(self, config: Dict[str, Any]) -> None:

@ -219,8 +219,8 @@ class Configuration:
            config.update({'use_max_market_positions': False})
            logger.info('Parameter --disable-max-market-positions detected ...')
            logger.info('max_open_trades set to unlimited ...')
        elif 'max_open_trades' in self.args and self.args["max_open_trades"]:
        elif 'max_open_trades' in self.args and self.args['max_open_trades']:
            config.update({'max_open_trades': self.args["max_open_trades"]})
            config.update({'max_open_trades': self.args['max_open_trades']})
            logger.info('Parameter --max-open-trades detected, '
                        'overriding max_open_trades to: %s ...', config.get('max_open_trades'))
        elif config['runmode'] in NON_UTIL_MODES:
@ -447,12 +447,12 @@ class Configuration:
            config['pairs'].sort()
            return

        if "config" in self.args and self.args["config"]:
        if 'config' in self.args and self.args['config']:
            logger.info("Using pairlist from configuration.")
            config['pairs'] = config.get('exchange', {}).get('pair_whitelist')
        else:
            # Fall back to /dl_path/pairs.json
            pairs_file = config['datadir'] / "pairs.json"
            pairs_file = config['datadir'] / 'pairs.json'
            if pairs_file.exists():
                with pairs_file.open('r') as f:
                    config['pairs'] = json_load(f)
@ -24,14 +24,17 @@ ORDERTIF_POSSIBILITIES = ['gtc', 'fok', 'ioc']
AVAILABLE_PAIRLISTS = ['StaticPairList', 'VolumePairList',
                       'AgeFilter', 'PrecisionFilter', 'PriceFilter',
                       'ShuffleFilter', 'SpreadFilter']
AVAILABLE_DATAHANDLERS = ['json', 'jsongz']
AVAILABLE_DATAHANDLERS = ['json', 'jsongz', 'hdf5']
DRY_RUN_WALLET = 1000
DATETIME_PRINT_FORMAT = '%Y-%m-%d %H:%M:%S'
MATH_CLOSE_PREC = 1e-14  # Precision used for float comparisons
DEFAULT_DATAFRAME_COLUMNS = ['date', 'open', 'high', 'low', 'close', 'volume']
# Don't modify sequence of DEFAULT_TRADES_COLUMNS
# it has wide consequences for stored trades files
DEFAULT_TRADES_COLUMNS = ['timestamp', 'id', 'type', 'side', 'price', 'amount', 'cost']

LAST_BT_RESULT_FN = '.last_result.json'

USERPATH_HYPEROPTS = 'hyperopts'
USERPATH_STRATEGIES = 'strategies'
USERPATH_NOTEBOOKS = 'notebooks'

@ -335,9 +338,12 @@ SCHEMA_MINIMAL_REQUIRED = [

CANCEL_REASON = {
    "TIMEOUT": "cancelled due to timeout",
    "PARTIALLY_FILLED": "partially filled - keeping order open",
    "PARTIALLY_FILLED_KEEP_OPEN": "partially filled - keeping order open",
    "PARTIALLY_FILLED": "partially filled",
    "FULLY_CANCELLED": "fully cancelled",
    "ALL_CANCELLED": "cancelled (all unfilled and partially filled open orders cancelled)",
    "CANCELLED_ON_EXCHANGE": "cancelled on exchange",
    "FORCE_SELL": "forcesold",
}

# List of pairs with their timeframes
@ -3,52 +3,123 @@ Helpers when analyzing backtest data
"""
import logging
from pathlib import Path
from typing import Dict, Union, Tuple
from typing import Dict, Union, Tuple, Any, Optional

import numpy as np
import pandas as pd
from datetime import timezone

from freqtrade import persistence
from freqtrade.constants import LAST_BT_RESULT_FN
from freqtrade.misc import json_load
from freqtrade.persistence import Trade

logger = logging.getLogger(__name__)

# must align with columns in backtest.py
BT_DATA_COLUMNS = ["pair", "profit_percent", "open_time", "close_time", "index", "duration",
BT_DATA_COLUMNS = ["pair", "profit_percent", "open_date", "close_date", "index", "trade_duration",
                   "open_rate", "close_rate", "open_at_end", "sell_reason"]


def load_backtest_data(filename: Union[Path, str]) -> pd.DataFrame:
    """
    Load backtest data file.
    :param filename: pathlib.Path object, or string pointing to the file.
    :return: a dataframe with the analysis results
    """
    if isinstance(filename, str):
        filename = Path(filename)
    if not filename.is_file():
        raise ValueError(f"File {filename} does not exist.")
    with filename.open() as file:
        data = json_load(file)
    df = pd.DataFrame(data, columns=BT_DATA_COLUMNS)
    df['open_time'] = pd.to_datetime(df['open_time'],
                                     unit='s',
                                     utc=True,
                                     infer_datetime_format=True
                                     )
    df['close_time'] = pd.to_datetime(df['close_time'],
                                      unit='s',
                                      utc=True,
                                      infer_datetime_format=True
                                      )
    df['profit'] = df['close_rate'] - df['open_rate']
    df = df.sort_values("open_time").reset_index(drop=True)


def get_latest_backtest_filename(directory: Union[Path, str]) -> str:
    """
    Get latest backtest export based on '.last_result.json'.
    :param directory: Directory to search for last result
    :return: string containing the filename of the latest backtest result
    :raises: ValueError in the following cases:
        * Directory does not exist
        * `directory/.last_result.json` does not exist
        * `directory/.last_result.json` has the wrong content
    """
    if isinstance(directory, str):
        directory = Path(directory)
    if not directory.is_dir():
        raise ValueError(f"Directory '{directory}' does not exist.")
    filename = directory / LAST_BT_RESULT_FN

    if not filename.is_file():
        raise ValueError(
            f"Directory '{directory}' does not seem to contain backtest statistics yet.")

    with filename.open() as file:
        data = json_load(file)

    if 'latest_backtest' not in data:
        raise ValueError(f"Invalid '{LAST_BT_RESULT_FN}' format.")

    return data['latest_backtest']


def load_backtest_stats(filename: Union[Path, str]) -> Dict[str, Any]:
    """
    Load backtest statistics file.
    :param filename: pathlib.Path object, or string pointing to the file.
    :return: a dictionary containing the resulting file.
    """
    if isinstance(filename, str):
        filename = Path(filename)
    if filename.is_dir():
        filename = filename / get_latest_backtest_filename(filename)
    if not filename.is_file():
        raise ValueError(f"File {filename} does not exist.")
    logger.info(f"Loading backtest result from {filename}")
    with filename.open() as file:
        data = json_load(file)

    return data


def load_backtest_data(filename: Union[Path, str], strategy: Optional[str] = None) -> pd.DataFrame:
    """
    Load backtest data file.
    :param filename: pathlib.Path object, or string pointing to a file or directory
    :param strategy: Strategy to load - mainly relevant for multi-strategy backtests
                     Can also serve as protection to load the correct result.
    :return: a dataframe with the analysis results
    :raise: ValueError if loading goes wrong.
    """
    data = load_backtest_stats(filename)
    if not isinstance(data, list):
        # new, nested format
        if 'strategy' not in data:
            raise ValueError("Unknown dataformat.")

        if not strategy:
            if len(data['strategy']) == 1:
                strategy = list(data['strategy'].keys())[0]
            else:
                raise ValueError("Detected backtest result with more than one strategy. "
                                 "Please specify a strategy.")

        if strategy not in data['strategy']:
            raise ValueError(f"Strategy {strategy} not available in the backtest result.")

        data = data['strategy'][strategy]['trades']
        df = pd.DataFrame(data)
        df['open_date'] = pd.to_datetime(df['open_date'],
                                         utc=True,
                                         infer_datetime_format=True
                                         )
        df['close_date'] = pd.to_datetime(df['close_date'],
                                          utc=True,
                                          infer_datetime_format=True
                                          )
    else:
        # old format - only with lists.
        df = pd.DataFrame(data, columns=BT_DATA_COLUMNS)

        df['open_date'] = pd.to_datetime(df['open_date'],
                                         unit='s',
                                         utc=True,
                                         infer_datetime_format=True
                                         )
        df['close_date'] = pd.to_datetime(df['close_date'],
                                          unit='s',
                                          utc=True,
                                          infer_datetime_format=True
                                          )
    df['profit_abs'] = df['close_rate'] - df['open_rate']
    df = df.sort_values("open_date").reset_index(drop=True)
    return df
@ -62,9 +133,9 @@ def analyze_trade_parallelism(results: pd.DataFrame, timeframe: str) -> pd.DataF
    """
    from freqtrade.exchange import timeframe_to_minutes
    timeframe_min = timeframe_to_minutes(timeframe)
    dates = [pd.Series(pd.date_range(row[1].open_time, row[1].close_time,
    dates = [pd.Series(pd.date_range(row[1]['open_date'], row[1]['close_date'],
                                     freq=f"{timeframe_min}min"))
             for row in results[['open_time', 'close_time']].iterrows()]
             for row in results[['open_date', 'close_date']].iterrows()]
    deltas = [len(x) for x in dates]
    dates = pd.Series(pd.concat(dates).values, name='date')
    df2 = pd.DataFrame(np.repeat(results.values, deltas, axis=0), columns=results.columns)

@ -90,21 +161,26 @@ def evaluate_result_multi(results: pd.DataFrame, timeframe: str,
    return df_final[df_final['open_trades'] > max_open_trades]


def load_trades_from_db(db_url: str) -> pd.DataFrame:
def load_trades_from_db(db_url: str, strategy: Optional[str] = None) -> pd.DataFrame:
    """
    Load trades from a DB (using dburl)
    :param db_url: Sqlite url (default format sqlite:///tradesv3.dry-run.sqlite)
    :param strategy: Strategy to load - mainly relevant for multi-strategy backtests
                     Can also serve as protection to load the correct result.
    :return: Dataframe containing Trades
    """
    trades: pd.DataFrame = pd.DataFrame([], columns=BT_DATA_COLUMNS)
    persistence.init(db_url, clean_open_orders=False)

    columns = ["pair", "open_time", "close_time", "profit", "profit_percent",
    columns = ["pair", "open_date", "close_date", "profit", "profit_percent",
               "open_rate", "close_rate", "amount", "duration", "sell_reason",
               "open_rate", "close_rate", "amount", "trade_duration", "sell_reason",
               "fee_open", "fee_close", "open_rate_requested", "close_rate_requested",
               "stake_amount", "max_rate", "min_rate", "id", "exchange",
               "stop_loss", "initial_stop_loss", "strategy", "timeframe"]

    filters = []
    if strategy:
        filters.append(Trade.strategy == strategy)

    trades = pd.DataFrame([(t.pair,
                            t.open_date.replace(tzinfo=timezone.utc),
                            t.close_date.replace(tzinfo=timezone.utc) if t.close_date else None,
@ -123,16 +199,16 @@ def load_trades_from_db(db_url: str) -> pd.DataFrame:
                            t.stop_loss, t.initial_stop_loss,
                            t.strategy, t.timeframe
                            )
                           for t in Trade.get_trades().all()],
                           for t in Trade.get_trades(filters).all()],
                          columns=columns)

    return trades


def load_trades(source: str, db_url: str, exportfilename: Path,
                no_trades: bool = False) -> pd.DataFrame:
                no_trades: bool = False, strategy: Optional[str] = None) -> pd.DataFrame:
    """
    Based on configuration option "trade_source":
    Based on configuration option 'trade_source':
    * loads data from DB (using `db_url`)
    * loads data from backtestfile (using `exportfilename`)
    :param source: "DB" or "file" - specify source to load from
@ -148,7 +224,7 @@ def load_trades(source: str, db_url: str, exportfilename: Path,
    if source == "DB":
        return load_trades_from_db(db_url)
    elif source == "file":
        return load_backtest_data(exportfilename)
        return load_backtest_data(exportfilename, strategy)


def extract_trades_of_period(dataframe: pd.DataFrame, trades: pd.DataFrame,
@ -163,11 +239,31 @@ def extract_trades_of_period(dataframe: pd.DataFrame, trades: pd.DataFrame,
    else:
        trades_start = dataframe.iloc[0]['date']
        trades_stop = dataframe.iloc[-1]['date']
    trades = trades.loc[(trades['open_time'] >= trades_start) &
    trades = trades.loc[(trades['open_date'] >= trades_start) &
                        (trades['close_time'] <= trades_stop)]
                        (trades['close_date'] <= trades_stop)]
    return trades
def calculate_market_change(data: Dict[str, pd.DataFrame], column: str = "close") -> float:
|
||||||
|
"""
|
||||||
|
Calculate market change based on "column".
|
||||||
|
Calculation is done by taking the first non-null and the last non-null element of each column
|
||||||
|
and calculating the pctchange as "(last - first) / first".
|
||||||
|
Then the results per pair are combined as mean.
|
||||||
|
|
||||||
|
:param data: Dict of Dataframes, dict key should be pair.
|
||||||
|
:param column: Column in the original dataframes to use
|
||||||
|
:return:
|
||||||
|
"""
|
||||||
|
tmp_means = []
|
||||||
|
for pair, df in data.items():
|
||||||
|
start = df[column].dropna().iloc[0]
|
||||||
|
end = df[column].dropna().iloc[-1]
|
||||||
|
tmp_means.append((end - start) / start)
|
||||||
|
|
||||||
|
return np.mean(tmp_means)
|
||||||
|
|
||||||
|
|
||||||
def combine_dataframes_with_mean(data: Dict[str, pd.DataFrame],
|
def combine_dataframes_with_mean(data: Dict[str, pd.DataFrame],
|
||||||
column: str = "close") -> pd.DataFrame:
|
column: str = "close") -> pd.DataFrame:
|
||||||
"""
|
"""
|
||||||
@ -190,7 +286,7 @@ def create_cum_profit(df: pd.DataFrame, trades: pd.DataFrame, col_name: str,
|
|||||||
"""
|
"""
|
||||||
Adds a column `col_name` with the cumulative profit for the given trades array.
|
Adds a column `col_name` with the cumulative profit for the given trades array.
|
||||||
:param df: DataFrame with date index
|
:param df: DataFrame with date index
|
||||||
:param trades: DataFrame containing trades (requires columns close_time and profit_percent)
|
:param trades: DataFrame containing trades (requires columns close_date and profit_percent)
|
||||||
:param col_name: Column name that will be assigned the results
|
:param col_name: Column name that will be assigned the results
|
||||||
:param timeframe: Timeframe used during the operations
|
:param timeframe: Timeframe used during the operations
|
||||||
:return: Returns df with one additional column, col_name, containing the cumulative profit.
|
:return: Returns df with one additional column, col_name, containing the cumulative profit.
|
||||||
@ -201,7 +297,7 @@ def create_cum_profit(df: pd.DataFrame, trades: pd.DataFrame, col_name: str,
|
|||||||
from freqtrade.exchange import timeframe_to_minutes
|
from freqtrade.exchange import timeframe_to_minutes
|
||||||
timeframe_minutes = timeframe_to_minutes(timeframe)
|
timeframe_minutes = timeframe_to_minutes(timeframe)
|
||||||
# Resample to timeframe to make sure trades match candles
|
# Resample to timeframe to make sure trades match candles
|
||||||
_trades_sum = trades.resample(f'{timeframe_minutes}min', on='close_time'
|
_trades_sum = trades.resample(f'{timeframe_minutes}min', on='close_date'
|
||||||
)[['profit_percent']].sum()
|
)[['profit_percent']].sum()
|
||||||
df.loc[:, col_name] = _trades_sum.cumsum()
|
df.loc[:, col_name] = _trades_sum.cumsum()
|
||||||
# Set first value to 0
|
# Set first value to 0
|
||||||
@@ -211,13 +307,13 @@ def create_cum_profit(df: pd.DataFrame, trades: pd.DataFrame, col_name: str,
     return df


-def calculate_max_drawdown(trades: pd.DataFrame, *, date_col: str = 'close_time',
+def calculate_max_drawdown(trades: pd.DataFrame, *, date_col: str = 'close_date',
                            value_col: str = 'profit_percent'
                            ) -> Tuple[float, pd.Timestamp, pd.Timestamp]:
     """
     Calculate max drawdown and the corresponding close dates
-    :param trades: DataFrame containing trades (requires columns close_time and profit_percent)
-    :param date_col: Column in DataFrame to use for dates (defaults to 'close_time')
+    :param trades: DataFrame containing trades (requires columns close_date and profit_percent)
+    :param date_col: Column in DataFrame to use for dates (defaults to 'close_date')
     :param value_col: Column in DataFrame to use for values (defaults to 'profit_percent')
     :return: Tuple (float, highdate, lowdate) with absolute max drawdown, high and low time
     :raise: ValueError if trade-dataframe was found empty.
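The drawdown helper above now keys on close_date. As a rough sketch of how such a calculation typically works (this mirrors the usual cumsum/cummax approach on a toy trades frame and is not a verbatim copy of the implementation):

import pandas as pd

trades = pd.DataFrame({
    'close_date': pd.date_range('2020-01-01', periods=4, freq='D'),
    'profit_percent': [0.05, -0.10, 0.02, -0.03],
})
cumulative = trades.sort_values('close_date')['profit_percent'].cumsum()
high_water = cumulative.cummax()
drawdown = high_water - cumulative
print(drawdown.max())  # absolute max drawdown, 0.11 in this toy series
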
@@ -255,7 +255,8 @@ def convert_ohlcv_format(config: Dict[str, Any], convert_from: str, convert_to:
                                  drop_incomplete=False,
                                  startup_candles=0)
         logger.info(f"Converting {len(data)} candles for {pair}")
-        trg.ohlcv_store(pair=pair, timeframe=timeframe, data=data)
-        if erase and convert_from != convert_to:
-            logger.info(f"Deleting source data for {pair} / {timeframe}")
-            src.ohlcv_purge(pair=pair, timeframe=timeframe)
+        if len(data) > 0:
+            trg.ohlcv_store(pair=pair, timeframe=timeframe, data=data)
+            if erase and convert_from != convert_to:
+                logger.info(f"Deleting source data for {pair} / {timeframe}")
+                src.ohlcv_purge(pair=pair, timeframe=timeframe)
@@ -39,6 +39,12 @@ class DataProvider:
         """
         self.__cached_pairs[(pair, timeframe)] = (dataframe, Arrow.utcnow().datetime)

+    def add_pairlisthandler(self, pairlists) -> None:
+        """
+        Allow adding pairlisthandler after initialization
+        """
+        self._pairlists = pairlists
+
     def refresh(self,
                 pairlist: ListPairsWithTimeframes,
                 helping_pairs: ListPairsWithTimeframes = None) -> None:
freqtrade/data/history/hdf5datahandler.py (new file, 211 lines)
@@ -0,0 +1,211 @@
import logging
import re
from pathlib import Path
from typing import List, Optional

import pandas as pd

from freqtrade import misc
from freqtrade.configuration import TimeRange
from freqtrade.constants import (DEFAULT_DATAFRAME_COLUMNS,
                                 DEFAULT_TRADES_COLUMNS,
                                 ListPairsWithTimeframes)

from .idatahandler import IDataHandler, TradeList

logger = logging.getLogger(__name__)


class HDF5DataHandler(IDataHandler):

    _columns = DEFAULT_DATAFRAME_COLUMNS

    @classmethod
    def ohlcv_get_available_data(cls, datadir: Path) -> ListPairsWithTimeframes:
        """
        Returns a list of all pairs with ohlcv data available in this datadir
        :param datadir: Directory to search for ohlcv files
        :return: List of Tuples of (pair, timeframe)
        """
        _tmp = [re.search(r'^([a-zA-Z_]+)\-(\d+\S+)(?=.h5)', p.name)
                for p in datadir.glob("*.h5")]
        return [(match[1].replace('_', '/'), match[2]) for match in _tmp
                if match and len(match.groups()) > 1]

    @classmethod
    def ohlcv_get_pairs(cls, datadir: Path, timeframe: str) -> List[str]:
        """
        Returns a list of all pairs with ohlcv data available in this datadir
        for the specified timeframe
        :param datadir: Directory to search for ohlcv files
        :param timeframe: Timeframe to search pairs for
        :return: List of Pairs
        """

        _tmp = [re.search(r'^(\S+)(?=\-' + timeframe + '.h5)', p.name)
                for p in datadir.glob(f"*{timeframe}.h5")]
        # Check if regex found something and only return these results
        return [match[0].replace('_', '/') for match in _tmp if match]

    def ohlcv_store(self, pair: str, timeframe: str, data: pd.DataFrame) -> None:
        """
        Store data in hdf5 file.
        :param pair: Pair - used to generate filename
        :timeframe: Timeframe - used to generate filename
        :data: Dataframe containing OHLCV data
        :return: None
        """
        key = self._pair_ohlcv_key(pair, timeframe)
        _data = data.copy()

        filename = self._pair_data_filename(self._datadir, pair, timeframe)

        ds = pd.HDFStore(filename, mode='a', complevel=9, complib='blosc')
        ds.put(key, _data.loc[:, self._columns], format='table', data_columns=['date'])

        ds.close()

    def _ohlcv_load(self, pair: str, timeframe: str,
                    timerange: Optional[TimeRange] = None) -> pd.DataFrame:
        """
        Internal method used to load data for one pair from disk.
        Implements the loading and conversion to a Pandas dataframe.
        Timerange trimming and dataframe validation happens outside of this method.
        :param pair: Pair to load data
        :param timeframe: Timeframe (e.g. "5m")
        :param timerange: Limit data to be loaded to this timerange.
                        Optionally implemented by subclasses to avoid loading
                        all data where possible.
        :return: DataFrame with ohlcv data, or empty DataFrame
        """
        key = self._pair_ohlcv_key(pair, timeframe)
        filename = self._pair_data_filename(self._datadir, pair, timeframe)

        if not filename.exists():
            return pd.DataFrame(columns=self._columns)
        where = []
        if timerange:
            if timerange.starttype == 'date':
                where.append(f"date >= Timestamp({timerange.startts * 1e9})")
            if timerange.stoptype == 'date':
                where.append(f"date < Timestamp({timerange.stopts * 1e9})")

        pairdata = pd.read_hdf(filename, key=key, mode="r", where=where)

        if list(pairdata.columns) != self._columns:
            raise ValueError("Wrong dataframe format")
        pairdata = pairdata.astype(dtype={'open': 'float', 'high': 'float',
                                          'low': 'float', 'close': 'float', 'volume': 'float'})
        return pairdata

    def ohlcv_purge(self, pair: str, timeframe: str) -> bool:
        """
        Remove data for this pair
        :param pair: Delete data for this pair.
        :param timeframe: Timeframe (e.g. "5m")
        :return: True when deleted, false if file did not exist.
        """
        filename = self._pair_data_filename(self._datadir, pair, timeframe)
        if filename.exists():
            filename.unlink()
            return True
        return False

    def ohlcv_append(self, pair: str, timeframe: str, data: pd.DataFrame) -> None:
        """
        Append data to existing data structures
        :param pair: Pair
        :param timeframe: Timeframe this ohlcv data is for
        :param data: Data to append.
        """
        raise NotImplementedError()

    @classmethod
    def trades_get_pairs(cls, datadir: Path) -> List[str]:
        """
        Returns a list of all pairs for which trade data is available in this
        :param datadir: Directory to search for ohlcv files
        :return: List of Pairs
        """
        _tmp = [re.search(r'^(\S+)(?=\-trades.h5)', p.name)
                for p in datadir.glob("*trades.h5")]
        # Check if regex found something and only return these results to avoid exceptions.
        return [match[0].replace('_', '/') for match in _tmp if match]

    def trades_store(self, pair: str, data: TradeList) -> None:
        """
        Store trades data (list of Dicts) to file
        :param pair: Pair - used for filename
        :param data: List of Lists containing trade data,
                     column sequence as in DEFAULT_TRADES_COLUMNS
        """
        key = self._pair_trades_key(pair)

        ds = pd.HDFStore(self._pair_trades_filename(self._datadir, pair),
                         mode='a', complevel=9, complib='blosc')
        ds.put(key, pd.DataFrame(data, columns=DEFAULT_TRADES_COLUMNS),
               format='table', data_columns=['timestamp'])
        ds.close()

    def trades_append(self, pair: str, data: TradeList):
        """
        Append data to existing files
        :param pair: Pair - used for filename
        :param data: List of Lists containing trade data,
                     column sequence as in DEFAULT_TRADES_COLUMNS
        """
        raise NotImplementedError()

    def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> TradeList:
        """
        Load a pair from h5 file.
        :param pair: Load trades for this pair
        :param timerange: Timerange to load trades for - currently not implemented
        :return: List of trades
        """
        key = self._pair_trades_key(pair)
        filename = self._pair_trades_filename(self._datadir, pair)

        if not filename.exists():
            return []
        where = []
        if timerange:
            if timerange.starttype == 'date':
                where.append(f"timestamp >= {timerange.startts * 1e3}")
            if timerange.stoptype == 'date':
                where.append(f"timestamp < {timerange.stopts * 1e3}")

        trades = pd.read_hdf(filename, key=key, mode="r", where=where)
        return trades.values.tolist()

    def trades_purge(self, pair: str) -> bool:
        """
        Remove data for this pair
        :param pair: Delete data for this pair.
        :return: True when deleted, false if file did not exist.
        """
        filename = self._pair_trades_filename(self._datadir, pair)
        if filename.exists():
            filename.unlink()
            return True
        return False

    @classmethod
    def _pair_ohlcv_key(cls, pair: str, timeframe: str) -> str:
        return f"{pair}/ohlcv/tf_{timeframe}"

    @classmethod
    def _pair_trades_key(cls, pair: str) -> str:
        return f"{pair}/trades"

    @classmethod
    def _pair_data_filename(cls, datadir: Path, pair: str, timeframe: str) -> Path:
        pair_s = misc.pair_to_filename(pair)
        filename = datadir.joinpath(f'{pair_s}-{timeframe}.h5')
        return filename

    @classmethod
    def _pair_trades_filename(cls, datadir: Path, pair: str) -> Path:
        pair_s = misc.pair_to_filename(pair)
        filename = datadir.joinpath(f'{pair_s}-trades.h5')
        return filename
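A small usage sketch of the new handler, assuming it is constructed with the data directory like the other handlers and that a user_data/data/binance directory with BTC_USDT-5m.h5 already exists (paths and pair are illustrative):

from pathlib import Path

from freqtrade.data.history.hdf5datahandler import HDF5DataHandler

datadir = Path("user_data/data/binance")
# List (pair, timeframe) tuples for which *.h5 candle files exist
print(HDF5DataHandler.ohlcv_get_available_data(datadir))

dh = HDF5DataHandler(datadir)
# Internal loader used by the framework; returns an empty dataframe if the file is missing
df = dh._ohlcv_load("BTC/USDT", "5m")
print(df.tail())
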
@@ -9,7 +9,8 @@ from pandas import DataFrame

 from freqtrade.configuration import TimeRange
 from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS
-from freqtrade.data.converter import (ohlcv_to_dataframe,
+from freqtrade.data.converter import (clean_ohlcv_dataframe,
+                                      ohlcv_to_dataframe,
                                       trades_remove_duplicates,
                                       trades_to_ohlcv)
 from freqtrade.data.history.idatahandler import IDataHandler, get_datahandler
@@ -202,7 +203,10 @@ def _download_pair_history(datadir: Path,
         if data.empty:
             data = new_dataframe
         else:
-            data = data.append(new_dataframe)
+            # Run cleaning again to ensure there were no duplicate candles
+            # Especially between existing and new data.
+            data = clean_ohlcv_dataframe(data.append(new_dataframe), timeframe, pair,
+                                         fill_missing=False, drop_incomplete=False)

         logger.debug("New Start: %s",
                      f"{data.iloc[0]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')
@@ -50,9 +50,7 @@ class IDataHandler(ABC):
     @abstractmethod
     def ohlcv_store(self, pair: str, timeframe: str, data: DataFrame) -> None:
         """
-        Store data in json format "values".
-            format looks as follows:
-            [[<date>,<open>,<high>,<low>,<close>]]
+        Store ohlcv data.
         :param pair: Pair - used to generate filename
         :timeframe: Timeframe - used to generate filename
         :data: Dataframe containing OHLCV data
@@ -239,6 +237,9 @@ def get_datahandlerclass(datatype: str) -> Type[IDataHandler]:
     elif datatype == 'jsongz':
         from .jsondatahandler import JsonGzDataHandler
         return JsonGzDataHandler
+    elif datatype == 'hdf5':
+        from .hdf5datahandler import HDF5DataHandler
+        return HDF5DataHandler
     else:
         raise ValueError(f"No datahandler for datatype {datatype} available.")

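With that mapping in place, the handler can be obtained through the usual factory. A minimal sketch, assuming get_datahandler accepts the data directory and a data-format string as it does for the json handlers:

from pathlib import Path

from freqtrade.data.history.idatahandler import get_datahandler

# 'hdf5' should now resolve to the new handler class
dh = get_datahandler(Path("user_data/data/binance"), data_format="hdf5")
print(type(dh).__name__)  # HDF5DataHandler
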
@ -9,7 +9,7 @@ import utils_find_1st as utf1st
|
|||||||
from pandas import DataFrame
|
from pandas import DataFrame
|
||||||
|
|
||||||
from freqtrade.configuration import TimeRange
|
from freqtrade.configuration import TimeRange
|
||||||
from freqtrade.constants import UNLIMITED_STAKE_AMOUNT
|
from freqtrade.constants import UNLIMITED_STAKE_AMOUNT, DATETIME_PRINT_FORMAT
|
||||||
from freqtrade.exceptions import OperationalException
|
from freqtrade.exceptions import OperationalException
|
||||||
from freqtrade.data.history import get_timerange, load_data, refresh_data
|
from freqtrade.data.history import get_timerange, load_data, refresh_data
|
||||||
from freqtrade.strategy.interface import SellType
|
from freqtrade.strategy.interface import SellType
|
||||||
@ -121,12 +121,9 @@ class Edge:
|
|||||||
|
|
||||||
# Print timeframe
|
# Print timeframe
|
||||||
min_date, max_date = get_timerange(preprocessed)
|
min_date, max_date = get_timerange(preprocessed)
|
||||||
logger.info(
|
logger.info(f'Measuring data from {min_date.strftime(DATETIME_PRINT_FORMAT)} '
|
||||||
'Measuring data from %s up to %s (%s days) ...',
|
f'up to {max_date.strftime(DATETIME_PRINT_FORMAT)} '
|
||||||
min_date.isoformat(),
|
f'({(max_date - min_date).days} days)..')
|
||||||
max_date.isoformat(),
|
|
||||||
(max_date - min_date).days
|
|
||||||
)
|
|
||||||
headers = ['date', 'buy', 'open', 'close', 'sell', 'high', 'low']
|
headers = ['date', 'buy', 'open', 'close', 'sell', 'high', 'low']
|
||||||
|
|
||||||
trades: list = []
|
trades: list = []
|
||||||
@ -240,7 +237,7 @@ class Edge:
|
|||||||
# All returned values are relative, they are defined as ratios.
|
# All returned values are relative, they are defined as ratios.
|
||||||
stake = 0.015
|
stake = 0.015
|
||||||
|
|
||||||
result['trade_duration'] = result['close_time'] - result['open_time']
|
result['trade_duration'] = result['close_date'] - result['open_date']
|
||||||
|
|
||||||
result['trade_duration'] = result['trade_duration'].map(
|
result['trade_duration'] = result['trade_duration'].map(
|
||||||
lambda x: int(x.total_seconds() / 60))
|
lambda x: int(x.total_seconds() / 60))
|
||||||
@ -430,10 +427,8 @@ class Edge:
|
|||||||
'stoploss': stoploss,
|
'stoploss': stoploss,
|
||||||
'profit_ratio': '',
|
'profit_ratio': '',
|
||||||
'profit_abs': '',
|
'profit_abs': '',
|
||||||
'open_time': date_column[open_trade_index],
|
'open_date': date_column[open_trade_index],
|
||||||
'close_time': date_column[exit_index],
|
'close_date': date_column[exit_index],
|
||||||
'open_index': start_point + open_trade_index,
|
|
||||||
'close_index': start_point + exit_index,
|
|
||||||
'trade_duration': '',
|
'trade_duration': '',
|
||||||
'open_rate': round(open_price, 15),
|
'open_rate': round(open_price, 15),
|
||||||
'close_rate': round(exit_price, 15),
|
'close_rate': round(exit_price, 15),
|
||||||
|
@@ -29,7 +29,14 @@ class PricingError(DependencyException):
     """


-class InvalidOrderException(FreqtradeException):
+class ExchangeError(DependencyException):
+    """
+    Error raised out of the exchange.
+    Has multiple Errors to determine the appropriate error.
+    """
+
+
+class InvalidOrderException(ExchangeError):
     """
     This is returned when the order is not valid. Example:
     If stoploss on exchange order is hit, then trying to cancel the order
@@ -44,13 +51,6 @@ class RetryableOrderError(InvalidOrderException):
     """


-class ExchangeError(DependencyException):
-    """
-    Error raised out of the exchange.
-    Has multiple Errors to determine the appropriate error.
-    """
-
-
 class TemporaryError(ExchangeError):
     """
     Temporary network or exchange related error.
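Since InvalidOrderException now derives from ExchangeError (which itself derives from DependencyException), a single except clause can cover all exchange-side failures. A small illustrative sketch; query_order_safely and its arguments are only placeholders, not part of the codebase:

from freqtrade.exceptions import ExchangeError, InvalidOrderException


def query_order_safely(exchange, order_id: str, pair: str):
    try:
        return exchange.fetch_order(order_id, pair)
    except InvalidOrderException:
        # The order is gone or otherwise invalid - handle this case specifically
        return None
    except ExchangeError:
        # Also covers TemporaryError and any other exchange-side failure
        return None
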
@ -12,8 +12,7 @@ from freqtrade.exchange.exchange import (timeframe_to_seconds,
|
|||||||
timeframe_to_msecs,
|
timeframe_to_msecs,
|
||||||
timeframe_to_next_date,
|
timeframe_to_next_date,
|
||||||
timeframe_to_prev_date)
|
timeframe_to_prev_date)
|
||||||
from freqtrade.exchange.exchange import (market_is_active,
|
from freqtrade.exchange.exchange import (market_is_active)
|
||||||
symbol_is_pair)
|
|
||||||
from freqtrade.exchange.kraken import Kraken
|
from freqtrade.exchange.kraken import Kraken
|
||||||
from freqtrade.exchange.binance import Binance
|
from freqtrade.exchange.binance import Binance
|
||||||
from freqtrade.exchange.bibox import Bibox
|
from freqtrade.exchange.bibox import Bibox
|
||||||
|
@ -16,6 +16,7 @@ BAD_EXCHANGES = {
|
|||||||
"Details in https://github.com/freqtrade/freqtrade/issues/1983",
|
"Details in https://github.com/freqtrade/freqtrade/issues/1983",
|
||||||
"hitbtc": "This API cannot be used with Freqtrade. "
|
"hitbtc": "This API cannot be used with Freqtrade. "
|
||||||
"Use `hitbtc2` exchange id to access this exchange.",
|
"Use `hitbtc2` exchange id to access this exchange.",
|
||||||
|
"phemex": "Does not provide history. ",
|
||||||
**dict.fromkeys([
|
**dict.fromkeys([
|
||||||
'adara',
|
'adara',
|
||||||
'anxpro',
|
'anxpro',
|
||||||
|
@ -24,7 +24,7 @@ from freqtrade.exceptions import (DDosProtection, ExchangeError,
|
|||||||
InvalidOrderException, OperationalException,
|
InvalidOrderException, OperationalException,
|
||||||
RetryableOrderError, TemporaryError)
|
RetryableOrderError, TemporaryError)
|
||||||
from freqtrade.exchange.common import BAD_EXCHANGES, retrier, retrier_async
|
from freqtrade.exchange.common import BAD_EXCHANGES, retrier, retrier_async
|
||||||
from freqtrade.misc import deep_merge_dicts, safe_value_fallback
|
from freqtrade.misc import deep_merge_dicts, safe_value_fallback2
|
||||||
|
|
||||||
CcxtModuleType = Any
|
CcxtModuleType = Any
|
||||||
|
|
||||||
@ -85,8 +85,8 @@ class Exchange:
|
|||||||
|
|
||||||
# Deep merge ft_has with default ft_has options
|
# Deep merge ft_has with default ft_has options
|
||||||
self._ft_has = deep_merge_dicts(self._ft_has, deepcopy(self._ft_has_default))
|
self._ft_has = deep_merge_dicts(self._ft_has, deepcopy(self._ft_has_default))
|
||||||
if exchange_config.get("_ft_has_params"):
|
if exchange_config.get('_ft_has_params'):
|
||||||
self._ft_has = deep_merge_dicts(exchange_config.get("_ft_has_params"),
|
self._ft_has = deep_merge_dicts(exchange_config.get('_ft_has_params'),
|
||||||
self._ft_has)
|
self._ft_has)
|
||||||
logger.info("Overriding exchange._ft_has with config params, result: %s", self._ft_has)
|
logger.info("Overriding exchange._ft_has with config params, result: %s", self._ft_has)
|
||||||
|
|
||||||
@ -222,7 +222,7 @@ class Exchange:
|
|||||||
if quote_currencies:
|
if quote_currencies:
|
||||||
markets = {k: v for k, v in markets.items() if v['quote'] in quote_currencies}
|
markets = {k: v for k, v in markets.items() if v['quote'] in quote_currencies}
|
||||||
if pairs_only:
|
if pairs_only:
|
||||||
markets = {k: v for k, v in markets.items() if symbol_is_pair(v['symbol'])}
|
markets = {k: v for k, v in markets.items() if self.market_is_tradable(v)}
|
||||||
if active_only:
|
if active_only:
|
||||||
markets = {k: v for k, v in markets.items() if market_is_active(v)}
|
markets = {k: v for k, v in markets.items() if market_is_active(v)}
|
||||||
return markets
|
return markets
|
||||||
@@ -246,6 +246,19 @@ class Exchange:
         """
         return self.markets.get(pair, {}).get('base', '')

+    def market_is_tradable(self, market: Dict[str, Any]) -> bool:
+        """
+        Check if the market symbol is tradable by Freqtrade.
+        By default, checks if it's splittable by `/` and both sides correspond to base / quote
+        """
+        symbol_parts = market['symbol'].split('/')
+        return (len(symbol_parts) == 2 and
+                len(symbol_parts[0]) > 0 and
+                len(symbol_parts[1]) > 0 and
+                symbol_parts[0] == market.get('base') and
+                symbol_parts[1] == market.get('quote')
+                )
+
     def klines(self, pair_interval: Tuple[str, str], copy: bool = True) -> DataFrame:
         if pair_interval in self._klines:
             return self._klines[pair_interval].copy() if copy else self._klines[pair_interval]
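A quick sketch of how this new per-exchange hook interacts with market filtering; the market dicts are toy examples containing only the keys the check needs, and an already-initialised Exchange instance named exchange is assumed:

markets = {
    'BTC/USDT': {'symbol': 'BTC/USDT', 'base': 'BTC', 'quote': 'USDT'},
    'BTC-PERP': {'symbol': 'BTC-PERP', 'base': 'BTC', 'quote': 'USD'},
}
# Only proper base/quote pairs pass the default check; subclasses (e.g. Ftx, Kraken) can tighten it further.
tradable = {k: v for k, v in markets.items() if exchange.market_is_tradable(v)}
print(list(tradable))  # ['BTC/USDT']
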
@ -480,6 +493,7 @@ class Exchange:
|
|||||||
"id": order_id,
|
"id": order_id,
|
||||||
'pair': pair,
|
'pair': pair,
|
||||||
'price': rate,
|
'price': rate,
|
||||||
|
'average': rate,
|
||||||
'amount': _amount,
|
'amount': _amount,
|
||||||
'cost': _amount * rate,
|
'cost': _amount * rate,
|
||||||
'type': ordertype,
|
'type': ordertype,
|
||||||
@ -959,7 +973,12 @@ class Exchange:
|
|||||||
@retrier
|
@retrier
|
||||||
def cancel_order(self, order_id: str, pair: str) -> Dict:
|
def cancel_order(self, order_id: str, pair: str) -> Dict:
|
||||||
if self._config['dry_run']:
|
if self._config['dry_run']:
|
||||||
return {}
|
order = self._dry_run_open_orders.get(order_id)
|
||||||
|
if order:
|
||||||
|
order.update({'status': 'canceled', 'filled': 0.0, 'remaining': order['amount']})
|
||||||
|
return order
|
||||||
|
else:
|
||||||
|
return {}
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return self._api.cancel_order(order_id, pair)
|
return self._api.cancel_order(order_id, pair)
|
||||||
@ -974,7 +993,7 @@ class Exchange:
|
|||||||
except ccxt.BaseError as e:
|
except ccxt.BaseError as e:
|
||||||
raise OperationalException(e) from e
|
raise OperationalException(e) from e
|
||||||
|
|
||||||
# Assign method to fetch_stoploss_order to allow easy overriding in other classes
|
# Assign method to cancel_stoploss_order to allow easy overriding in other classes
|
||||||
cancel_stoploss_order = cancel_order
|
cancel_stoploss_order = cancel_order
|
||||||
|
|
||||||
def is_cancel_order_result_suitable(self, corder) -> bool:
|
def is_cancel_order_result_suitable(self, corder) -> bool:
|
||||||
@ -1040,10 +1059,10 @@ class Exchange:
|
|||||||
@retrier
|
@retrier
|
||||||
def fetch_l2_order_book(self, pair: str, limit: int = 100) -> dict:
|
def fetch_l2_order_book(self, pair: str, limit: int = 100) -> dict:
|
||||||
"""
|
"""
|
||||||
get order book level 2 from exchange
|
Get L2 order book from exchange.
|
||||||
|
Can be limited to a certain amount (if supported).
|
||||||
Notes:
|
Returns a dict in the format
|
||||||
20180619: bittrex doesnt support limits -.-
|
{'asks': [price, volume], 'bids': [price, volume]}
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
|
|
||||||
@ -1144,7 +1163,7 @@ class Exchange:
|
|||||||
if fee_curr in self.get_pair_base_currency(order['symbol']):
|
if fee_curr in self.get_pair_base_currency(order['symbol']):
|
||||||
# Base currency - divide by amount
|
# Base currency - divide by amount
|
||||||
return round(
|
return round(
|
||||||
order['fee']['cost'] / safe_value_fallback(order, order, 'filled', 'amount'), 8)
|
order['fee']['cost'] / safe_value_fallback2(order, order, 'filled', 'amount'), 8)
|
||||||
elif fee_curr in self.get_pair_quote_currency(order['symbol']):
|
elif fee_curr in self.get_pair_quote_currency(order['symbol']):
|
||||||
# Quote currency - divide by cost
|
# Quote currency - divide by cost
|
||||||
return round(order['fee']['cost'] / order['cost'], 8) if order['cost'] else None
|
return round(order['fee']['cost'] / order['cost'], 8) if order['cost'] else None
|
||||||
@ -1157,7 +1176,7 @@ class Exchange:
|
|||||||
comb = self.get_valid_pair_combination(fee_curr, self._config['stake_currency'])
|
comb = self.get_valid_pair_combination(fee_curr, self._config['stake_currency'])
|
||||||
tick = self.fetch_ticker(comb)
|
tick = self.fetch_ticker(comb)
|
||||||
|
|
||||||
fee_to_quote_rate = safe_value_fallback(tick, tick, 'last', 'ask')
|
fee_to_quote_rate = safe_value_fallback2(tick, tick, 'last', 'ask')
|
||||||
return round((order['fee']['cost'] * fee_to_quote_rate) / order['cost'], 8)
|
return round((order['fee']['cost'] * fee_to_quote_rate) / order['cost'], 8)
|
||||||
except ExchangeError:
|
except ExchangeError:
|
||||||
return None
|
return None
|
||||||
@ -1172,7 +1191,6 @@ class Exchange:
|
|||||||
return (order['fee']['cost'],
|
return (order['fee']['cost'],
|
||||||
order['fee']['currency'],
|
order['fee']['currency'],
|
||||||
self.calculate_fee_rate(order))
|
self.calculate_fee_rate(order))
|
||||||
# calculate rate ? (order['fee']['cost'] / (order['amount'] * order['price']))
|
|
||||||
|
|
||||||
|
|
||||||
def is_exchange_bad(exchange_name: str) -> bool:
|
def is_exchange_bad(exchange_name: str) -> bool:
|
||||||
@ -1258,20 +1276,6 @@ def timeframe_to_next_date(timeframe: str, date: datetime = None) -> datetime:
|
|||||||
return datetime.fromtimestamp(new_timestamp, tz=timezone.utc)
|
return datetime.fromtimestamp(new_timestamp, tz=timezone.utc)
|
||||||
|
|
||||||
|
|
||||||
def symbol_is_pair(market_symbol: str, base_currency: str = None,
|
|
||||||
quote_currency: str = None) -> bool:
|
|
||||||
"""
|
|
||||||
Check if the market symbol is a pair, i.e. that its symbol consists of the base currency and the
|
|
||||||
quote currency separated by '/' character. If base_currency and/or quote_currency is passed,
|
|
||||||
it also checks that the symbol contains appropriate base and/or quote currency part before
|
|
||||||
and after the separating character correspondingly.
|
|
||||||
"""
|
|
||||||
symbol_parts = market_symbol.split('/')
|
|
||||||
return (len(symbol_parts) == 2 and
|
|
||||||
(symbol_parts[0] == base_currency if base_currency else len(symbol_parts[0]) > 0) and
|
|
||||||
(symbol_parts[1] == quote_currency if quote_currency else len(symbol_parts[1]) > 0))
|
|
||||||
|
|
||||||
|
|
||||||
def market_is_active(market: Dict) -> bool:
|
def market_is_active(market: Dict) -> bool:
|
||||||
"""
|
"""
|
||||||
Return True if the market is active.
|
Return True if the market is active.
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
""" FTX exchange subclass """
|
""" FTX exchange subclass """
|
||||||
import logging
|
import logging
|
||||||
from typing import Dict
|
from typing import Any, Dict
|
||||||
|
|
||||||
import ccxt
|
import ccxt
|
||||||
|
|
||||||
@ -20,6 +20,16 @@ class Ftx(Exchange):
|
|||||||
"ohlcv_candle_limit": 1500,
|
"ohlcv_candle_limit": 1500,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
def market_is_tradable(self, market: Dict[str, Any]) -> bool:
|
||||||
|
"""
|
||||||
|
Check if the market symbol is tradable by Freqtrade.
|
||||||
|
Default checks + check if pair is spot pair (no futures trading yet).
|
||||||
|
"""
|
||||||
|
parent_check = super().market_is_tradable(market)
|
||||||
|
|
||||||
|
return (parent_check and
|
||||||
|
market.get('spot', False) is True)
|
||||||
|
|
||||||
def stoploss_adjust(self, stop_loss: float, order: Dict) -> bool:
|
def stoploss_adjust(self, stop_loss: float, order: Dict) -> bool:
|
||||||
"""
|
"""
|
||||||
Verify stop_loss against stoploss-order value (limit or price)
|
Verify stop_loss against stoploss-order value (limit or price)
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
""" Kraken exchange subclass """
|
""" Kraken exchange subclass """
|
||||||
import logging
|
import logging
|
||||||
from typing import Dict
|
from typing import Any, Dict
|
||||||
|
|
||||||
import ccxt
|
import ccxt
|
||||||
|
|
||||||
@ -22,6 +22,16 @@ class Kraken(Exchange):
|
|||||||
"trades_pagination_arg": "since",
|
"trades_pagination_arg": "since",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
def market_is_tradable(self, market: Dict[str, Any]) -> bool:
|
||||||
|
"""
|
||||||
|
Check if the market symbol is tradable by Freqtrade.
|
||||||
|
Default checks + check if pair is darkpool pair.
|
||||||
|
"""
|
||||||
|
parent_check = super().market_is_tradable(market)
|
||||||
|
|
||||||
|
return (parent_check and
|
||||||
|
market.get('darkpool', False) is False)
|
||||||
|
|
||||||
@retrier
|
@retrier
|
||||||
def get_balances(self) -> dict:
|
def get_balances(self) -> dict:
|
||||||
if self._config['dry_run']:
|
if self._config['dry_run']:
|
||||||
|
@ -20,7 +20,7 @@ from freqtrade.edge import Edge
|
|||||||
from freqtrade.exceptions import (DependencyException, ExchangeError,
|
from freqtrade.exceptions import (DependencyException, ExchangeError,
|
||||||
InvalidOrderException, PricingError)
|
InvalidOrderException, PricingError)
|
||||||
from freqtrade.exchange import timeframe_to_minutes, timeframe_to_next_date
|
from freqtrade.exchange import timeframe_to_minutes, timeframe_to_next_date
|
||||||
from freqtrade.misc import safe_value_fallback
|
from freqtrade.misc import safe_value_fallback, safe_value_fallback2
|
||||||
from freqtrade.pairlist.pairlistmanager import PairListManager
|
from freqtrade.pairlist.pairlistmanager import PairListManager
|
||||||
from freqtrade.persistence import Trade
|
from freqtrade.persistence import Trade
|
||||||
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
|
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
|
||||||
@ -275,7 +275,7 @@ class FreqtradeBot:
|
|||||||
rate = self._buy_rate_cache.get(pair)
|
rate = self._buy_rate_cache.get(pair)
|
||||||
# Check if cache has been invalidated
|
# Check if cache has been invalidated
|
||||||
if rate:
|
if rate:
|
||||||
logger.info(f"Using cached buy rate for {pair}.")
|
logger.debug(f"Using cached buy rate for {pair}.")
|
||||||
return rate
|
return rate
|
||||||
|
|
||||||
bid_strategy = self.config.get('bid_strategy', {})
|
bid_strategy = self.config.get('bid_strategy', {})
|
||||||
@ -433,7 +433,9 @@ class FreqtradeBot:
|
|||||||
"""
|
"""
|
||||||
logger.debug(f"create_trade for pair {pair}")
|
logger.debug(f"create_trade for pair {pair}")
|
||||||
|
|
||||||
if self.strategy.is_pair_locked(pair):
|
analyzed_df, _ = self.dataprovider.get_analyzed_dataframe(pair, self.strategy.timeframe)
|
||||||
|
if self.strategy.is_pair_locked(
|
||||||
|
pair, analyzed_df.iloc[-1]['date'] if len(analyzed_df) > 0 else None):
|
||||||
logger.info(f"Pair {pair} is currently locked.")
|
logger.info(f"Pair {pair} is currently locked.")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@ -444,7 +446,6 @@ class FreqtradeBot:
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
# running get_signal on historical data fetched
|
# running get_signal on historical data fetched
|
||||||
analyzed_df, _ = self.dataprovider.get_analyzed_dataframe(pair, self.strategy.timeframe)
|
|
||||||
(buy, sell) = self.strategy.get_signal(pair, self.strategy.timeframe, analyzed_df)
|
(buy, sell) = self.strategy.get_signal(pair, self.strategy.timeframe, analyzed_df)
|
||||||
|
|
||||||
if buy and not sell:
|
if buy and not sell:
|
||||||
@ -523,7 +524,7 @@ class FreqtradeBot:
|
|||||||
time_in_force=time_in_force):
|
time_in_force=time_in_force):
|
||||||
logger.info(f"User requested abortion of buying {pair}")
|
logger.info(f"User requested abortion of buying {pair}")
|
||||||
return False
|
return False
|
||||||
|
amount = self.exchange.amount_to_precision(pair, amount)
|
||||||
order = self.exchange.buy(pair=pair, ordertype=order_type,
|
order = self.exchange.buy(pair=pair, ordertype=order_type,
|
||||||
amount=amount, rate=buy_limit_requested,
|
amount=amount, rate=buy_limit_requested,
|
||||||
time_in_force=time_in_force)
|
time_in_force=time_in_force)
|
||||||
@ -532,6 +533,7 @@ class FreqtradeBot:
|
|||||||
|
|
||||||
# we assume the order is executed at the price requested
|
# we assume the order is executed at the price requested
|
||||||
buy_limit_filled_price = buy_limit_requested
|
buy_limit_filled_price = buy_limit_requested
|
||||||
|
amount_requested = amount
|
||||||
|
|
||||||
if order_status == 'expired' or order_status == 'rejected':
|
if order_status == 'expired' or order_status == 'rejected':
|
||||||
order_tif = self.strategy.order_time_in_force['buy']
|
order_tif = self.strategy.order_time_in_force['buy']
|
||||||
@ -552,15 +554,15 @@ class FreqtradeBot:
|
|||||||
order['filled'], order['amount'], order['remaining']
|
order['filled'], order['amount'], order['remaining']
|
||||||
)
|
)
|
||||||
stake_amount = order['cost']
|
stake_amount = order['cost']
|
||||||
amount = order['amount']
|
amount = safe_value_fallback(order, 'filled', 'amount')
|
||||||
buy_limit_filled_price = order['price']
|
buy_limit_filled_price = safe_value_fallback(order, 'average', 'price')
|
||||||
order_id = None
|
order_id = None
|
||||||
|
|
||||||
# in case of FOK the order may be filled immediately and fully
|
# in case of FOK the order may be filled immediately and fully
|
||||||
elif order_status == 'closed':
|
elif order_status == 'closed':
|
||||||
stake_amount = order['cost']
|
stake_amount = order['cost']
|
||||||
amount = order['amount']
|
amount = safe_value_fallback(order, 'filled', 'amount')
|
||||||
buy_limit_filled_price = order['price']
|
buy_limit_filled_price = safe_value_fallback(order, 'average', 'price')
|
||||||
|
|
||||||
# Fee is applied twice because we make a LIMIT_BUY and LIMIT_SELL
|
# Fee is applied twice because we make a LIMIT_BUY and LIMIT_SELL
|
||||||
fee = self.exchange.get_fee(symbol=pair, taker_or_maker='maker')
|
fee = self.exchange.get_fee(symbol=pair, taker_or_maker='maker')
|
||||||
@ -568,6 +570,7 @@ class FreqtradeBot:
|
|||||||
pair=pair,
|
pair=pair,
|
||||||
stake_amount=stake_amount,
|
stake_amount=stake_amount,
|
||||||
amount=amount,
|
amount=amount,
|
||||||
|
amount_requested=amount_requested,
|
||||||
fee_open=fee,
|
fee_open=fee,
|
||||||
fee_close=fee,
|
fee_close=fee,
|
||||||
open_rate=buy_limit_filled_price,
|
open_rate=buy_limit_filled_price,
|
||||||
@ -615,7 +618,7 @@ class FreqtradeBot:
|
|||||||
# Send the message
|
# Send the message
|
||||||
self.rpc.send_msg(msg)
|
self.rpc.send_msg(msg)
|
||||||
|
|
||||||
def _notify_buy_cancel(self, trade: Trade, order_type: str) -> None:
|
def _notify_buy_cancel(self, trade: Trade, order_type: str, reason: str) -> None:
|
||||||
"""
|
"""
|
||||||
Sends rpc notification when a buy cancel occured.
|
Sends rpc notification when a buy cancel occured.
|
||||||
"""
|
"""
|
||||||
@ -634,6 +637,7 @@ class FreqtradeBot:
|
|||||||
'amount': trade.amount,
|
'amount': trade.amount,
|
||||||
'open_date': trade.open_date,
|
'open_date': trade.open_date,
|
||||||
'current_rate': current_rate,
|
'current_rate': current_rate,
|
||||||
|
'reason': reason,
|
||||||
}
|
}
|
||||||
|
|
||||||
# Send the message
|
# Send the message
|
||||||
@ -691,7 +695,7 @@ class FreqtradeBot:
|
|||||||
rate = self._sell_rate_cache.get(pair)
|
rate = self._sell_rate_cache.get(pair)
|
||||||
# Check if cache has been invalidated
|
# Check if cache has been invalidated
|
||||||
if rate:
|
if rate:
|
||||||
logger.info(f"Using cached sell rate for {pair}.")
|
logger.debug(f"Using cached sell rate for {pair}.")
|
||||||
return rate
|
return rate
|
||||||
|
|
||||||
ask_strategy = self.config.get('ask_strategy', {})
|
ask_strategy = self.config.get('ask_strategy', {})
|
||||||
@ -768,7 +772,7 @@ class FreqtradeBot:
|
|||||||
logger.debug('Found no sell signal for %s.', trade)
|
logger.debug('Found no sell signal for %s.', trade)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def create_stoploss_order(self, trade: Trade, stop_price: float, rate: float) -> bool:
|
def create_stoploss_order(self, trade: Trade, stop_price: float) -> bool:
|
||||||
"""
|
"""
|
||||||
Abstracts creating stoploss orders from the logic.
|
Abstracts creating stoploss orders from the logic.
|
||||||
Handles errors and updates the trade database object.
|
Handles errors and updates the trade database object.
|
||||||
@ -831,14 +835,13 @@ class FreqtradeBot:
|
|||||||
stoploss = self.edge.stoploss(pair=trade.pair) if self.edge else self.strategy.stoploss
|
stoploss = self.edge.stoploss(pair=trade.pair) if self.edge else self.strategy.stoploss
|
||||||
stop_price = trade.open_rate * (1 + stoploss)
|
stop_price = trade.open_rate * (1 + stoploss)
|
||||||
|
|
||||||
if self.create_stoploss_order(trade=trade, stop_price=stop_price, rate=stop_price):
|
if self.create_stoploss_order(trade=trade, stop_price=stop_price):
|
||||||
trade.stoploss_last_update = datetime.now()
|
trade.stoploss_last_update = datetime.utcnow()
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# If stoploss order is canceled for some reason we add it
|
# If stoploss order is canceled for some reason we add it
|
||||||
if stoploss_order and stoploss_order['status'] in ('canceled', 'cancelled'):
|
if stoploss_order and stoploss_order['status'] in ('canceled', 'cancelled'):
|
||||||
if self.create_stoploss_order(trade=trade, stop_price=trade.stop_loss,
|
if self.create_stoploss_order(trade=trade, stop_price=trade.stop_loss):
|
||||||
rate=trade.stop_loss):
|
|
||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
trade.stoploss_order_id = None
|
trade.stoploss_order_id = None
|
||||||
@ -875,8 +878,7 @@ class FreqtradeBot:
|
|||||||
f"for pair {trade.pair}")
|
f"for pair {trade.pair}")
|
||||||
|
|
||||||
# Create new stoploss order
|
# Create new stoploss order
|
||||||
if not self.create_stoploss_order(trade=trade, stop_price=trade.stop_loss,
|
if not self.create_stoploss_order(trade=trade, stop_price=trade.stop_loss):
|
||||||
rate=trade.stop_loss):
|
|
||||||
logger.warning(f"Could not create trailing stoploss order "
|
logger.warning(f"Could not create trailing stoploss order "
|
||||||
f"for pair {trade.pair}.")
|
f"for pair {trade.pair}.")
|
||||||
|
|
||||||
@ -921,7 +923,7 @@ class FreqtradeBot:
|
|||||||
if not trade.open_order_id:
|
if not trade.open_order_id:
|
||||||
continue
|
continue
|
||||||
order = self.exchange.fetch_order(trade.open_order_id, trade.pair)
|
order = self.exchange.fetch_order(trade.open_order_id, trade.pair)
|
||||||
except (ExchangeError, InvalidOrderException):
|
except (ExchangeError):
|
||||||
logger.info('Cannot query order for %s due to %s', trade, traceback.format_exc())
|
logger.info('Cannot query order for %s due to %s', trade, traceback.format_exc())
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@ -954,7 +956,7 @@ class FreqtradeBot:
|
|||||||
for trade in Trade.get_open_order_trades():
|
for trade in Trade.get_open_order_trades():
|
||||||
try:
|
try:
|
||||||
order = self.exchange.fetch_order(trade.open_order_id, trade.pair)
|
order = self.exchange.fetch_order(trade.open_order_id, trade.pair)
|
||||||
except (DependencyException, InvalidOrderException):
|
except (ExchangeError):
|
||||||
logger.info('Cannot query order for %s due to %s', trade, traceback.format_exc())
|
logger.info('Cannot query order for %s due to %s', trade, traceback.format_exc())
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@ -973,7 +975,6 @@ class FreqtradeBot:
|
|||||||
|
|
||||||
# Cancelled orders may have the status of 'canceled' or 'closed'
|
# Cancelled orders may have the status of 'canceled' or 'closed'
|
||||||
if order['status'] not in ('canceled', 'closed'):
|
if order['status'] not in ('canceled', 'closed'):
|
||||||
reason = constants.CANCEL_REASON['TIMEOUT']
|
|
||||||
corder = self.exchange.cancel_order_with_result(trade.open_order_id, trade.pair,
|
corder = self.exchange.cancel_order_with_result(trade.open_order_id, trade.pair,
|
||||||
trade.amount)
|
trade.amount)
|
||||||
# Avoid race condition where the order could not be cancelled coz its already filled.
|
# Avoid race condition where the order could not be cancelled coz its already filled.
|
||||||
@ -990,14 +991,14 @@ class FreqtradeBot:
|
|||||||
logger.info('Buy order %s for %s.', reason, trade)
|
logger.info('Buy order %s for %s.', reason, trade)
|
||||||
|
|
||||||
# Using filled to determine the filled amount
|
# Using filled to determine the filled amount
|
||||||
filled_amount = safe_value_fallback(corder, order, 'filled', 'filled')
|
filled_amount = safe_value_fallback2(corder, order, 'filled', 'filled')
|
||||||
|
|
||||||
if isclose(filled_amount, 0.0, abs_tol=constants.MATH_CLOSE_PREC):
|
if isclose(filled_amount, 0.0, abs_tol=constants.MATH_CLOSE_PREC):
|
||||||
logger.info('Buy order fully cancelled. Removing %s from database.', trade)
|
logger.info('Buy order fully cancelled. Removing %s from database.', trade)
|
||||||
# if trade is not partially completed, just delete the trade
|
# if trade is not partially completed, just delete the trade
|
||||||
Trade.session.delete(trade)
|
Trade.session.delete(trade)
|
||||||
Trade.session.flush()
|
Trade.session.flush()
|
||||||
was_trade_fully_canceled = True
|
was_trade_fully_canceled = True
|
||||||
|
reason += f", {constants.CANCEL_REASON['FULLY_CANCELLED']}"
|
||||||
else:
|
else:
|
||||||
# if trade is partially complete, edit the stake details for the trade
|
# if trade is partially complete, edit the stake details for the trade
|
||||||
# and close the order
|
# and close the order
|
||||||
@ -1010,13 +1011,11 @@ class FreqtradeBot:
|
|||||||
|
|
||||||
trade.open_order_id = None
|
trade.open_order_id = None
|
||||||
logger.info('Partial buy order timeout for %s.', trade)
|
logger.info('Partial buy order timeout for %s.', trade)
|
||||||
self.rpc.send_msg({
|
reason += f", {constants.CANCEL_REASON['PARTIALLY_FILLED']}"
|
||||||
'type': RPCMessageType.STATUS_NOTIFICATION,
|
|
||||||
'status': f'Remaining buy order for {trade.pair} cancelled due to timeout'
|
|
||||||
})
|
|
||||||
|
|
||||||
self.wallets.update()
|
self.wallets.update()
|
||||||
self._notify_buy_cancel(trade, order_type=self.strategy.order_types['buy'])
|
self._notify_buy_cancel(trade, order_type=self.strategy.order_types['buy'],
|
||||||
|
reason=reason)
|
||||||
return was_trade_fully_canceled
|
return was_trade_fully_canceled
|
||||||
|
|
||||||
def handle_cancel_sell(self, trade: Trade, order: Dict, reason: str) -> str:
|
def handle_cancel_sell(self, trade: Trade, order: Dict, reason: str) -> str:
|
||||||
@ -1047,7 +1046,7 @@ class FreqtradeBot:
|
|||||||
trade.open_order_id = None
|
trade.open_order_id = None
|
||||||
else:
|
else:
|
||||||
# TODO: figure out how to handle partially complete sell orders
|
# TODO: figure out how to handle partially complete sell orders
|
||||||
reason = constants.CANCEL_REASON['PARTIALLY_FILLED']
|
reason = constants.CANCEL_REASON['PARTIALLY_FILLED_KEEP_OPEN']
|
||||||
|
|
||||||
self.wallets.update()
|
self.wallets.update()
|
||||||
self._notify_sell_cancel(
|
self._notify_sell_cancel(
|
||||||
@ -1255,7 +1254,8 @@ class FreqtradeBot:
|
|||||||
# Try update amount (binance-fix)
|
# Try update amount (binance-fix)
|
||||||
try:
|
try:
|
||||||
new_amount = self.get_real_amount(trade, order, order_amount)
|
new_amount = self.get_real_amount(trade, order, order_amount)
|
||||||
if not isclose(order['amount'], new_amount, abs_tol=constants.MATH_CLOSE_PREC):
|
if not isclose(safe_value_fallback(order, 'filled', 'amount'), new_amount,
|
||||||
|
abs_tol=constants.MATH_CLOSE_PREC):
|
||||||
order['amount'] = new_amount
|
order['amount'] = new_amount
|
||||||
order.pop('filled', None)
|
order.pop('filled', None)
|
||||||
trade.recalc_open_trade_price()
|
trade.recalc_open_trade_price()
|
||||||
@ -1301,7 +1301,7 @@ class FreqtradeBot:
|
|||||||
"""
|
"""
|
||||||
# Init variables
|
# Init variables
|
||||||
if order_amount is None:
|
if order_amount is None:
|
||||||
order_amount = order['amount']
|
order_amount = safe_value_fallback(order, 'filled', 'amount')
|
||||||
# Only run for closed orders
|
# Only run for closed orders
|
||||||
if trade.fee_updated(order.get('side', '')) or order['status'] == 'open':
|
if trade.fee_updated(order.get('side', '')) or order['status'] == 'open':
|
||||||
return order_amount
|
return order_amount
|
||||||
|
@@ -1,14 +1,18 @@
 import logging
 import sys

 from logging import Formatter
-from logging.handlers import RotatingFileHandler, SysLogHandler
-from typing import Any, Dict, List
+from logging.handlers import (BufferingHandler, RotatingFileHandler,
+                              SysLogHandler)
+from typing import Any, Dict

 from freqtrade.exceptions import OperationalException


 logger = logging.getLogger(__name__)
+LOGFORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+
+# Initialize bufferhandler - will be used for /log endpoints
+bufferHandler = BufferingHandler(1000)
+bufferHandler.setFormatter(Formatter(LOGFORMAT))


 def _set_loggers(verbosity: int = 0, api_verbosity: str = 'info') -> None:
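The new module-level bufferHandler keeps the most recent formatted records in memory so an RPC /logs endpoint can serve them without touching log files. A minimal sketch of reading it back; formatting each record through the handler is an assumption based on LOGFORMAT above:

import logging

from freqtrade.loggers import bufferHandler, setup_logging_pre

setup_logging_pre()
logging.getLogger(__name__).info("bot starting up")

# Up to 1000 records are kept in the buffer; format them for display
recent = [bufferHandler.format(record) for record in bufferHandler.buffer]
print(recent[-1])
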
@ -33,17 +37,31 @@ def _set_loggers(verbosity: int = 0, api_verbosity: str = 'info') -> None:
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def setup_logging_pre() -> None:
|
||||||
|
"""
|
||||||
|
Early setup for logging.
|
||||||
|
Uses INFO loglevel and only the Streamhandler.
|
||||||
|
Early messages (before proper logging setup) will therefore only be sent to additional
|
||||||
|
logging handlers after the real initialization, because we don't know which
|
||||||
|
ones the user desires beforehand.
|
||||||
|
"""
|
||||||
|
logging.basicConfig(
|
||||||
|
level=logging.INFO,
|
||||||
|
format=LOGFORMAT,
|
||||||
|
handlers=[logging.StreamHandler(sys.stderr), bufferHandler]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def setup_logging(config: Dict[str, Any]) -> None:
|
def setup_logging(config: Dict[str, Any]) -> None:
|
||||||
"""
|
"""
|
||||||
Process -v/--verbose, --logfile options
|
Process -v/--verbose, --logfile options
|
||||||
"""
|
"""
|
||||||
# Log level
|
# Log level
|
||||||
verbosity = config['verbosity']
|
verbosity = config['verbosity']
|
||||||
|
logging.root.addHandler(bufferHandler)
|
||||||
# Log to stderr
|
|
||||||
log_handlers: List[logging.Handler] = [logging.StreamHandler(sys.stderr)]
|
|
||||||
|
|
||||||
logfile = config.get('logfile')
|
logfile = config.get('logfile')
|
||||||
|
|
||||||
if logfile:
|
if logfile:
|
||||||
s = logfile.split(':')
|
s = logfile.split(':')
|
||||||
if s[0] == 'syslog':
|
if s[0] == 'syslog':
|
||||||
@ -58,28 +76,27 @@ def setup_logging(config: Dict[str, Any]) -> None:
|
|||||||
# to perform reduction of repeating messages if this is set in the
|
# to perform reduction of repeating messages if this is set in the
|
||||||
# syslog config. The messages should be equal for this.
|
# syslog config. The messages should be equal for this.
|
||||||
handler.setFormatter(Formatter('%(name)s - %(levelname)s - %(message)s'))
|
handler.setFormatter(Formatter('%(name)s - %(levelname)s - %(message)s'))
|
||||||
log_handlers.append(handler)
|
logging.root.addHandler(handler)
|
||||||
elif s[0] == 'journald':
|
elif s[0] == 'journald':
|
||||||
try:
|
try:
|
||||||
from systemd.journal import JournaldLogHandler
|
from systemd.journal import JournaldLogHandler
|
||||||
except ImportError:
|
except ImportError:
|
||||||
raise OperationalException("You need the systemd python package be installed in "
|
raise OperationalException("You need the systemd python package be installed in "
|
||||||
"order to use logging to journald.")
|
"order to use logging to journald.")
|
||||||
handler = JournaldLogHandler()
|
handler_jd = JournaldLogHandler()
|
||||||
# No datetime field for logging into journald, to allow syslog
|
# No datetime field for logging into journald, to allow syslog
|
||||||
# to perform reduction of repeating messages if this is set in the
|
# to perform reduction of repeating messages if this is set in the
|
||||||
# syslog config. The messages should be equal for this.
|
# syslog config. The messages should be equal for this.
|
||||||
handler.setFormatter(Formatter('%(name)s - %(levelname)s - %(message)s'))
|
handler_jd.setFormatter(Formatter('%(name)s - %(levelname)s - %(message)s'))
|
||||||
log_handlers.append(handler)
|
logging.root.addHandler(handler_jd)
|
||||||
else:
|
else:
|
||||||
log_handlers.append(RotatingFileHandler(logfile,
|
handler_rf = RotatingFileHandler(logfile,
|
||||||
maxBytes=1024 * 1024, # 1Mb
|
maxBytes=1024 * 1024 * 10, # 10Mb
|
||||||
backupCount=10))
|
backupCount=10)
|
||||||
|
handler_rf.setFormatter(Formatter(LOGFORMAT))
|
||||||
|
logging.root.addHandler(handler_rf)
|
||||||
|
|
||||||
logging.basicConfig(
|
logging.root.setLevel(logging.INFO if verbosity < 1 else logging.DEBUG)
|
||||||
level=logging.INFO if verbosity < 1 else logging.DEBUG,
|
|
||||||
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
|
|
||||||
handlers=log_handlers
|
|
||||||
)
|
|
||||||
_set_loggers(verbosity, config.get('api_server', {}).get('verbosity', 'info'))
|
_set_loggers(verbosity, config.get('api_server', {}).get('verbosity', 'info'))
|
||||||
|
|
||||||
logger.info('Verbosity set to %s', verbosity)
|
logger.info('Verbosity set to %s', verbosity)
|
||||||
|
@ -3,18 +3,17 @@
|
|||||||
Main Freqtrade bot script.
|
Main Freqtrade bot script.
|
||||||
Read the documentation to know what cli arguments you need.
|
Read the documentation to know what cli arguments you need.
|
||||||
"""
|
"""
|
||||||
|
import logging
|
||||||
from freqtrade.exceptions import FreqtradeException, OperationalException
|
|
||||||
import sys
|
import sys
|
||||||
|
from typing import Any, List
|
||||||
|
|
||||||
# check min. python version
|
# check min. python version
|
||||||
if sys.version_info < (3, 6):
|
if sys.version_info < (3, 6):
|
||||||
sys.exit("Freqtrade requires Python version >= 3.6")
|
sys.exit("Freqtrade requires Python version >= 3.6")
|
||||||
|
|
||||||
# flake8: noqa E402
|
|
||||||
import logging
|
|
||||||
from typing import Any, List
|
|
||||||
|
|
||||||
from freqtrade.commands import Arguments
|
from freqtrade.commands import Arguments
|
||||||
|
from freqtrade.exceptions import FreqtradeException, OperationalException
|
||||||
|
from freqtrade.loggers import setup_logging_pre
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger('freqtrade')
|
logger = logging.getLogger('freqtrade')
|
||||||
@ -28,6 +27,7 @@ def main(sysargv: List[str] = None) -> None:
|
|||||||
|
|
||||||
return_code: Any = 1
|
return_code: Any = 1
|
||||||
try:
|
try:
|
||||||
|
setup_logging_pre()
|
||||||
arguments = Arguments(sysargv)
|
arguments = Arguments(sysargv)
|
||||||
args = arguments.get_parsed_arg()
|
args = arguments.get_parsed_arg()
|
||||||
|
|
||||||
|
@@ -134,7 +134,21 @@ def round_dict(d, n):
     return {k: (round(v, n) if isinstance(v, float) else v) for k, v in d.items()}


-def safe_value_fallback(dict1: dict, dict2: dict, key1: str, key2: str, default_value=None):
+def safe_value_fallback(obj: dict, key1: str, key2: str, default_value=None):
+    """
+    Search a value in obj, return this if it's not None.
+    Then search key2 in obj - return that if it's not none - then use default_value.
+    Else falls back to None.
+    """
+    if key1 in obj and obj[key1] is not None:
+        return obj[key1]
+    else:
+        if key2 in obj and obj[key2] is not None:
+            return obj[key2]
+        return default_value
+
+
+def safe_value_fallback2(dict1: dict, dict2: dict, key1: str, key2: str, default_value=None):
     """
     Search a value in dict1, return this if it's not None.
     Fall back to dict2 - return key2 from dict2 if it's not None.
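To make the split obvious: safe_value_fallback now looks at two keys of the same dict, while safe_value_fallback2 keeps the old two-dict behaviour. A short sketch; the order dicts are made-up examples:

from freqtrade.misc import safe_value_fallback, safe_value_fallback2

order = {'filled': None, 'amount': 0.5, 'average': 10.2, 'price': None}

# One dict, two keys: 'filled' is None, so fall back to 'amount'
print(safe_value_fallback(order, 'filled', 'amount'))           # 0.5

# Two dicts: prefer corder['filled'], fall back to order['filled']
corder = {'filled': 0.3}
print(safe_value_fallback2(corder, order, 'filled', 'filled'))  # 0.3
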
@ -13,6 +13,7 @@ from pandas import DataFrame
|
|||||||
|
|
||||||
from freqtrade.configuration import (TimeRange, remove_credentials,
|
from freqtrade.configuration import (TimeRange, remove_credentials,
|
||||||
validate_config_consistency)
|
validate_config_consistency)
|
||||||
|
from freqtrade.constants import DATETIME_PRINT_FORMAT
|
||||||
from freqtrade.data import history
|
from freqtrade.data import history
|
||||||
from freqtrade.data.converter import trim_dataframe
|
from freqtrade.data.converter import trim_dataframe
|
||||||
from freqtrade.data.dataprovider import DataProvider
|
from freqtrade.data.dataprovider import DataProvider
|
||||||
@ -20,11 +21,10 @@ from freqtrade.exceptions import OperationalException
|
|||||||
from freqtrade.exchange import timeframe_to_minutes, timeframe_to_seconds
|
from freqtrade.exchange import timeframe_to_minutes, timeframe_to_seconds
|
||||||
from freqtrade.optimize.optimize_reports import (generate_backtest_stats,
|
from freqtrade.optimize.optimize_reports import (generate_backtest_stats,
|
||||||
show_backtest_results,
|
show_backtest_results,
|
||||||
store_backtest_result)
|
store_backtest_stats)
|
||||||
from freqtrade.pairlist.pairlistmanager import PairListManager
|
from freqtrade.pairlist.pairlistmanager import PairListManager
|
||||||
from freqtrade.persistence import Trade
|
from freqtrade.persistence import Trade
|
||||||
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
|
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
|
||||||
from freqtrade.state import RunMode
|
|
||||||
from freqtrade.strategy.interface import IStrategy, SellCheckTuple, SellType
|
from freqtrade.strategy.interface import IStrategy, SellCheckTuple, SellType
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
@@ -37,14 +37,15 @@ class BacktestResult(NamedTuple):
     pair: str
     profit_percent: float
     profit_abs: float
-    open_time: datetime
-    close_time: datetime
-    open_index: int
-    close_index: int
+    open_date: datetime
+    open_rate: float
+    open_fee: float
+    close_date: datetime
+    close_rate: float
+    close_fee: float
+    amount: float
     trade_duration: float
     open_at_end: bool
-    open_rate: float
-    close_rate: float
     sell_reason: SellType
 
 
@@ -65,9 +66,8 @@ class Backtesting:
         self.strategylist: List[IStrategy] = []
         self.exchange = ExchangeResolver.load_exchange(self.config['exchange']['name'], self.config)
 
-        if self.config.get('runmode') != RunMode.HYPEROPT:
-            self.dataprovider = DataProvider(self.config, self.exchange)
-            IStrategy.dp = self.dataprovider
+        dataprovider = DataProvider(self.config, self.exchange)
+        IStrategy.dp = dataprovider
 
         if self.config.get('strategy_list', None):
             for strat in list(self.config['strategy_list']):
@@ -96,6 +96,7 @@ class Backtesting:
                 "PrecisionFilter not allowed for backtesting multiple strategies."
             )
 
+        dataprovider.add_pairlisthandler(self.pairlists)
         self.pairlists.refresh_pairlist()
 
         if len(self.pairlists.whitelist) == 0:
@@ -137,10 +138,10 @@ class Backtesting:
 
         min_date, max_date = history.get_timerange(data)
 
-        logger.info(
-            'Loading data from %s up to %s (%s days)..',
-            min_date.isoformat(), max_date.isoformat(), (max_date - min_date).days
-        )
+        logger.info(f'Loading data from {min_date.strftime(DATETIME_PRINT_FORMAT)} '
+                    f'up to {max_date.strftime(DATETIME_PRINT_FORMAT)} '
+                    f'({(max_date - min_date).days} days)..')
         # Adjust startts forward if not enough data is available
         timerange.adjust_start_if_necessary(timeframe_to_seconds(self.timeframe),
                                             self.required_startup, min_date)
@@ -225,7 +226,7 @@ class Backtesting:
             open_rate=buy_row.open,
             open_date=buy_row.date,
             stake_amount=stake_amount,
-            amount=stake_amount / buy_row.open,
+            amount=round(stake_amount / buy_row.open, 8),
             fee_open=self.fee,
             fee_close=self.fee,
             is_open=True,
@@ -246,14 +247,15 @@ class Backtesting:
                 return BacktestResult(pair=pair,
                                       profit_percent=trade.calc_profit_ratio(rate=closerate),
                                       profit_abs=trade.calc_profit(rate=closerate),
-                                      open_time=buy_row.date,
-                                      close_time=sell_row.date,
-                                      trade_duration=trade_dur,
-                                      open_index=buy_row.Index,
-                                      close_index=sell_row.Index,
-                                      open_at_end=False,
+                                      open_date=buy_row.date,
                                       open_rate=buy_row.open,
+                                      open_fee=self.fee,
+                                      close_date=sell_row.date,
                                       close_rate=closerate,
+                                      close_fee=self.fee,
+                                      amount=trade.amount,
+                                      trade_duration=trade_dur,
+                                      open_at_end=False,
                                       sell_reason=sell.sell_type
                                       )
             if partial_ohlcv:
@@ -262,15 +264,16 @@ class Backtesting:
             bt_res = BacktestResult(pair=pair,
                                     profit_percent=trade.calc_profit_ratio(rate=sell_row.open),
                                     profit_abs=trade.calc_profit(rate=sell_row.open),
-                                    open_time=buy_row.date,
-                                    close_time=sell_row.date,
+                                    open_date=buy_row.date,
+                                    open_rate=buy_row.open,
+                                    open_fee=self.fee,
+                                    close_date=sell_row.date,
+                                    close_rate=sell_row.open,
+                                    close_fee=self.fee,
+                                    amount=trade.amount,
                                     trade_duration=int((
                                         sell_row.date - buy_row.date).total_seconds() // 60),
-                                    open_index=buy_row.Index,
-                                    close_index=sell_row.Index,
                                     open_at_end=True,
-                                    open_rate=buy_row.open,
-                                    close_rate=sell_row.open,
                                     sell_reason=SellType.FORCE_SELL
                                     )
             logger.debug(f"{pair} - Force selling still open trade, "
@@ -356,8 +359,8 @@ class Backtesting:
 
                 if trade_entry:
                     logger.debug(f"{pair} - Locking pair till "
-                                 f"close_time={trade_entry.close_time}")
-                    lock_pair_until[pair] = trade_entry.close_time
+                                 f"close_date={trade_entry.close_date}")
+                    lock_pair_until[pair] = trade_entry.close_date
                     trades.append(trade_entry)
                 else:
                     # Set lock_pair_until to end of testing period if trade could not be closed
@@ -400,10 +403,9 @@ class Backtesting:
             preprocessed[pair] = trim_dataframe(df, timerange)
         min_date, max_date = history.get_timerange(preprocessed)
 
-        logger.info(
-            'Backtesting with data from %s up to %s (%s days)..',
-            min_date.isoformat(), max_date.isoformat(), (max_date - min_date).days
-        )
+        logger.info(f'Backtesting with data from {min_date.strftime(DATETIME_PRINT_FORMAT)} '
+                    f'up to {max_date.strftime(DATETIME_PRINT_FORMAT)} '
+                    f'({(max_date - min_date).days} days)..')
         # Execute backtest and print results
         all_results[self.strategy.get_strategy_name()] = self.backtest(
             processed=preprocessed,
@@ -414,8 +416,10 @@ class Backtesting:
             position_stacking=position_stacking,
         )
 
+        stats = generate_backtest_stats(self.config, data, all_results,
+                                        min_date=min_date, max_date=max_date)
         if self.config.get('export', False):
-            store_backtest_result(self.config['exportfilename'], all_results)
+            store_backtest_stats(self.config['exportfilename'], stats)
 
         # Show backtest results
-        stats = generate_backtest_stats(self.config, data, all_results)
        show_backtest_results(self.config, stats)
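The log lines above switch from %-style ISO output to the shared DATETIME_PRINT_FORMAT constant; a quick sketch of the difference, assuming the constant resolves to '%Y-%m-%d %H:%M:%S' as used elsewhere in this commit:

import arrow

DATETIME_PRINT_FORMAT = '%Y-%m-%d %H:%M:%S'   # assumed value of the shared constant

min_date = arrow.get(2020, 1, 1)
print(min_date.isoformat())                      # 2020-01-01T00:00:00+00:00 (old log style)
print(min_date.strftime(DATETIME_PRINT_FORMAT))  # 2020-01-01 00:00:00 (new log style)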
@@ -4,27 +4,28 @@
 This module contains the hyperopt logic
 """
 
+import io
 import locale
 import logging
 import random
 import warnings
-from math import ceil
 from collections import OrderedDict
+from math import ceil
 from operator import itemgetter
 from pathlib import Path
 from pprint import pformat
 from typing import Any, Dict, List, Optional
 
+import progressbar
 import rapidjson
+import tabulate
 from colorama import Fore, Style
+from colorama import init as colorama_init
 from joblib import (Parallel, cpu_count, delayed, dump, load,
                     wrap_non_picklable_objects)
-from pandas import DataFrame, json_normalize, isna
-import progressbar
-import tabulate
-from os import path
-import io
+from pandas import DataFrame, isna, json_normalize
 
+from freqtrade.constants import DATETIME_PRINT_FORMAT
 from freqtrade.data.converter import trim_dataframe
 from freqtrade.data.history import get_timerange
 from freqtrade.exceptions import OperationalException
@@ -32,9 +33,11 @@ from freqtrade.misc import plural, round_dict
 from freqtrade.optimize.backtesting import Backtesting
 # Import IHyperOpt and IHyperOptLoss to allow unpickling classes from these modules
 from freqtrade.optimize.hyperopt_interface import IHyperOpt  # noqa: F401
-from freqtrade.optimize.hyperopt_loss_interface import IHyperOptLoss  # noqa: F401
+from freqtrade.optimize.hyperopt_loss_interface import \
+    IHyperOptLoss  # noqa: F401
 from freqtrade.resolvers.hyperopt_resolver import (HyperOptLossResolver,
                                                    HyperOptResolver)
+from freqtrade.strategy import IStrategy
 
 # Suppress scikit-learn FutureWarnings from skopt
 with warnings.catch_warnings():
@@ -312,11 +315,16 @@ class Hyperopt:
 
         trials = json_normalize(results, max_level=1)
         trials['Best'] = ''
+        if 'results_metrics.winsdrawslosses' not in trials.columns:
+            # Ensure compatibility with older versions of hyperopt results
+            trials['results_metrics.winsdrawslosses'] = 'N/A'
+
         trials = trials[['Best', 'current_epoch', 'results_metrics.trade_count',
+                         'results_metrics.winsdrawslosses',
                          'results_metrics.avg_profit', 'results_metrics.total_profit',
                          'results_metrics.profit', 'results_metrics.duration',
                          'loss', 'is_initial_point', 'is_best']]
-        trials.columns = ['Best', 'Epoch', 'Trades', 'Avg profit', 'Total profit',
+        trials.columns = ['Best', 'Epoch', 'Trades', 'W/D/L', 'Avg profit', 'Total profit',
                           'Profit', 'Avg duration', 'Objective', 'is_initial_point', 'is_best']
         trials['is_profit'] = False
         trials.loc[trials['is_initial_point'], 'Best'] = '* '
@@ -390,7 +398,7 @@ class Hyperopt:
             return
 
         # Verification for overwrite
-        if path.isfile(csv_file):
+        if Path(csv_file).is_file():
            logger.error(f"CSV file already exists: {csv_file}")
            return
 
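The os.path import disappears in favour of pathlib; for regular files the two checks are equivalent, e.g.:

from os import path
from pathlib import Path

csv_file = 'user_data/hyperopt.csv'   # example path, not taken from the diff
path.isfile(csv_file)        # old call
Path(csv_file).is_file()     # new call, same result for an existing regular file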
@@ -558,9 +566,17 @@ class Hyperopt:
         }
 
     def _calculate_results_metrics(self, backtesting_results: DataFrame) -> Dict:
+        wins = len(backtesting_results[backtesting_results.profit_percent > 0])
+        draws = len(backtesting_results[backtesting_results.profit_percent == 0])
+        losses = len(backtesting_results[backtesting_results.profit_percent < 0])
         return {
             'trade_count': len(backtesting_results.index),
+            'wins': wins,
+            'draws': draws,
+            'losses': losses,
+            'winsdrawslosses': f"{wins}/{draws}/{losses}",
             'avg_profit': backtesting_results.profit_percent.mean() * 100.0,
+            'median_profit': backtesting_results.profit_percent.median() * 100.0,
             'total_profit': backtesting_results.profit_abs.sum(),
             'profit': backtesting_results.profit_percent.sum() * 100.0,
             'duration': backtesting_results.trade_duration.mean(),
@@ -572,7 +588,10 @@ class Hyperopt:
         """
         stake_cur = self.config['stake_currency']
         return (f"{results_metrics['trade_count']:6d} trades. "
+                f"{results_metrics['wins']}/{results_metrics['draws']}"
+                f"/{results_metrics['losses']} Wins/Draws/Losses. "
                 f"Avg profit {results_metrics['avg_profit']: 6.2f}%. "
+                f"Median profit {results_metrics['median_profit']: 6.2f}%. "
                 f"Total profit {results_metrics['total_profit']: 11.8f} {stake_cur} "
                 f"({results_metrics['profit']: 7.2f}\N{GREEK CAPITAL LETTER SIGMA}%). "
                 f"Avg duration {results_metrics['duration']:5.1f} min."
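The new wins/draws/losses counters are plain boolean-mask counts over the backtest results; a standalone sketch with made-up numbers:

import pandas as pd

# Hypothetical backtest results - only the column used by the metric matters here.
results = pd.DataFrame({'profit_percent': [0.02, -0.01, 0.0, 0.03, -0.02]})

wins = len(results[results.profit_percent > 0])     # 2
draws = len(results[results.profit_percent == 0])   # 1
losses = len(results[results.profit_percent < 0])   # 2
print(f"{wins}/{draws}/{losses}")                    # "2/1/2", the value shown in the W/D/L column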
@@ -625,15 +644,17 @@ class Hyperopt:
             preprocessed[pair] = trim_dataframe(df, timerange)
         min_date, max_date = get_timerange(data)
 
-        logger.info(
-            'Hyperopting with data from %s up to %s (%s days)..',
-            min_date.isoformat(), max_date.isoformat(), (max_date - min_date).days
-        )
+        logger.info(f'Hyperopting with data from {min_date.strftime(DATETIME_PRINT_FORMAT)} '
+                    f'up to {max_date.strftime(DATETIME_PRINT_FORMAT)} '
+                    f'({(max_date - min_date).days} days)..')
         dump(preprocessed, self.data_pickle_file)
 
         # We don't need exchange instance anymore while running hyperopt
         self.backtesting.exchange = None  # type: ignore
         self.backtesting.pairlists = None  # type: ignore
+        self.backtesting.strategy.dp = None  # type: ignore
+        IStrategy.dp = None  # type: ignore
 
         self.epochs = self.load_previous_results(self.results_file)
 
@@ -644,6 +665,10 @@ class Hyperopt:
 
         self.dimensions: List[Dimension] = self.hyperopt_space()
         self.opt = self.get_optimizer(self.dimensions, config_jobs)
+
+        if self.print_colorized:
+            colorama_init(autoreset=True)
+
         try:
             with Parallel(n_jobs=config_jobs) as parallel:
                 jobs = parallel._effective_n_jobs()
@@ -43,7 +43,7 @@ class SharpeHyperOptLossDaily(IHyperOptLoss):
                                 normalize=True)
 
         sum_daily = (
-            results.resample(resample_freq, on='close_time').agg(
+            results.resample(resample_freq, on='close_date').agg(
                 {"profit_percent_after_slippage": sum}).reindex(t_index).fillna(0)
         )
 
@@ -45,7 +45,7 @@ class SortinoHyperOptLossDaily(IHyperOptLoss):
                                 normalize=True)
 
         sum_daily = (
-            results.resample(resample_freq, on='close_time').agg(
+            results.resample(resample_freq, on='close_date').agg(
                 {"profit_percent_after_slippage": sum}).reindex(t_index).fillna(0)
         )
 
@@ -1,46 +1,40 @@
 import logging
-from datetime import timedelta
+from datetime import datetime, timedelta, timezone
 from pathlib import Path
 from typing import Any, Dict, List
 
+from arrow import Arrow
 from pandas import DataFrame
+from numpy import int64
 from tabulate import tabulate
 
+from freqtrade.constants import DATETIME_PRINT_FORMAT, LAST_BT_RESULT_FN
+from freqtrade.data.btanalysis import calculate_max_drawdown, calculate_market_change
 from freqtrade.misc import file_dump_json
 
 logger = logging.getLogger(__name__)
 
 
-def store_backtest_result(recordfilename: Path, all_results: Dict[str, DataFrame]) -> None:
+def store_backtest_stats(recordfilename: Path, stats: Dict[str, DataFrame]) -> None:
     """
-    Stores backtest results to file (one file per strategy)
-    :param recordfilename: Destination filename
-    :param all_results: Dict of Dataframes, one results dataframe per strategy
+    Stores backtest results
+    :param recordfilename: Path object, which can either be a filename or a directory.
+        Filenames will be appended with a timestamp right before the suffix
+        while for diectories, <directory>/backtest-result-<datetime>.json will be used as filename
+    :param stats: Dataframe containing the backtesting statistics
     """
-    for strategy, results in all_results.items():
-        records = backtest_result_to_list(results)
-
-        if records:
-            filename = recordfilename
-            if len(all_results) > 1:
-                # Inject strategy to filename
-                filename = Path.joinpath(
-                    recordfilename.parent,
-                    f'{recordfilename.stem}-{strategy}').with_suffix(recordfilename.suffix)
-            logger.info(f'Dumping backtest results to {filename}')
-            file_dump_json(filename, records)
-
-
-def backtest_result_to_list(results: DataFrame) -> List[List]:
-    """
-    Converts a list of Backtest-results to list
-    :param results: Dataframe containing results for one strategy
-    :return: List of Lists containing the trades
-    """
-    return [[t.pair, t.profit_percent, t.open_time.timestamp(),
-             t.close_time.timestamp(), t.open_index - 1, t.trade_duration,
-             t.open_rate, t.close_rate, t.open_at_end, t.sell_reason.value]
-            for index, t in results.iterrows()]
+    if recordfilename.is_dir():
+        filename = (recordfilename /
+                    f'backtest-result-{datetime.now().strftime("%Y-%m-%d_%H-%M-%S")}.json')
+    else:
+        filename = Path.joinpath(
+            recordfilename.parent,
+            f'{recordfilename.stem}-{datetime.now().strftime("%Y-%m-%d_%H-%M-%S")}'
+        ).with_suffix(recordfilename.suffix)
+    file_dump_json(filename, stats)
+
+    latest_filename = Path.joinpath(filename.parent, LAST_BT_RESULT_FN)
+    file_dump_json(latest_filename, {'latest_backtest': str(filename.name)})
 
 
 def _get_line_floatfmt() -> List[str]:
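The new naming scheme can be illustrated with both call styles; paths and the timestamp below are examples, not output from this commit:

from pathlib import Path
from datetime import datetime

now = datetime(2020, 6, 1, 12, 30, 5)   # example timestamp

# Directory target: <dir>/backtest-result-<datetime>.json
recordfilename = Path('user_data/backtest_results')
filename = recordfilename / f'backtest-result-{now.strftime("%Y-%m-%d_%H-%M-%S")}.json'
# -> user_data/backtest_results/backtest-result-2020-06-01_12-30-05.json

# File target: the timestamp is appended right before the suffix
recordfilename = Path('user_data/backtest-result.json')
filename = (recordfilename.parent /
            f'{recordfilename.stem}-{now.strftime("%Y-%m-%d_%H-%M-%S")}').with_suffix(
                recordfilename.suffix)
# -> user_data/backtest-result-2020-06-01_12-30-05.json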
@@ -66,11 +60,12 @@ def _generate_result_line(result: DataFrame, max_open_trades: int, first_column:
     return {
         'key': first_column,
         'trades': len(result),
-        'profit_mean': result['profit_percent'].mean(),
-        'profit_mean_pct': result['profit_percent'].mean() * 100.0,
+        'profit_mean': result['profit_percent'].mean() if len(result) > 0 else 0.0,
+        'profit_mean_pct': result['profit_percent'].mean() * 100.0 if len(result) > 0 else 0.0,
         'profit_sum': result['profit_percent'].sum(),
         'profit_sum_pct': result['profit_percent'].sum() * 100.0,
         'profit_total_abs': result['profit_abs'].sum(),
+        'profit_total': result['profit_percent'].sum() / max_open_trades,
         'profit_total_pct': result['profit_percent'].sum() * 100.0 / max_open_trades,
         'duration_avg': str(timedelta(
                             minutes=round(result['trade_duration'].mean()))
@@ -141,7 +136,7 @@ def generate_sell_reason_stats(max_open_trades: int, results: DataFrame) -> List
             'profit_sum': profit_sum,
             'profit_sum_pct': round(profit_sum * 100, 2),
             'profit_total_abs': result['profit_abs'].sum(),
-            'profit_pct_total': profit_percent_tot,
+            'profit_total_pct': profit_percent_tot,
             }
         )
     return tabular_data
@@ -189,18 +184,58 @@ def generate_edge_table(results: dict) -> str:
                     floatfmt=floatfmt, tablefmt="orgtbl", stralign="right")  # type: ignore
 
 
+def generate_daily_stats(results: DataFrame) -> Dict[str, Any]:
+    if len(results) == 0:
+        return {
+            'backtest_best_day': 0,
+            'backtest_worst_day': 0,
+            'winning_days': 0,
+            'draw_days': 0,
+            'losing_days': 0,
+            'winner_holding_avg': timedelta(),
+            'loser_holding_avg': timedelta(),
+        }
+    daily_profit = results.resample('1d', on='close_date')['profit_percent'].sum()
+    worst = min(daily_profit)
+    best = max(daily_profit)
+    winning_days = sum(daily_profit > 0)
+    draw_days = sum(daily_profit == 0)
+    losing_days = sum(daily_profit < 0)
+
+    winning_trades = results.loc[results['profit_percent'] > 0]
+    losing_trades = results.loc[results['profit_percent'] < 0]
+
+    return {
+        'backtest_best_day': best,
+        'backtest_worst_day': worst,
+        'winning_days': winning_days,
+        'draw_days': draw_days,
+        'losing_days': losing_days,
+        'winner_holding_avg': (timedelta(minutes=round(winning_trades['trade_duration'].mean()))
+                               if not winning_trades.empty else timedelta()),
+        'loser_holding_avg': (timedelta(minutes=round(losing_trades['trade_duration'].mean()))
+                              if not losing_trades.empty else timedelta()),
+    }
+
+
 def generate_backtest_stats(config: Dict, btdata: Dict[str, DataFrame],
-                            all_results: Dict[str, DataFrame]) -> Dict[str, Any]:
+                            all_results: Dict[str, DataFrame],
+                            min_date: Arrow, max_date: Arrow
+                            ) -> Dict[str, Any]:
     """
     :param config: Configuration object used for backtest
     :param btdata: Backtest data
     :param all_results: backtest result - dictionary with { Strategy: results}.
+    :param min_date: Backtest start date
+    :param max_date: Backtest end date
     :return:
     Dictionary containing results per strategy and a stratgy summary.
     """
     stake_currency = config['stake_currency']
     max_open_trades = config['max_open_trades']
     result: Dict[str, Any] = {'strategy': {}}
+    market_change = calculate_market_change(btdata, 'close')
+
     for strategy, results in all_results.items():
 
         pair_results = generate_pair_metrics(btdata, stake_currency=stake_currency,
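generate_daily_stats leans on a daily resample of close dates; the core of it in isolation, with toy data (dates and profits invented):

import pandas as pd

results = pd.DataFrame({
    'close_date': pd.to_datetime(['2020-01-01 10:00', '2020-01-01 18:00', '2020-01-02 09:00']),
    'profit_percent': [0.01, -0.02, 0.03],
})

daily_profit = results.resample('1d', on='close_date')['profit_percent'].sum()
# 2020-01-01   -0.01
# 2020-01-02    0.03
winning_days = sum(daily_profit > 0)   # 1
losing_days = sum(daily_profit < 0)    # 1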
@@ -212,14 +247,58 @@ def generate_backtest_stats(config: Dict, btdata: Dict[str, DataFrame],
                                                 max_open_trades=max_open_trades,
                                                 results=results.loc[results['open_at_end']],
                                                 skip_nan=True)
+        daily_stats = generate_daily_stats(results)
+
+        results['open_timestamp'] = results['open_date'].astype(int64) // 1e6
+        results['close_timestamp'] = results['close_date'].astype(int64) // 1e6
+
+        backtest_days = (max_date - min_date).days
         strat_stats = {
-            'trades': backtest_result_to_list(results),
+            'trades': results.to_dict(orient='records'),
             'results_per_pair': pair_results,
             'sell_reason_summary': sell_reason_stats,
             'left_open_trades': left_open_results,
-        }
+            'total_trades': len(results),
+            'profit_mean': results['profit_percent'].mean() if len(results) > 0 else 0,
+            'profit_total': results['profit_percent'].sum(),
+            'profit_total_abs': results['profit_abs'].sum(),
+            'backtest_start': min_date.datetime,
+            'backtest_start_ts': min_date.timestamp * 1000,
+            'backtest_end': max_date.datetime,
+            'backtest_end_ts': max_date.timestamp * 1000,
+            'backtest_days': backtest_days,
+
+            'trades_per_day': round(len(results) / backtest_days, 2) if backtest_days > 0 else 0,
+            'market_change': market_change,
+            'pairlist': list(btdata.keys()),
+            'stake_amount': config['stake_amount'],
+            'stake_currency': config['stake_currency'],
+            'max_open_trades': (config['max_open_trades']
+                                if config['max_open_trades'] != float('inf') else -1),
+            'timeframe': config['timeframe'],
+            **daily_stats,
+        }
         result['strategy'][strategy] = strat_stats
 
+        try:
+            max_drawdown, drawdown_start, drawdown_end = calculate_max_drawdown(
+                results, value_col='profit_percent')
+            strat_stats.update({
+                'max_drawdown': max_drawdown,
+                'drawdown_start': drawdown_start,
+                'drawdown_start_ts': drawdown_start.timestamp() * 1000,
+                'drawdown_end': drawdown_end,
+                'drawdown_end_ts': drawdown_end.timestamp() * 1000,
+            })
+        except ValueError:
+            strat_stats.update({
+                'max_drawdown': 0.0,
+                'drawdown_start': datetime(1970, 1, 1, tzinfo=timezone.utc),
+                'drawdown_start_ts': 0,
+                'drawdown_end': datetime(1970, 1, 1, tzinfo=timezone.utc),
+                'drawdown_end_ts': 0,
+            })
+
     strategy_results = generate_strategy_metrics(stake_currency=stake_currency,
                                                  max_open_trades=max_open_trades,
                                                  all_results=all_results)
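The new *_timestamp columns convert pandas datetimes to epoch milliseconds through the nanosecond integer view; roughly:

import pandas as pd
from numpy import int64

dates = pd.Series(pd.to_datetime(['2020-01-01 00:00:00']))
ms = dates.astype(int64) // 1e6   # nanoseconds since epoch -> milliseconds
# 0    1.5778368e+12  (i.e. 1577836800000 ms)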
@@ -273,7 +352,7 @@ def text_table_sell_reason(sell_reason_stats: List[Dict[str, Any]], stake_curren
 
     output = [[
         t['sell_reason'], t['trades'], t['wins'], t['draws'], t['losses'],
-        t['profit_mean_pct'], t['profit_sum_pct'], t['profit_total_abs'], t['profit_pct_total'],
+        t['profit_mean_pct'], t['profit_sum_pct'], t['profit_total_abs'], t['profit_total_pct'],
     ] for t in sell_reason_stats]
     return tabulate(output, headers=headers, tablefmt="orgtbl", stralign="right")
 
@@ -298,6 +377,35 @@ def text_table_strategy(strategy_results, stake_currency: str) -> str:
                     floatfmt=floatfmt, tablefmt="orgtbl", stralign="right")
 
 
+def text_table_add_metrics(strat_results: Dict) -> str:
+    if len(strat_results['trades']) > 0:
+        min_trade = min(strat_results['trades'], key=lambda x: x['open_date'])
+        metrics = [
+            ('Backtesting from', strat_results['backtest_start'].strftime(DATETIME_PRINT_FORMAT)),
+            ('Backtesting to', strat_results['backtest_end'].strftime(DATETIME_PRINT_FORMAT)),
+            ('Total trades', strat_results['total_trades']),
+            ('First trade', min_trade['open_date'].strftime(DATETIME_PRINT_FORMAT)),
+            ('First trade Pair', min_trade['pair']),
+            ('Total Profit %', f"{round(strat_results['profit_total'] * 100, 2)}%"),
+            ('Trades per day', strat_results['trades_per_day']),
+            ('Best day', f"{round(strat_results['backtest_best_day'] * 100, 2)}%"),
+            ('Worst day', f"{round(strat_results['backtest_worst_day'] * 100, 2)}%"),
+            ('Days win/draw/lose', f"{strat_results['winning_days']} / "
+                                   f"{strat_results['draw_days']} / {strat_results['losing_days']}"),
+            ('Avg. Duration Winners', f"{strat_results['winner_holding_avg']}"),
+            ('Avg. Duration Loser', f"{strat_results['loser_holding_avg']}"),
+            ('', ''),  # Empty line to improve readability
+            ('Max Drawdown', f"{round(strat_results['max_drawdown'] * 100, 2)}%"),
+            ('Drawdown Start', strat_results['drawdown_start'].strftime(DATETIME_PRINT_FORMAT)),
+            ('Drawdown End', strat_results['drawdown_end'].strftime(DATETIME_PRINT_FORMAT)),
+            ('Market change', f"{round(strat_results['market_change'] * 100, 2)}%"),
+        ]
+
+        return tabulate(metrics, headers=["Metric", "Value"], tablefmt="orgtbl")
+    else:
+        return ''
+
+
 def show_backtest_results(config: Dict, backtest_stats: Dict):
     stake_currency = config['stake_currency']
 
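text_table_add_metrics is a plain two-column tabulate call; a reduced example of the output shape, with invented values:

from tabulate import tabulate

metrics = [
    ('Total trades', 42),
    ('Total Profit %', '12.34%'),
    ('Days win/draw/lose', '10 / 3 / 5'),
]
print(tabulate(metrics, headers=["Metric", "Value"], tablefmt="orgtbl"))
# Prints an org-mode style table with one metric per row.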
@@ -312,15 +420,21 @@ def show_backtest_results(config: Dict, backtest_stats: Dict):
 
         table = text_table_sell_reason(sell_reason_stats=results['sell_reason_summary'],
                                        stake_currency=stake_currency)
-        if isinstance(table, str):
+        if isinstance(table, str) and len(table) > 0:
             print(' SELL REASON STATS '.center(len(table.splitlines()[0]), '='))
             print(table)
 
         table = text_table_bt_results(results['left_open_trades'], stake_currency=stake_currency)
-        if isinstance(table, str):
+        if isinstance(table, str) and len(table) > 0:
             print(' LEFT OPEN TRADES REPORT '.center(len(table.splitlines()[0]), '='))
             print(table)
-        if isinstance(table, str):
+
+        table = text_table_add_metrics(results)
+        if isinstance(table, str) and len(table) > 0:
+            print(' SUMMARY METRICS '.center(len(table.splitlines()[0]), '='))
+            print(table)
+
+        if isinstance(table, str) and len(table) > 0:
             print('=' * len(table.splitlines()[0]))
             print()
 
@@ -26,12 +26,11 @@ class AgeFilter(IPairList):
         self._min_days_listed = pairlistconfig.get('min_days_listed', 10)
 
         if self._min_days_listed < 1:
-            raise OperationalException("AgeFilter requires min_days_listed must be >= 1")
+            raise OperationalException("AgeFilter requires min_days_listed to be >= 1")
         if self._min_days_listed > exchange.ohlcv_candle_limit:
-            raise OperationalException("AgeFilter requires min_days_listed must not exceed "
+            raise OperationalException("AgeFilter requires min_days_listed to not exceed "
                                        "exchange max request size "
                                        f"({exchange.ohlcv_candle_limit})")
-        self._enabled = self._min_days_listed >= 1
 
     @property
     def needstickers(self) -> bool:
@@ -162,6 +162,11 @@ class IPairList(ABC):
                                    f"{self._exchange.name}. Removing it from whitelist..")
                     continue
 
+                if not self._exchange.market_is_tradable(markets[pair]):
+                    logger.warning(f"Pair {pair} is not tradable with Freqtrade."
+                                   "Removing it from whitelist..")
+                    continue
+
                 if self._exchange.get_pair_quote_currency(pair) != self._config['stake_currency']:
                     logger.warning(f"Pair {pair} is not compatible with your stake currency "
                                    f"{self._config['stake_currency']}. Removing it from whitelist..")
@@ -4,6 +4,7 @@ Price pair list filter
 import logging
 from typing import Any, Dict
 
+from freqtrade.exceptions import OperationalException
 from freqtrade.pairlist.IPairList import IPairList
 
 
@@ -18,11 +19,17 @@ class PriceFilter(IPairList):
         super().__init__(exchange, pairlistmanager, config, pairlistconfig, pairlist_pos)
 
         self._low_price_ratio = pairlistconfig.get('low_price_ratio', 0)
+        if self._low_price_ratio < 0:
+            raise OperationalException("PriceFilter requires low_price_ratio to be >= 0")
         self._min_price = pairlistconfig.get('min_price', 0)
+        if self._min_price < 0:
+            raise OperationalException("PriceFilter requires min_price to be >= 0")
         self._max_price = pairlistconfig.get('max_price', 0)
-        self._enabled = ((self._low_price_ratio != 0) or
-                         (self._min_price != 0) or
-                         (self._max_price != 0))
+        if self._max_price < 0:
+            raise OperationalException("PriceFilter requires max_price to be >= 0")
+        self._enabled = ((self._low_price_ratio > 0) or
+                         (self._min_price > 0) or
+                         (self._max_price > 0))
 
     @property
     def needstickers(self) -> bool:
@@ -14,7 +14,7 @@ from freqtrade.pairlist.IPairList import IPairList
 logger = logging.getLogger(__name__)
 
 
-SORT_VALUES = ['askVolume', 'bidVolume', 'quoteVolume']
+SORT_VALUES = ['quoteVolume']
 
 
 class VolumePairList(IPairList):
@@ -45,11 +45,6 @@ class VolumePairList(IPairList):
             raise OperationalException(
                 f'key {self._sort_key} not in {SORT_VALUES}')
 
-        if self._sort_key != 'quoteVolume':
-            logger.warning(
-                "DEPRECATED: using any key other than quoteVolume for VolumePairList is deprecated."
-            )
-
     @property
     def needstickers(self) -> bool:
         """
@@ -2,7 +2,7 @@
 This module contains the class to persist trades into SQLite
 """
 import logging
-from datetime import datetime
+from datetime import datetime, timezone
 from decimal import Decimal
 from typing import Any, Dict, List, Optional
 
@@ -17,6 +17,7 @@ from sqlalchemy.orm.session import sessionmaker
 from sqlalchemy.pool import StaticPool
 
 from freqtrade.exceptions import OperationalException
+from freqtrade.misc import safe_value_fallback
 
 logger = logging.getLogger(__name__)
 
@@ -86,7 +87,7 @@ def check_migrate(engine) -> None:
     logger.debug(f'trying {table_back_name}')
 
     # Check for latest column
-    if not has_column(cols, 'timeframe'):
+    if not has_column(cols, 'amount_requested'):
         logger.info(f'Running database migration - backup available as {table_back_name}')
 
         fee_open = get_column_def(cols, 'fee_open', 'fee')
@@ -119,6 +120,7 @@ def check_migrate(engine) -> None:
             cols, 'close_profit_abs',
             f"(amount * close_rate * (1 - {fee_close})) - {open_trade_price}")
         sell_order_status = get_column_def(cols, 'sell_order_status', 'null')
+        amount_requested = get_column_def(cols, 'amount_requested', 'amount')
 
         # Schema migration necessary
         engine.execute(f"alter table trades rename to {table_back_name}")
@@ -134,7 +136,7 @@ def check_migrate(engine) -> None:
                 fee_open, fee_open_cost, fee_open_currency,
                 fee_close, fee_close_cost, fee_open_currency, open_rate,
                 open_rate_requested, close_rate, close_rate_requested, close_profit,
-                stake_amount, amount, open_date, close_date, open_order_id,
+                stake_amount, amount, amount_requested, open_date, close_date, open_order_id,
                 stop_loss, stop_loss_pct, initial_stop_loss, initial_stop_loss_pct,
                 stoploss_order_id, stoploss_last_update,
                 max_rate, min_rate, sell_reason, sell_order_status, strategy,
@@ -153,7 +155,7 @@ def check_migrate(engine) -> None:
                 {fee_close_cost} fee_close_cost, {fee_close_currency} fee_close_currency,
                 open_rate, {open_rate_requested} open_rate_requested, close_rate,
                 {close_rate_requested} close_rate_requested, close_profit,
-                stake_amount, amount, open_date, close_date, open_order_id,
+                stake_amount, amount, {amount_requested}, open_date, close_date, open_order_id,
                 {stop_loss} stop_loss, {stop_loss_pct} stop_loss_pct,
                 {initial_stop_loss} initial_stop_loss,
                 {initial_stop_loss_pct} initial_stop_loss_pct,
@@ -215,6 +217,7 @@ class Trade(_DECL_BASE):
     close_profit_abs = Column(Float)
     stake_amount = Column(Float, nullable=False)
     amount = Column(Float)
+    amount_requested = Column(Float)
     open_date = Column(DateTime, nullable=False, default=datetime.utcnow)
     close_date = Column(DateTime)
     open_order_id = Column(String)
@@ -256,6 +259,7 @@ class Trade(_DECL_BASE):
             'is_open': self.is_open,
             'exchange': self.exchange,
             'amount': round(self.amount, 8),
+            'amount_requested': round(self.amount_requested, 8) if self.amount_requested else None,
             'stake_amount': round(self.stake_amount, 8),
             'strategy': self.strategy,
             'ticker_interval': self.timeframe,  # DEPRECATED
@@ -270,16 +274,17 @@ class Trade(_DECL_BASE):
 
             'open_date_hum': arrow.get(self.open_date).humanize(),
             'open_date': self.open_date.strftime("%Y-%m-%d %H:%M:%S"),
-            'open_timestamp': int(self.open_date.timestamp() * 1000),
+            'open_timestamp': int(self.open_date.replace(tzinfo=timezone.utc).timestamp() * 1000),
             'open_rate': self.open_rate,
             'open_rate_requested': self.open_rate_requested,
-            'open_trade_price': self.open_trade_price,
+            'open_trade_price': round(self.open_trade_price, 8),
 
             'close_date_hum': (arrow.get(self.close_date).humanize()
                                if self.close_date else None),
             'close_date': (self.close_date.strftime("%Y-%m-%d %H:%M:%S")
                            if self.close_date else None),
-            'close_timestamp': int(self.close_date.timestamp() * 1000) if self.close_date else None,
+            'close_timestamp': int(self.close_date.replace(
+                tzinfo=timezone.utc).timestamp() * 1000) if self.close_date else None,
             'close_rate': self.close_rate,
             'close_rate_requested': self.close_rate_requested,
             'close_profit': self.close_profit,
@@ -294,8 +299,8 @@ class Trade(_DECL_BASE):
             'stoploss_order_id': self.stoploss_order_id,
             'stoploss_last_update': (self.stoploss_last_update.strftime("%Y-%m-%d %H:%M:%S")
                                      if self.stoploss_last_update else None),
-            'stoploss_last_update_timestamp': (int(self.stoploss_last_update.timestamp() * 1000)
-                                               if self.stoploss_last_update else None),
+            'stoploss_last_update_timestamp': int(self.stoploss_last_update.replace(
+                tzinfo=timezone.utc).timestamp() * 1000) if self.stoploss_last_update else None,
             'initial_stop_loss': self.initial_stop_loss,  # Deprecated - should not be used
             'initial_stop_loss_abs': self.initial_stop_loss,
             'initial_stop_loss_ratio': (self.initial_stop_loss_pct
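The replace(tzinfo=timezone.utc) step matters because the stored datetimes are naive but represent UTC; without it, timestamp() would apply the host's local offset. A minimal sketch:

from datetime import datetime, timezone

open_date = datetime(2020, 6, 1, 12, 0, 0)   # naive datetime, stored as UTC

# Interpreted as local time - the value shifts with the host timezone:
local_ms = int(open_date.timestamp() * 1000)

# Interpreted explicitly as UTC - stable regardless of host timezone:
utc_ms = int(open_date.replace(tzinfo=timezone.utc).timestamp() * 1000)
# utc_ms == 1591012800000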
@@ -365,20 +370,20 @@ class Trade(_DECL_BASE):
         """
         order_type = order['type']
         # Ignore open and cancelled orders
-        if order['status'] == 'open' or order['price'] is None:
+        if order['status'] == 'open' or safe_value_fallback(order, 'average', 'price') is None:
             return
 
         logger.info('Updating trade (id=%s) ...', self.id)
 
         if order_type in ('market', 'limit') and order['side'] == 'buy':
             # Update open rate and actual amount
-            self.open_rate = Decimal(order['price'])
-            self.amount = Decimal(order.get('filled', order['amount']))
+            self.open_rate = Decimal(safe_value_fallback(order, 'average', 'price'))
+            self.amount = Decimal(safe_value_fallback(order, 'filled', 'amount'))
             self.recalc_open_trade_price()
             logger.info('%s_BUY has been fulfilled for %s.', order_type.upper(), self)
             self.open_order_id = None
         elif order_type in ('market', 'limit') and order['side'] == 'sell':
-            self.close(order['price'])
+            self.close(safe_value_fallback(order, 'average', 'price'))
             logger.info('%s_SELL has been fulfilled for %s.', order_type.upper(), self)
         elif order_type in ('stop_loss_limit', 'stop-loss', 'stop'):
             self.stoploss_order_id = None
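The motivation here is market orders on some exchanges, where the returned order can carry price as None and only report the fill through an average field; with the fallback both shapes are handled. Illustrative payloads (field values invented):

from freqtrade.misc import safe_value_fallback

# Limit order: 'price' is set, 'average' may be missing or None.
limit_order = {'type': 'limit', 'side': 'buy', 'status': 'closed',
               'price': 0.00258580, 'average': None,
               'amount': 90.99, 'filled': 90.99}

# Market order: some exchanges only report the executed rate via 'average'.
market_order = {'type': 'market', 'side': 'buy', 'status': 'closed',
                'price': None, 'average': 0.00259125,
                'amount': 91.0, 'filled': 91.0}

for order in (limit_order, market_order):
    rate = safe_value_fallback(order, 'average', 'price')
    amount = safe_value_fallback(order, 'filled', 'amount')
    # rate/amount are usable for both order shapes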
@@ -8,7 +8,8 @@ from freqtrade.configuration import TimeRange
 from freqtrade.data.btanalysis import (calculate_max_drawdown,
                                        combine_dataframes_with_mean,
                                        create_cum_profit,
-                                       extract_trades_of_period, load_trades)
+                                       extract_trades_of_period,
+                                       load_trades)
 from freqtrade.data.converter import trim_dataframe
 from freqtrade.data.dataprovider import DataProvider
 from freqtrade.data.history import load_data
@@ -37,15 +38,15 @@ def init_plotscript(config):
     """
 
     if "pairs" in config:
-        pairs = config["pairs"]
+        pairs = config['pairs']
     else:
-        pairs = config["exchange"]["pair_whitelist"]
+        pairs = config['exchange']['pair_whitelist']
 
     # Set timerange to use
-    timerange = TimeRange.parse_timerange(config.get("timerange"))
+    timerange = TimeRange.parse_timerange(config.get('timerange'))
 
     data = load_data(
-        datadir=config.get("datadir"),
+        datadir=config.get('datadir'),
         pairs=pairs,
         timeframe=config.get('timeframe', '5m'),
         timerange=timerange,
@@ -53,19 +54,22 @@ def init_plotscript(config):
     )
 
     no_trades = False
+    filename = config.get('exportfilename')
     if config.get('no_trades', False):
         no_trades = True
-    elif not config['exportfilename'].is_file() and config['trade_source'] == 'file':
-        logger.warning("Backtest file is missing skipping trades.")
-        no_trades = True
+    elif config['trade_source'] == 'file':
+        if not filename.is_dir() and not filename.is_file():
+            logger.warning("Backtest file is missing skipping trades.")
+            no_trades = True
 
     trades = load_trades(
         config['trade_source'],
         db_url=config.get('db_url'),
-        exportfilename=config.get('exportfilename'),
-        no_trades=no_trades
+        exportfilename=filename,
+        no_trades=no_trades,
+        strategy=config.get('strategy'),
     )
-    trades = trim_dataframe(trades, timerange, 'open_time')
+    trades = trim_dataframe(trades, timerange, 'open_date')
 
     return {"ohlcv": data,
             "trades": trades,
@@ -165,10 +169,11 @@ def plot_trades(fig, trades: pd.DataFrame) -> make_subplots:
     if trades is not None and len(trades) > 0:
         # Create description for sell summarizing the trade
         trades['desc'] = trades.apply(lambda row: f"{round(row['profit_percent'] * 100, 1)}%, "
-                                                  f"{row['sell_reason']}, {row['duration']} min",
+                                                  f"{row['sell_reason']}, "
+                                                  f"{row['trade_duration']} min",
                                       axis=1)
         trade_buys = go.Scatter(
-            x=trades["open_time"],
+            x=trades["open_date"],
             y=trades["open_rate"],
             mode='markers',
             name='Trade buy',
@@ -183,7 +188,7 @@ def plot_trades(fig, trades: pd.DataFrame) -> make_subplots:
         )
 
         trade_sells = go.Scatter(
-            x=trades.loc[trades['profit_percent'] > 0, "close_time"],
+            x=trades.loc[trades['profit_percent'] > 0, "close_date"],
             y=trades.loc[trades['profit_percent'] > 0, "close_rate"],
             text=trades.loc[trades['profit_percent'] > 0, "desc"],
             mode='markers',
@@ -196,7 +201,7 @@ def plot_trades(fig, trades: pd.DataFrame) -> make_subplots:
             )
         )
         trade_sells_loss = go.Scatter(
-            x=trades.loc[trades['profit_percent'] <= 0, "close_time"],
+            x=trades.loc[trades['profit_percent'] <= 0, "close_date"],
             y=trades.loc[trades['profit_percent'] <= 0, "close_rate"],
             text=trades.loc[trades['profit_percent'] <= 0, "desc"],
             mode='markers',
@ -486,13 +491,13 @@ def load_and_plot_trades(config: Dict[str, Any]):
|
|||||||
pair=pair,
|
pair=pair,
|
||||||
data=df_analyzed,
|
data=df_analyzed,
|
||||||
trades=trades_pair,
|
trades=trades_pair,
|
||||||
indicators1=config.get("indicators1", []),
|
indicators1=config.get('indicators1', []),
|
||||||
indicators2=config.get("indicators2", []),
|
indicators2=config.get('indicators2', []),
|
||||||
plot_config=strategy.plot_config if hasattr(strategy, 'plot_config') else {}
|
plot_config=strategy.plot_config if hasattr(strategy, 'plot_config') else {}
|
||||||
)
|
)
|
||||||
|
|
||||||
store_plot_file(fig, filename=generate_plot_filename(pair, config['timeframe']),
|
store_plot_file(fig, filename=generate_plot_filename(pair, config['timeframe']),
|
||||||
directory=config['user_data_dir'] / "plot")
|
directory=config['user_data_dir'] / 'plot')
|
||||||
|
|
||||||
logger.info('End of plotting process. %s plots generated', pair_counter)
|
logger.info('End of plotting process. %s plots generated', pair_counter)
|
||||||
|
|
||||||
@ -509,8 +514,8 @@ def plot_profit(config: Dict[str, Any]) -> None:
|
|||||||
# Filter trades to relevant pairs
|
# Filter trades to relevant pairs
|
||||||
# Remove open pairs - we don't know the profit yet so can't calculate profit for these.
|
# Remove open pairs - we don't know the profit yet so can't calculate profit for these.
|
||||||
# Also, If only one open pair is left, then the profit-generation would fail.
|
# Also, If only one open pair is left, then the profit-generation would fail.
|
||||||
trades = trades[(trades['pair'].isin(plot_elements["pairs"]))
|
trades = trades[(trades['pair'].isin(plot_elements['pairs']))
|
||||||
& (~trades['close_time'].isnull())
|
& (~trades['close_date'].isnull())
|
||||||
]
|
]
|
||||||
if len(trades) == 0:
|
if len(trades) == 0:
|
||||||
raise OperationalException("No trades found, cannot generate Profit-plot without "
|
raise OperationalException("No trades found, cannot generate Profit-plot without "
|
||||||
@ -518,7 +523,7 @@ def plot_profit(config: Dict[str, Any]) -> None:
|
|||||||
|
|
||||||
# Create an average close price of all the pairs that were involved.
|
# Create an average close price of all the pairs that were involved.
|
||||||
# this could be useful to gauge the overall market trend
|
# this could be useful to gauge the overall market trend
|
||||||
fig = generate_profit_graph(plot_elements["pairs"], plot_elements["ohlcv"],
|
fig = generate_profit_graph(plot_elements['pairs'], plot_elements['ohlcv'],
|
||||||
trades, config.get('timeframe', '5m'))
|
trades, config.get('timeframe', '5m'))
|
||||||
store_plot_file(fig, filename='freqtrade-profit-plot.html',
|
store_plot_file(fig, filename='freqtrade-profit-plot.html',
|
||||||
directory=config['user_data_dir'] / "plot", auto_open=True)
|
directory=config['user_data_dir'] / 'plot', auto_open=True)
|
||||||
@ -23,7 +23,7 @@ class HyperOptResolver(IResolver):
object_type = IHyperOpt
object_type_str = "Hyperopt"
user_subdir = USERPATH_HYPEROPTS
initial_search_path = Path(__file__).parent.parent.joinpath('optimize').resolve()
initial_search_path = None

@staticmethod
def load_hyperopt(config: Dict) -> IHyperOpt:
@ -59,7 +59,7 @@ class IResolver:
module = importlib.util.module_from_spec(spec)
try:
spec.loader.exec_module(module)  # type: ignore # importlib does not use typehints
except (ModuleNotFoundError, SyntaxError) as err:
except (ModuleNotFoundError, SyntaxError, ImportError) as err:
# Catch errors in case a specific module is not installed
logger.warning(f"Could not import {module_path} due to '{err}'")
if enum_failed:
@ -16,8 +16,10 @@ from werkzeug.security import safe_str_cmp
from werkzeug.serving import make_server

from freqtrade.__init__ import __version__
from freqtrade.rpc.rpc import RPC, RPCException
from freqtrade.constants import DATETIME_PRINT_FORMAT
from freqtrade.persistence import Trade
from freqtrade.rpc.fiat_convert import CryptoToFiatConverter
from freqtrade.rpc.rpc import RPC, RPCException

logger = logging.getLogger(__name__)

@ -32,7 +34,7 @@ class ArrowJSONEncoder(JSONEncoder):
elif isinstance(obj, date):
return obj.strftime("%Y-%m-%d")
elif isinstance(obj, datetime):
return obj.strftime("%Y-%m-%d %H:%M:%S")
return obj.strftime(DATETIME_PRINT_FORMAT)
iterable = iter(obj)
except TypeError:
pass
@ -69,6 +71,11 @@ def rpc_catch_errors(func: Callable[..., Any]):
return func_wrapper


def shutdown_session(exception=None):
# Remove scoped session
Trade.session.remove()


class ApiServer(RPC):
"""
This class runs api server and provides rpc.rpc functionality to it
@ -103,6 +110,8 @@ class ApiServer(RPC):
self.jwt = JWTManager(self.app)
self.app.json_encoder = ArrowJSONEncoder

self.app.teardown_appcontext(shutdown_session)

# Register application handling
self.register_rest_rpc_urls()

@ -186,6 +195,7 @@ class ApiServer(RPC):
|
|||||||
self.app.add_url_rule(f'{BASE_URI}/count', 'count', view_func=self._count, methods=['GET'])
|
self.app.add_url_rule(f'{BASE_URI}/count', 'count', view_func=self._count, methods=['GET'])
|
||||||
self.app.add_url_rule(f'{BASE_URI}/daily', 'daily', view_func=self._daily, methods=['GET'])
|
self.app.add_url_rule(f'{BASE_URI}/daily', 'daily', view_func=self._daily, methods=['GET'])
|
||||||
self.app.add_url_rule(f'{BASE_URI}/edge', 'edge', view_func=self._edge, methods=['GET'])
|
self.app.add_url_rule(f'{BASE_URI}/edge', 'edge', view_func=self._edge, methods=['GET'])
|
||||||
|
self.app.add_url_rule(f'{BASE_URI}/logs', 'log', view_func=self._get_logs, methods=['GET'])
|
||||||
self.app.add_url_rule(f'{BASE_URI}/profit', 'profit',
|
self.app.add_url_rule(f'{BASE_URI}/profit', 'profit',
|
||||||
view_func=self._profit, methods=['GET'])
|
view_func=self._profit, methods=['GET'])
|
||||||
self.app.add_url_rule(f'{BASE_URI}/performance', 'performance',
|
self.app.add_url_rule(f'{BASE_URI}/performance', 'performance',
|
||||||
@ -348,6 +358,18 @@ class ApiServer(RPC):
|
|||||||
|
|
||||||
return self.rest_dump(stats)
|
return self.rest_dump(stats)
|
||||||
|
|
||||||
|
@require_login
|
||||||
|
@rpc_catch_errors
|
||||||
|
def _get_logs(self):
|
||||||
|
"""
|
||||||
|
Returns latest logs
|
||||||
|
get:
|
||||||
|
param:
|
||||||
|
limit: Only get a certain number of records
|
||||||
|
"""
|
||||||
|
limit = int(request.args.get('limit', 0)) or None
|
||||||
|
return self.rest_dump(self._rpc_get_logs(limit))
|
||||||
|
|
||||||
@require_login
|
@require_login
|
||||||
@rpc_catch_errors
|
@rpc_catch_errors
|
||||||
def _edge(self):
|
def _edge(self):
|
||||||
|
@ -11,9 +11,10 @@ from typing import Any, Dict, List, Optional, Tuple, Union
import arrow
from numpy import NAN, mean

from freqtrade.exceptions import (ExchangeError, InvalidOrderException,
from freqtrade.constants import CANCEL_REASON
PricingError)
from freqtrade.exceptions import ExchangeError, PricingError
from freqtrade.exchange import timeframe_to_minutes, timeframe_to_msecs
from freqtrade.loggers import bufferHandler
from freqtrade.misc import shorten_date
from freqtrade.persistence import Trade
from freqtrade.rpc.fiat_convert import CryptoToFiatConverter
@ -158,6 +159,7 @@ class RPC:
current_profit_abs=current_profit_abs,
stoploss_current_dist=stoploss_current_dist,
stoploss_current_dist_ratio=round(stoploss_current_dist_ratio, 8),
stoploss_current_dist_pct=round(stoploss_current_dist_ratio * 100, 2),
stoploss_entry_dist=stoploss_entry_dist,
stoploss_entry_dist_ratio=round(stoploss_entry_dist_ratio, 8),
open_order='({} {} rem={:.8f})'.format(
@ -222,24 +224,23 @@ class RPC:
Trade.close_date >= profitday,
Trade.close_date < (profitday + timedelta(days=1))
]).order_by(Trade.close_date).all()
curdayprofit = sum(trade.close_profit_abs for trade in trades)
curdayprofit = sum(
trade.close_profit_abs for trade in trades if trade.close_profit_abs is not None)
profit_days[profitday] = {
'amount': f'{curdayprofit:.8f}',
'amount': curdayprofit,
'trades': len(trades)
}

data = [
{
'date': key,
'abs_profit': f'{float(value["amount"]):.8f}',
'abs_profit': value["amount"],
'fiat_value': '{value:.3f}'.format(
'fiat_value': self._fiat_converter.convert_amount(
value=self._fiat_converter.convert_amount(
value['amount'],
stake_currency,
fiat_display_currency
) if self._fiat_converter else 0,
),
'trade_count': value["trades"],
'trade_count': f'{value["trades"]}',
}
for key, value in profit_days.items()
]
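After this hunk the per-day entries carry raw numeric values instead of pre-formatted strings; formatting is left to the consumers (Telegram and the REST API). A minimal sketch of one element of the resulting data list, with sample values invented for illustration:

from datetime import date

day_entry = {
    'date': date(2020, 8, 27),   # a datetime.date; serialized later by ArrowJSONEncoder
    'abs_profit': 0.00023456,    # float - no longer a pre-formatted '%.8f' string
    'fiat_value': 7.342,         # float from CryptoToFiatConverter, or 0 without a converter
    'trade_count': 3,            # int - no longer a string
}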
@ -435,7 +436,7 @@ class RPC:
|
|||||||
def _rpc_reload_config(self) -> Dict[str, str]:
|
def _rpc_reload_config(self) -> Dict[str, str]:
|
||||||
""" Handler for reload_config. """
|
""" Handler for reload_config. """
|
||||||
self._freqtrade.state = State.RELOAD_CONFIG
|
self._freqtrade.state = State.RELOAD_CONFIG
|
||||||
return {'status': 'reloading config ...'}
|
return {'status': 'Reloading config ...'}
|
||||||
|
|
||||||
def _rpc_stopbuy(self) -> Dict[str, str]:
|
def _rpc_stopbuy(self) -> Dict[str, str]:
|
||||||
"""
|
"""
|
||||||
@ -454,29 +455,22 @@ class RPC:
|
|||||||
"""
|
"""
|
||||||
def _exec_forcesell(trade: Trade) -> None:
|
def _exec_forcesell(trade: Trade) -> None:
|
||||||
# Check if there is there is an open order
|
# Check if there is there is an open order
|
||||||
|
fully_canceled = False
|
||||||
if trade.open_order_id:
|
if trade.open_order_id:
|
||||||
order = self._freqtrade.exchange.fetch_order(trade.open_order_id, trade.pair)
|
order = self._freqtrade.exchange.fetch_order(trade.open_order_id, trade.pair)
|
||||||
|
|
||||||
# Cancel open LIMIT_BUY orders and close trade
|
if order['side'] == 'buy':
|
||||||
if order and order['status'] == 'open' \
|
fully_canceled = self._freqtrade.handle_cancel_buy(
|
||||||
and order['type'] == 'limit' \
|
trade, order, CANCEL_REASON['FORCE_SELL'])
|
||||||
and order['side'] == 'buy':
|
|
||||||
self._freqtrade.exchange.cancel_order(trade.open_order_id, trade.pair)
|
|
||||||
trade.close(order.get('price') or trade.open_rate)
|
|
||||||
# Do the best effort, if we don't know 'filled' amount, don't try selling
|
|
||||||
if order['filled'] is None:
|
|
||||||
return
|
|
||||||
trade.amount = order['filled']
|
|
||||||
|
|
||||||
# Ignore trades with an attached LIMIT_SELL order
|
if order['side'] == 'sell':
|
||||||
if order and order['status'] == 'open' \
|
# Cancel order - so it is placed anew with a fresh price.
|
||||||
and order['type'] == 'limit' \
|
self._freqtrade.handle_cancel_sell(trade, order, CANCEL_REASON['FORCE_SELL'])
|
||||||
and order['side'] == 'sell':
|
|
||||||
return
|
|
||||||
|
|
||||||
# Get current rate and execute sell
|
if not fully_canceled:
|
||||||
current_rate = self._freqtrade.get_sell_rate(trade.pair, False)
|
# Get current rate and execute sell
|
||||||
self._freqtrade.execute_sell(trade, current_rate, SellType.FORCE_SELL)
|
current_rate = self._freqtrade.get_sell_rate(trade.pair, False)
|
||||||
|
self._freqtrade.execute_sell(trade, current_rate, SellType.FORCE_SELL)
|
||||||
# ---- EOF def _exec_forcesell ----
|
# ---- EOF def _exec_forcesell ----
|
||||||
|
|
||||||
if self._freqtrade.state != State.RUNNING:
|
if self._freqtrade.state != State.RUNNING:
|
||||||
@ -555,7 +549,7 @@ class RPC:
|
|||||||
try:
|
try:
|
||||||
self._freqtrade.exchange.cancel_order(trade.open_order_id, trade.pair)
|
self._freqtrade.exchange.cancel_order(trade.open_order_id, trade.pair)
|
||||||
c_count += 1
|
c_count += 1
|
||||||
except (ExchangeError, InvalidOrderException):
|
except (ExchangeError):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
# cancel stoploss on exchange ...
|
# cancel stoploss on exchange ...
|
||||||
@ -565,7 +559,7 @@ class RPC:
|
|||||||
self._freqtrade.exchange.cancel_stoploss_order(trade.stoploss_order_id,
|
self._freqtrade.exchange.cancel_stoploss_order(trade.stoploss_order_id,
|
||||||
trade.pair)
|
trade.pair)
|
||||||
c_count += 1
|
c_count += 1
|
||||||
except (ExchangeError, InvalidOrderException):
|
except (ExchangeError):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
Trade.session.delete(trade)
|
Trade.session.delete(trade)
|
||||||
@ -633,6 +627,24 @@ class RPC:
|
|||||||
}
|
}
|
||||||
return res
|
return res
|
||||||
|
|
||||||
|
def _rpc_get_logs(self, limit: Optional[int]) -> Dict[str, Any]:
|
||||||
|
"""Returns the last X logs"""
|
||||||
|
if limit:
|
||||||
|
buffer = bufferHandler.buffer[-limit:]
|
||||||
|
else:
|
||||||
|
buffer = bufferHandler.buffer
|
||||||
|
records = [[datetime.fromtimestamp(r.created).strftime("%Y-%m-%d %H:%M:%S"),
|
||||||
|
r.created * 1000, r.name, r.levelname,
|
||||||
|
r.message + ('\n' + r.exc_text if r.exc_text else '')]
|
||||||
|
for r in buffer]
|
||||||
|
|
||||||
|
# Log format:
|
||||||
|
# [logtime-formatted, logepoch, logger-name, loglevel, message \n + exception]
|
||||||
|
# e.g. ["2020-08-27 11:35:01", 1598520901097.9397,
|
||||||
|
# "freqtrade.worker", "INFO", "Starting worker develop"]
|
||||||
|
|
||||||
|
return {'log_count': len(records), 'logs': records}
|
||||||
|
|
||||||
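The format comment above fixes the record layout; a small sketch of consuming the returned structure follows (the sample record simply mirrors the example from that comment):

# Illustrative response shaped like the format comment above
response = {
    'log_count': 1,
    'logs': [
        ["2020-08-27 11:35:01", 1598520901097.9397,
         "freqtrade.worker", "INFO", "Starting worker develop"],
    ],
}

for logtime, epoch_ms, name, level, message in response['logs']:
    print(f"{logtime} {name} [{level}]: {message}")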
def _rpc_edge(self) -> List[Dict[str, Any]]:
""" Returns information related to Edge """
if not self._freqtrade.edge:
@ -12,6 +12,7 @@ from tabulate import tabulate
from telegram import ParseMode, ReplyKeyboardMarkup, Update
from telegram.error import NetworkError, TelegramError
from telegram.ext import CallbackContext, CommandHandler, Updater
from telegram.utils.helpers import escape_markdown

from freqtrade.__init__ import __version__
from freqtrade.rpc import RPC, RPCException, RPCMessageType
@ -103,6 +104,7 @@ class Telegram(RPC):
CommandHandler('stopbuy', self._stopbuy),
CommandHandler('whitelist', self._whitelist),
CommandHandler('blacklist', self._blacklist),
CommandHandler('logs', self._logs),
CommandHandler('edge', self._edge),
CommandHandler('help', self._help),
CommandHandler('version', self._version),
@ -149,7 +151,7 @@ class Telegram(RPC):

elif msg['type'] == RPCMessageType.BUY_CANCEL_NOTIFICATION:
message = ("\N{WARNING SIGN} *{exchange}:* "
"Cancelling Open Buy Order for {pair}".format(**msg))
"Cancelling open buy Order for {pair}. Reason: {reason}.".format(**msg))

elif msg['type'] == RPCMessageType.SELL_NOTIFICATION:
msg['amount'] = round(msg['amount'], 8)
|
|||||||
("*Close Profit:* `{close_profit_pct}`"
|
("*Close Profit:* `{close_profit_pct}`"
|
||||||
if r['close_profit_pct'] is not None else ""),
|
if r['close_profit_pct'] is not None else ""),
|
||||||
"*Current Profit:* `{current_profit_pct:.2f}%`",
|
"*Current Profit:* `{current_profit_pct:.2f}%`",
|
||||||
|
|
||||||
# Adding initial stoploss only if it is different from stoploss
|
|
||||||
"*Initial Stoploss:* `{initial_stop_loss:.8f}` " +
|
|
||||||
("`({initial_stop_loss_pct:.2f}%)`") if (
|
|
||||||
r['stop_loss'] != r['initial_stop_loss']
|
|
||||||
and r['initial_stop_loss_pct'] is not None) else "",
|
|
||||||
|
|
||||||
# Adding stoploss and stoploss percentage only if it is not None
|
|
||||||
"*Stoploss:* `{stop_loss:.8f}` " +
|
|
||||||
("`({stop_loss_pct:.2f}%)`" if r['stop_loss_pct'] else ""),
|
|
||||||
]
|
]
|
||||||
|
if (r['stop_loss'] != r['initial_stop_loss']
|
||||||
|
and r['initial_stop_loss_pct'] is not None):
|
||||||
|
# Adding initial stoploss only if it is different from stoploss
|
||||||
|
lines.append("*Initial Stoploss:* `{initial_stop_loss:.8f}` "
|
||||||
|
"`({initial_stop_loss_pct:.2f}%)`")
|
||||||
|
|
||||||
|
# Adding stoploss and stoploss percentage only if it is not None
|
||||||
|
lines.append("*Stoploss:* `{stop_loss:.8f}` " +
|
||||||
|
("`({stop_loss_pct:.2f}%)`" if r['stop_loss_pct'] else ""))
|
||||||
|
lines.append("*Stoploss distance:* `{stoploss_current_dist:.8f}` "
|
||||||
|
"`({stoploss_current_dist_pct:.2f}%)`")
|
||||||
if r['open_order']:
|
if r['open_order']:
|
||||||
if r['sell_order_status']:
|
if r['sell_order_status']:
|
||||||
lines.append("*Open Order:* `{open_order}` - `{sell_order_status}`")
|
lines.append("*Open Order:* `{open_order}` - `{sell_order_status}`")
|
||||||
@ -305,8 +308,8 @@ class Telegram(RPC):
|
|||||||
)
|
)
|
||||||
stats_tab = tabulate(
|
stats_tab = tabulate(
|
||||||
[[day['date'],
|
[[day['date'],
|
||||||
f"{day['abs_profit']} {stats['stake_currency']}",
|
f"{day['abs_profit']:.8f} {stats['stake_currency']}",
|
||||||
f"{day['fiat_value']} {stats['fiat_display_currency']}",
|
f"{day['fiat_value']:.3f} {stats['fiat_display_currency']}",
|
||||||
f"{day['trade_count']} trades"] for day in stats['data']],
|
f"{day['trade_count']} trades"] for day in stats['data']],
|
||||||
headers=[
|
headers=[
|
||||||
'Day',
|
'Day',
|
||||||
@ -637,6 +640,38 @@ class Telegram(RPC):
|
|||||||
except RPCException as e:
|
except RPCException as e:
|
||||||
self._send_msg(str(e))
|
self._send_msg(str(e))
|
||||||
|
|
||||||
|
@authorized_only
|
||||||
|
def _logs(self, update: Update, context: CallbackContext) -> None:
|
||||||
|
"""
|
||||||
|
Handler for /logs
|
||||||
|
Shows the latest logs
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
try:
|
||||||
|
limit = int(context.args[0])
|
||||||
|
except (TypeError, ValueError, IndexError):
|
||||||
|
limit = 10
|
||||||
|
logs = self._rpc_get_logs(limit)['logs']
|
||||||
|
msgs = ''
|
||||||
|
msg_template = "*{}* {}: {} \\- `{}`"
|
||||||
|
for logrec in logs:
|
||||||
|
msg = msg_template.format(escape_markdown(logrec[0], version=2),
|
||||||
|
escape_markdown(logrec[2], version=2),
|
||||||
|
escape_markdown(logrec[3], version=2),
|
||||||
|
escape_markdown(logrec[4], version=2))
|
||||||
|
if len(msgs + msg) + 10 >= MAX_TELEGRAM_MESSAGE_LENGTH:
|
||||||
|
# Send message immediately if it would become too long
|
||||||
|
self._send_msg(msgs, parse_mode=ParseMode.MARKDOWN_V2)
|
||||||
|
msgs = msg + '\n'
|
||||||
|
else:
|
||||||
|
# Append message to messages to send
|
||||||
|
msgs += msg + '\n'
|
||||||
|
|
||||||
|
if msgs:
|
||||||
|
self._send_msg(msgs, parse_mode=ParseMode.MARKDOWN_V2)
|
||||||
|
except RPCException as e:
|
||||||
|
self._send_msg(str(e))
|
||||||
|
|
||||||
@authorized_only
|
@authorized_only
|
||||||
def _edge(self, update: Update, context: CallbackContext) -> None:
|
def _edge(self, update: Update, context: CallbackContext) -> None:
|
||||||
"""
|
"""
|
||||||
@ -682,6 +717,7 @@ class Telegram(RPC):
|
|||||||
"*/stopbuy:* `Stops buying, but handles open trades gracefully` \n"
|
"*/stopbuy:* `Stops buying, but handles open trades gracefully` \n"
|
||||||
"*/reload_config:* `Reload configuration file` \n"
|
"*/reload_config:* `Reload configuration file` \n"
|
||||||
"*/show_config:* `Show running configuration` \n"
|
"*/show_config:* `Show running configuration` \n"
|
||||||
|
"*/logs [limit]:* `Show latest logs - defaults to 10` \n"
|
||||||
"*/whitelist:* `Show current whitelist` \n"
|
"*/whitelist:* `Show current whitelist` \n"
|
||||||
"*/blacklist [pair]:* `Show current blacklist, or adds one or more pairs "
|
"*/blacklist [pair]:* `Show current blacklist, or adds one or more pairs "
|
||||||
"to the blacklist.` \n"
|
"to the blacklist.` \n"
|
||||||
|
@ -1 +1,5 @@
|
|||||||
from freqtrade.strategy.interface import IStrategy # noqa: F401
|
# flake8: noqa: F401
|
||||||
|
from freqtrade.exchange import (timeframe_to_minutes, timeframe_to_prev_date,
|
||||||
|
timeframe_to_seconds, timeframe_to_next_date, timeframe_to_msecs)
|
||||||
|
from freqtrade.strategy.interface import IStrategy
|
||||||
|
from freqtrade.strategy.strategy_helper import merge_informative_pair
|
||||||
|
@ -14,8 +14,9 @@ from pandas import DataFrame

from freqtrade.constants import ListPairsWithTimeframes
from freqtrade.data.dataprovider import DataProvider
from freqtrade.exceptions import StrategyError, OperationalException
from freqtrade.exceptions import OperationalException, StrategyError
from freqtrade.exchange import timeframe_to_minutes
from freqtrade.exchange.exchange import timeframe_to_next_date
from freqtrade.persistence import Trade
from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper
from freqtrade.wallets import Wallets
|
|||||||
EMERGENCY_SELL = "emergency_sell"
|
EMERGENCY_SELL = "emergency_sell"
|
||||||
NONE = ""
|
NONE = ""
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
# explicitly convert to String to help with exporting data.
|
||||||
|
return self.value
|
||||||
|
|
||||||
|
|
||||||
class SellCheckTuple(NamedTuple):
|
class SellCheckTuple(NamedTuple):
|
||||||
"""
|
"""
|
||||||
@ -293,13 +298,25 @@ class IStrategy(ABC):
|
|||||||
if pair in self._pair_locked_until:
|
if pair in self._pair_locked_until:
|
||||||
del self._pair_locked_until[pair]
|
del self._pair_locked_until[pair]
|
||||||
|
|
||||||
def is_pair_locked(self, pair: str) -> bool:
|
def is_pair_locked(self, pair: str, candle_date: datetime = None) -> bool:
|
||||||
"""
|
"""
|
||||||
Checks if a pair is currently locked
|
Checks if a pair is currently locked
|
||||||
|
The 2nd, optional parameter ensures that locks are applied until the new candle arrives,
|
||||||
|
and not stop at 14:00:00 - while the next candle arrives at 14:00:02 leaving a gap
|
||||||
|
of 2 seconds for a buy to happen on an old signal.
|
||||||
|
:param: pair: "Pair to check"
|
||||||
|
:param candle_date: Date of the last candle. Optional, defaults to current date
|
||||||
|
:returns: locking state of the pair in question.
|
||||||
"""
|
"""
|
||||||
if pair not in self._pair_locked_until:
|
if pair not in self._pair_locked_until:
|
||||||
return False
|
return False
|
||||||
return self._pair_locked_until[pair] >= datetime.now(timezone.utc)
|
if not candle_date:
|
||||||
|
return self._pair_locked_until[pair] >= datetime.now(timezone.utc)
|
||||||
|
else:
|
||||||
|
# Locking should happen until a new candle arrives
|
||||||
|
lock_time = timeframe_to_next_date(self.timeframe, candle_date)
|
||||||
|
# lock_time = candle_date + timedelta(minutes=timeframe_to_minutes(self.timeframe))
|
||||||
|
return self._pair_locked_until[pair] > lock_time
|
||||||
|
|
||||||
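A short usage sketch of the new optional parameter; this is a hypothetical helper written as a strategy method, assuming it is called with the analyzed dataframe and metadata as in other strategy callbacks ('date' is the candle open column of freqtrade's OHLCV dataframes):

def candle_is_locked(self, dataframe, metadata):
    # Hypothetical helper - self is the IStrategy instance
    last_candle_date = dataframe.iloc[-1]['date']
    # With candle_date given, the lock holds until the next candle starts, closing the
    # few-seconds gap described in the docstring above.
    return self.is_pair_locked(metadata['pair'], candle_date=last_candle_date)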
def analyze_ticker(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
"""
@ -430,7 +447,7 @@ class IStrategy(ABC):
if latest_date < (arrow.utcnow().shift(minutes=-(timeframe_minutes * 2 + offset))):
logger.warning(
'Outdated history for pair %s. Last tick is %s minutes old',
pair, (arrow.utcnow() - latest_date).seconds // 60
pair, int((arrow.utcnow() - latest_date).total_seconds() // 60)
)
return False, False
48
freqtrade/strategy/strategy_helper.py
Normal file
@ -0,0 +1,48 @@
import pandas as pd
from freqtrade.exchange import timeframe_to_minutes


def merge_informative_pair(dataframe: pd.DataFrame, informative: pd.DataFrame,
timeframe: str, timeframe_inf: str, ffill: bool = True) -> pd.DataFrame:
"""
Correctly merge informative samples to the original dataframe, avoiding lookahead bias.

Since dates are candle open dates, merging a 15m candle that starts at 15:00, and a
1h candle that starts at 15:00 will result in all candles to know the close at 16:00
which they should not know.

Moves the date of the informative pair by 1 time interval forward.
This way, the 14:00 1h candle is merged to 15:00 15m candle, since the 14:00 1h candle is the
last candle that's closed at 15:00, 15:15, 15:30 or 15:45.

Assuming inf_tf = '1d' - then the resulting columns will be:
date_1d, open_1d, high_1d, low_1d, close_1d, rsi_1d

:param dataframe: Original dataframe
:param informative: Informative pair, most likely loaded via dp.get_pair_dataframe
:param timeframe: Timeframe of the original pair sample.
:param timeframe_inf: Timeframe of the informative pair sample.
:param ffill: Forwardfill missing values - optional but usually required
"""

minutes_inf = timeframe_to_minutes(timeframe_inf)
minutes = timeframe_to_minutes(timeframe)
if minutes >= minutes_inf:
# No need to forwardshift if the timeframes are identical
informative['date_merge'] = informative["date"]
else:
informative['date_merge'] = informative["date"] + pd.to_timedelta(minutes_inf, 'm')

# Rename columns to be unique
informative.columns = [f"{col}_{timeframe_inf}" for col in informative.columns]

# Combine the 2 dataframes
# all indicators on the informative sample MUST be calculated before this point
dataframe = pd.merge(dataframe, informative, left_on='date',
right_on=f'date_merge_{timeframe_inf}', how='left')
dataframe = dataframe.drop(f'date_merge_{timeframe_inf}', axis=1)

if ffill:
dataframe = dataframe.ffill()

return dataframe
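A minimal usage sketch of the new helper from inside a strategy's populate_indicators; the 1h informative timeframe and the RSI indicator are illustrative only, and the informative candles are fetched via dp.get_pair_dataframe as the docstring suggests:

import talib.abstract as ta

from freqtrade.strategy import merge_informative_pair


def populate_indicators(self, dataframe, metadata):
    # Sketch of a strategy method - assumes self.dp (DataProvider) is attached,
    # as it is during backtesting and live runs.
    informative = self.dp.get_pair_dataframe(pair=metadata['pair'], timeframe='1h')
    # All indicators on the informative sample must be calculated before merging
    informative['rsi'] = ta.RSI(informative)
    # Resulting columns arrive suffixed with the informative timeframe, e.g. rsi_1h
    dataframe = merge_informative_pair(dataframe, informative, self.timeframe, '1h', ffill=True)
    return dataframe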
@ -34,7 +34,7 @@
|
|||||||
"# config = Configuration.from_files([\"config.json\"])\n",
|
"# config = Configuration.from_files([\"config.json\"])\n",
|
||||||
"\n",
|
"\n",
|
||||||
"# Define some constants\n",
|
"# Define some constants\n",
|
||||||
"config[\"ticker_interval\"] = \"5m\"\n",
|
"config[\"timeframe\"] = \"5m\"\n",
|
||||||
"# Name of the strategy class\n",
|
"# Name of the strategy class\n",
|
||||||
"config[\"strategy\"] = \"SampleStrategy\"\n",
|
"config[\"strategy\"] = \"SampleStrategy\"\n",
|
||||||
"# Location of the data\n",
|
"# Location of the data\n",
|
||||||
@ -53,7 +53,7 @@
|
|||||||
"from freqtrade.data.history import load_pair_history\n",
|
"from freqtrade.data.history import load_pair_history\n",
|
||||||
"\n",
|
"\n",
|
||||||
"candles = load_pair_history(datadir=data_location,\n",
|
"candles = load_pair_history(datadir=data_location,\n",
|
||||||
" timeframe=config[\"ticker_interval\"],\n",
|
" timeframe=config[\"timeframe\"],\n",
|
||||||
" pair=pair)\n",
|
" pair=pair)\n",
|
||||||
"\n",
|
"\n",
|
||||||
"# Confirm success\n",
|
"# Confirm success\n",
|
||||||
@ -136,10 +136,51 @@
|
|||||||
"metadata": {},
|
"metadata": {},
|
||||||
"outputs": [],
|
"outputs": [],
|
||||||
"source": [
|
"source": [
|
||||||
"from freqtrade.data.btanalysis import load_backtest_data\n",
|
"from freqtrade.data.btanalysis import load_backtest_data, load_backtest_stats\n",
|
||||||
"\n",
|
"\n",
|
||||||
"# Load backtest results\n",
|
"# if backtest_dir points to a directory, it'll automatically load the last backtest file.\n",
|
||||||
"trades = load_backtest_data(config[\"user_data_dir\"] / \"backtest_results/backtest-result.json\")\n",
|
"backtest_dir = config[\"user_data_dir\"] / \"backtest_results\"\n",
|
||||||
|
"# backtest_dir can also point to a specific file \n",
|
||||||
|
"# backtest_dir = config[\"user_data_dir\"] / \"backtest_results/backtest-result-2020-07-01_20-04-22.json\""
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"# You can get the full backtest statistics by using the following command.\n",
|
||||||
|
"# This contains all information used to generate the backtest result.\n",
|
||||||
|
"stats = load_backtest_stats(backtest_dir)\n",
|
||||||
|
"\n",
|
||||||
|
"strategy = 'SampleStrategy'\n",
|
||||||
|
"# All statistics are available per strategy, so if `--strategy-list` was used during backtest, this will be reflected here as well.\n",
|
||||||
|
"# Example usages:\n",
|
||||||
|
"print(stats['strategy'][strategy]['results_per_pair'])\n",
|
||||||
|
"# Get pairlist used for this backtest\n",
|
||||||
|
"print(stats['strategy'][strategy]['pairlist'])\n",
|
||||||
|
"# Get market change (average change of all pairs from start to end of the backtest period)\n",
|
||||||
|
"print(stats['strategy'][strategy]['market_change'])\n",
|
||||||
|
"# Maximum drawdown ()\n",
|
||||||
|
"print(stats['strategy'][strategy]['max_drawdown'])\n",
|
||||||
|
"# Maximum drawdown start and end\n",
|
||||||
|
"print(stats['strategy'][strategy]['drawdown_start'])\n",
|
||||||
|
"print(stats['strategy'][strategy]['drawdown_end'])\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"# Get strategy comparison (only relevant if multiple strategies were compared)\n",
|
||||||
|
"print(stats['strategy_comparison'])\n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"# Load backtested trades as dataframe\n",
|
||||||
|
"trades = load_backtest_data(backtest_dir)\n",
|
||||||
"\n",
|
"\n",
|
||||||
"# Show value-counts per pair\n",
|
"# Show value-counts per pair\n",
|
||||||
"trades.groupby(\"pair\")[\"sell_reason\"].value_counts()"
|
"trades.groupby(\"pair\")[\"sell_reason\"].value_counts()"
|
||||||
|
2
freqtrade/vendor/qtpylib/indicators.py
vendored
@ -222,7 +222,7 @@ def crossed(series1, series2, direction=None):
if isinstance(series1, np.ndarray):
series1 = pd.Series(series1)

if isinstance(series2, (float, int, np.ndarray)):
if isinstance(series2, (float, int, np.ndarray, np.integer, np.floating)):
series2 = pd.Series(index=series1.index, data=series2)

if direction is None or direction == "above":
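The widened isinstance check matters because thresholds taken from dataframes are often numpy scalars rather than Python numbers; a small illustrative check, using crossed_above as the usual qtpylib wrapper around crossed:

import numpy as np
import pandas as pd

from freqtrade.vendor.qtpylib.indicators import crossed_above

rsi = pd.Series([28.0, 29.5, 31.2])
threshold = np.int64(30)  # numpy scalar, e.g. pulled out of another dataframe

# np.integer / np.floating values now also get wrapped into a Series before comparison
print(crossed_above(rsi, threshold))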
16
mkdocs.yml
@ -1,8 +1,11 @@
site_name: Freqtrade
nav:
- Home: index.md
- Installation Docker: docker.md
- Quickstart with Docker: docker_quickstart.md
- Installation: installation.md
- Installation:
- Docker without docker-compose: docker.md
- Linux/MacOS/Raspberry: installation.md
- Windows: windows_installation.md
- Freqtrade Basics: bot-basics.md
- Configuration: configuration.md
- Strategy Customization: strategy-customization.md
@ -39,13 +42,19 @@ theme:
accent: 'tear'
extra_css:
- 'stylesheets/ft.extra.css'
extra_javascript:
- javascripts/config.js
- https://polyfill.io/v3/polyfill.min.js?features=es6
- https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js
markdown_extensions:
- admonition
- footnotes
- codehilite:
guess_lang: false
- toc:
permalink: true
- pymdownx.arithmatex
- pymdownx.arithmatex:
generic: true
- pymdownx.caret
- pymdownx.critic
- pymdownx.details
@ -53,6 +62,7 @@ markdown_extensions:
- pymdownx.magiclink
- pymdownx.mark
- pymdownx.smartsymbols
- pymdownx.tabbed
- pymdownx.superfences
- pymdownx.tasklist:
custom_checkbox: true
@ -1,9 +1,9 @@
# requirements without requirements installable via conda
# mainly used for Raspberry pi installs
ccxt==1.32.88
ccxt==1.34.11
SQLAlchemy==1.3.18
SQLAlchemy==1.3.19
python-telegram-bot==12.8
arrow==0.15.8
arrow==0.16.0
cachetools==4.1.1
requests==2.24.0
urllib3==1.25.10
@ -13,6 +13,8 @@ TA-Lib==0.4.18
tabulate==0.8.7
pycoingecko==1.3.0
jinja2==2.11.2
tables==3.6.1
blosc==1.9.2

# find first, C search in arrays
py_find_1st==1.1.4
@ -26,10 +28,10 @@ sdnotify==0.3.2
# Api server
flask==1.1.2
flask-jwt-extended==3.24.1
flask-cors==3.0.8
flask-cors==3.0.9

# Support for colorized terminal output
colorama==0.4.3
# Building config files interactively
questionary==1.5.2
prompt-toolkit==3.0.5
prompt-toolkit==3.0.7
@ -3,15 +3,15 @@
-r requirements-plot.txt
-r requirements-hyperopt.txt

coveralls==2.1.1
coveralls==2.1.2
flake8==3.8.3
flake8-type-annotations==0.1.0
flake8-tidy-imports==4.1.0
mypy==0.782
pytest==6.0.1
pytest-asyncio==0.14.0
pytest-cov==2.10.0
pytest-cov==2.10.1
pytest-mock==3.2.0
pytest-mock==3.3.1
pytest-random-order==1.0.4

# Convert jupyter notebooks to markdown documents
@ -3,8 +3,8 @@

# Required for hyperopt
scipy==1.5.2
scikit-learn==0.23.1
scikit-learn==0.23.2
scikit-optimize==0.7.4
scikit-optimize==0.8.1
filelock==3.0.12
joblib==0.16.0
progressbar2==3.51.4
progressbar2==3.53.1
@ -2,4 +2,4 @@
-r requirements-common.txt

numpy==1.19.1
pandas==1.1.0
pandas==1.1.2
@ -159,6 +159,14 @@ class FtRestClient():
"""
return self._get("show_config")

def logs(self, limit=None):
"""Show latest logs.

:param limit: Limits log messages to the last <limit> logs. No limit to get all the trades.
:return: json object
"""
return self._get("logs", params={"limit": limit} if limit else 0)

def trades(self, limit=None):
"""Return trades history.

@ -276,11 +284,11 @@ def main(args):
print_commands()
sys.exit()

config = load_config(args["config"])
config = load_config(args['config'])
url = config.get("api_server", {}).get("server_url", "127.0.0.1")
url = config.get('api_server', {}).get('server_url', '127.0.0.1')
port = config.get("api_server", {}).get("listen_port", "8080")
port = config.get('api_server', {}).get('listen_port', '8080')
username = config.get("api_server", {}).get("username")
username = config.get('api_server', {}).get('username')
password = config.get("api_server", {}).get("password")
password = config.get('api_server', {}).get('password')

server_url = f"http://{url}:{port}"
client = FtRestClient(server_url, username, password)
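A minimal sketch of calling the new endpoint through this client; the import path, server address and credentials are placeholders and should match the api_server section of the bot configuration:

from rest_client import FtRestClient  # assumes running next to scripts/rest_client.py

client = FtRestClient("http://127.0.0.1:8080", "username", "password")

response = client.logs(limit=20)  # omit limit to fetch the whole buffer
for record in response.get("logs", []):
    # record layout: [formatted-time, epoch-ms, logger-name, level, message]
    print(record[0], record[3], record[4])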
2
setup.py
@ -85,6 +85,8 @@ setup(name='freqtrade',
# from requirements.txt
'numpy',
'pandas',
'tables',
'blosc',
],
extras_require={
'api': api,
|
@ -667,7 +667,7 @@ def test_start_list_hyperopts(mocker, caplog, capsys):
|
|||||||
args = [
|
args = [
|
||||||
"list-hyperopts",
|
"list-hyperopts",
|
||||||
"--hyperopt-path",
|
"--hyperopt-path",
|
||||||
str(Path(__file__).parent.parent / "optimize"),
|
str(Path(__file__).parent.parent / "optimize" / "hyperopts"),
|
||||||
"-1"
|
"-1"
|
||||||
]
|
]
|
||||||
pargs = get_args(args)
|
pargs = get_args(args)
|
||||||
@ -683,7 +683,7 @@ def test_start_list_hyperopts(mocker, caplog, capsys):
|
|||||||
args = [
|
args = [
|
||||||
"list-hyperopts",
|
"list-hyperopts",
|
||||||
"--hyperopt-path",
|
"--hyperopt-path",
|
||||||
str(Path(__file__).parent.parent / "optimize"),
|
str(Path(__file__).parent.parent / "optimize" / "hyperopts"),
|
||||||
]
|
]
|
||||||
pargs = get_args(args)
|
pargs = get_args(args)
|
||||||
# pargs['config'] = None
|
# pargs['config'] = None
|
||||||
@ -692,7 +692,6 @@ def test_start_list_hyperopts(mocker, caplog, capsys):
|
|||||||
assert "TestHyperoptLegacy" not in captured.out
|
assert "TestHyperoptLegacy" not in captured.out
|
||||||
assert "legacy_hyperopt.py" not in captured.out
|
assert "legacy_hyperopt.py" not in captured.out
|
||||||
assert "DefaultHyperOpt" in captured.out
|
assert "DefaultHyperOpt" in captured.out
|
||||||
assert "test_hyperopt.py" in captured.out
|
|
||||||
|
|
||||||
|
|
||||||
def test_start_test_pairlist(mocker, caplog, tickers, default_conf, capsys):
|
def test_start_test_pairlist(mocker, caplog, tickers, default_conf, capsys):
|
||||||
|
@ -78,7 +78,7 @@ def patch_exchange(mocker, api_mock=None, id='bittrex', mock_markets=True) -> No
|
|||||||
def get_patched_exchange(mocker, config, api_mock=None, id='bittrex',
|
def get_patched_exchange(mocker, config, api_mock=None, id='bittrex',
|
||||||
mock_markets=True) -> Exchange:
|
mock_markets=True) -> Exchange:
|
||||||
patch_exchange(mocker, api_mock, id, mock_markets)
|
patch_exchange(mocker, api_mock, id, mock_markets)
|
||||||
config["exchange"]["name"] = id
|
config['exchange']['name'] = id
|
||||||
try:
|
try:
|
||||||
exchange = ExchangeResolver.load_exchange(id, config)
|
exchange = ExchangeResolver.load_exchange(id, config)
|
||||||
except ImportError:
|
except ImportError:
|
||||||
@ -176,11 +176,13 @@ def create_mock_trades(fee):
|
|||||||
pair='ETH/BTC',
|
pair='ETH/BTC',
|
||||||
stake_amount=0.001,
|
stake_amount=0.001,
|
||||||
amount=123.0,
|
amount=123.0,
|
||||||
|
amount_requested=123.0,
|
||||||
fee_open=fee.return_value,
|
fee_open=fee.return_value,
|
||||||
fee_close=fee.return_value,
|
fee_close=fee.return_value,
|
||||||
open_rate=0.123,
|
open_rate=0.123,
|
||||||
exchange='bittrex',
|
exchange='bittrex',
|
||||||
open_order_id='dry_run_buy_12345'
|
open_order_id='dry_run_buy_12345',
|
||||||
|
strategy='DefaultStrategy',
|
||||||
)
|
)
|
||||||
Trade.session.add(trade)
|
Trade.session.add(trade)
|
||||||
|
|
||||||
@ -188,6 +190,7 @@ def create_mock_trades(fee):
|
|||||||
pair='ETC/BTC',
|
pair='ETC/BTC',
|
||||||
stake_amount=0.001,
|
stake_amount=0.001,
|
||||||
amount=123.0,
|
amount=123.0,
|
||||||
|
amount_requested=123.0,
|
||||||
fee_open=fee.return_value,
|
fee_open=fee.return_value,
|
||||||
fee_close=fee.return_value,
|
fee_close=fee.return_value,
|
||||||
open_rate=0.123,
|
open_rate=0.123,
|
||||||
@ -195,7 +198,8 @@ def create_mock_trades(fee):
|
|||||||
close_profit=0.005,
|
close_profit=0.005,
|
||||||
exchange='bittrex',
|
exchange='bittrex',
|
||||||
is_open=False,
|
is_open=False,
|
||||||
open_order_id='dry_run_sell_12345'
|
open_order_id='dry_run_sell_12345',
|
||||||
|
strategy='DefaultStrategy',
|
||||||
)
|
)
|
||||||
Trade.session.add(trade)
|
Trade.session.add(trade)
|
||||||
|
|
||||||
@ -218,11 +222,13 @@ def create_mock_trades(fee):
|
|||||||
pair='ETC/BTC',
|
pair='ETC/BTC',
|
||||||
stake_amount=0.001,
|
stake_amount=0.001,
|
||||||
amount=123.0,
|
amount=123.0,
|
||||||
|
amount_requested=124.0,
|
||||||
fee_open=fee.return_value,
|
fee_open=fee.return_value,
|
||||||
fee_close=fee.return_value,
|
fee_close=fee.return_value,
|
||||||
open_rate=0.123,
|
open_rate=0.123,
|
||||||
exchange='bittrex',
|
exchange='bittrex',
|
||||||
open_order_id='prod_buy_12345'
|
open_order_id='prod_buy_12345',
|
||||||
|
strategy='DefaultStrategy',
|
||||||
)
|
)
|
||||||
Trade.session.add(trade)
|
Trade.session.add(trade)
|
||||||
|
|
||||||
|
@ -6,24 +6,48 @@ from arrow import Arrow
|
|||||||
from pandas import DataFrame, DateOffset, Timestamp, to_datetime
|
from pandas import DataFrame, DateOffset, Timestamp, to_datetime
|
||||||
|
|
||||||
from freqtrade.configuration import TimeRange
|
from freqtrade.configuration import TimeRange
|
||||||
|
from freqtrade.constants import LAST_BT_RESULT_FN
|
||||||
from freqtrade.data.btanalysis import (BT_DATA_COLUMNS,
|
from freqtrade.data.btanalysis import (BT_DATA_COLUMNS,
|
||||||
analyze_trade_parallelism,
|
analyze_trade_parallelism,
|
||||||
|
calculate_market_change,
|
||||||
calculate_max_drawdown,
|
calculate_max_drawdown,
|
||||||
combine_dataframes_with_mean,
|
combine_dataframes_with_mean,
|
||||||
create_cum_profit,
|
create_cum_profit,
|
||||||
extract_trades_of_period,
|
extract_trades_of_period,
|
||||||
|
get_latest_backtest_filename,
|
||||||
load_backtest_data, load_trades,
|
load_backtest_data, load_trades,
|
||||||
load_trades_from_db)
|
load_trades_from_db)
|
||||||
from freqtrade.data.history import load_data, load_pair_history
|
from freqtrade.data.history import load_data, load_pair_history
|
||||||
|
from freqtrade.optimize.backtesting import BacktestResult
|
||||||
from tests.conftest import create_mock_trades
|
from tests.conftest import create_mock_trades
|
||||||
|
|
||||||
|
|
||||||
def test_load_backtest_data(testdatadir):
|
def test_get_latest_backtest_filename(testdatadir, mocker):
|
||||||
|
with pytest.raises(ValueError, match=r"Directory .* does not exist\."):
|
||||||
|
get_latest_backtest_filename(testdatadir / 'does_not_exist')
|
||||||
|
|
||||||
|
with pytest.raises(ValueError,
|
||||||
|
match=r"Directory .* does not seem to contain .*"):
|
||||||
|
get_latest_backtest_filename(testdatadir.parent)
|
||||||
|
|
||||||
|
res = get_latest_backtest_filename(testdatadir)
|
||||||
|
assert res == 'backtest-result_new.json'
|
||||||
|
|
||||||
|
res = get_latest_backtest_filename(str(testdatadir))
|
||||||
|
assert res == 'backtest-result_new.json'
|
||||||
|
|
||||||
|
mocker.patch("freqtrade.data.btanalysis.json_load", return_value={})
|
||||||
|
|
||||||
|
with pytest.raises(ValueError, match=r"Invalid '.last_result.json' format."):
|
||||||
|
get_latest_backtest_filename(testdatadir)
|
||||||
|
|
||||||
|
|
||||||
|
def test_load_backtest_data_old_format(testdatadir):
|
||||||
|
|
||||||
filename = testdatadir / "backtest-result_test.json"
|
filename = testdatadir / "backtest-result_test.json"
|
||||||
bt_data = load_backtest_data(filename)
|
bt_data = load_backtest_data(filename)
|
||||||
assert isinstance(bt_data, DataFrame)
|
assert isinstance(bt_data, DataFrame)
|
||||||
assert list(bt_data.columns) == BT_DATA_COLUMNS + ["profit"]
|
assert list(bt_data.columns) == BT_DATA_COLUMNS + ["profit_abs"]
|
||||||
assert len(bt_data) == 179
|
assert len(bt_data) == 179
|
||||||
|
|
||||||
# Test loading from string (must yield same result)
|
# Test loading from string (must yield same result)
|
||||||
@ -34,6 +58,49 @@ def test_load_backtest_data(testdatadir):
|
|||||||
load_backtest_data(str("filename") + "nofile")
|
load_backtest_data(str("filename") + "nofile")
|
||||||
|
|
||||||
|
|
||||||
|
def test_load_backtest_data_new_format(testdatadir):
|
||||||
|
|
||||||
|
filename = testdatadir / "backtest-result_new.json"
|
||||||
|
bt_data = load_backtest_data(filename)
|
||||||
|
assert isinstance(bt_data, DataFrame)
|
||||||
|
assert set(bt_data.columns) == set(list(BacktestResult._fields) + ["profit_abs"])
|
||||||
|
assert len(bt_data) == 179
|
||||||
|
|
||||||
|
# Test loading from string (must yield same result)
|
||||||
|
bt_data2 = load_backtest_data(str(filename))
|
||||||
|
assert bt_data.equals(bt_data2)
|
||||||
|
|
||||||
|
# Test loading from folder (must yield same result)
|
||||||
|
bt_data3 = load_backtest_data(testdatadir)
|
||||||
|
assert bt_data.equals(bt_data3)
|
||||||
|
|
||||||
|
with pytest.raises(ValueError, match=r"File .* does not exist\."):
|
||||||
|
load_backtest_data(str("filename") + "nofile")
|
||||||
|
|
||||||
|
with pytest.raises(ValueError, match=r"Unknown dataformat."):
|
||||||
|
load_backtest_data(testdatadir / LAST_BT_RESULT_FN)
|
||||||
|
|
||||||
|
|
||||||
|
def test_load_backtest_data_multi(testdatadir):
|
||||||
|
|
||||||
|
filename = testdatadir / "backtest-result_multistrat.json"
|
||||||
|
for strategy in ('DefaultStrategy', 'TestStrategy'):
|
||||||
|
bt_data = load_backtest_data(filename, strategy=strategy)
|
||||||
|
assert isinstance(bt_data, DataFrame)
|
||||||
|
assert set(bt_data.columns) == set(list(BacktestResult._fields) + ["profit_abs"])
|
||||||
|
assert len(bt_data) == 179
|
||||||
|
|
||||||
|
# Test loading from string (must yield same result)
|
||||||
|
bt_data2 = load_backtest_data(str(filename), strategy=strategy)
|
||||||
|
assert bt_data.equals(bt_data2)
|
||||||
|
|
||||||
|
with pytest.raises(ValueError, match=r"Strategy XYZ not available in the backtest result\."):
|
||||||
|
load_backtest_data(filename, strategy='XYZ')
|
||||||
|
|
||||||
|
with pytest.raises(ValueError, match=r"Detected backtest result with more than one strategy.*"):
|
||||||
|
load_backtest_data(filename)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.usefixtures("init_persistence")
|
@pytest.mark.usefixtures("init_persistence")
|
||||||
def test_load_trades_from_db(default_conf, fee, mocker):
|
def test_load_trades_from_db(default_conf, fee, mocker):
|
||||||
|
|
||||||
@ -46,12 +113,16 @@ def test_load_trades_from_db(default_conf, fee, mocker):
|
|||||||
assert len(trades) == 4
|
assert len(trades) == 4
|
||||||
assert isinstance(trades, DataFrame)
|
assert isinstance(trades, DataFrame)
|
||||||
assert "pair" in trades.columns
|
assert "pair" in trades.columns
|
||||||
assert "open_time" in trades.columns
|
assert "open_date" in trades.columns
|
||||||
assert "profit_percent" in trades.columns
|
assert "profit_percent" in trades.columns
|
||||||
|
|
||||||
for col in BT_DATA_COLUMNS:
|
for col in BT_DATA_COLUMNS:
|
||||||
if col not in ['index', 'open_at_end']:
|
if col not in ['index', 'open_at_end']:
|
||||||
assert col in trades.columns
|
assert col in trades.columns
|
||||||
|
trades = load_trades_from_db(db_url=default_conf['db_url'], strategy='DefaultStrategy')
|
||||||
|
assert len(trades) == 3
|
||||||
|
trades = load_trades_from_db(db_url=default_conf['db_url'], strategy='NoneStrategy')
|
||||||
|
assert len(trades) == 0
|
||||||
|
|
||||||
|
|
||||||
def test_extract_trades_of_period(testdatadir):
|
def test_extract_trades_of_period(testdatadir):
|
||||||
@ -66,13 +137,13 @@ def test_extract_trades_of_period(testdatadir):
|
|||||||
{'pair': [pair, pair, pair, pair],
|
{'pair': [pair, pair, pair, pair],
|
||||||
'profit_percent': [0.0, 0.1, -0.2, -0.5],
|
'profit_percent': [0.0, 0.1, -0.2, -0.5],
|
||||||
'profit_abs': [0.0, 1, -2, -5],
|
'profit_abs': [0.0, 1, -2, -5],
|
||||||
'open_time': to_datetime([Arrow(2017, 11, 13, 15, 40, 0).datetime,
|
'open_date': to_datetime([Arrow(2017, 11, 13, 15, 40, 0).datetime,
|
||||||
Arrow(2017, 11, 14, 9, 41, 0).datetime,
|
Arrow(2017, 11, 14, 9, 41, 0).datetime,
|
||||||
Arrow(2017, 11, 14, 14, 20, 0).datetime,
|
Arrow(2017, 11, 14, 14, 20, 0).datetime,
|
||||||
Arrow(2017, 11, 15, 3, 40, 0).datetime,
|
Arrow(2017, 11, 15, 3, 40, 0).datetime,
|
||||||
], utc=True
|
], utc=True
|
||||||
),
|
),
|
||||||
'close_time': to_datetime([Arrow(2017, 11, 13, 16, 40, 0).datetime,
|
'close_date': to_datetime([Arrow(2017, 11, 13, 16, 40, 0).datetime,
|
||||||
Arrow(2017, 11, 14, 10, 41, 0).datetime,
|
Arrow(2017, 11, 14, 10, 41, 0).datetime,
|
||||||
Arrow(2017, 11, 14, 15, 25, 0).datetime,
|
Arrow(2017, 11, 14, 15, 25, 0).datetime,
|
||||||
Arrow(2017, 11, 15, 3, 55, 0).datetime,
|
Arrow(2017, 11, 15, 3, 55, 0).datetime,
|
||||||
@ -81,10 +152,10 @@ def test_extract_trades_of_period(testdatadir):
|
|||||||
trades1 = extract_trades_of_period(data, trades)
|
trades1 = extract_trades_of_period(data, trades)
|
||||||
# First and last trade are dropped as they are out of range
|
# First and last trade are dropped as they are out of range
|
||||||
assert len(trades1) == 2
|
assert len(trades1) == 2
|
||||||
assert trades1.iloc[0].open_time == Arrow(2017, 11, 14, 9, 41, 0).datetime
|
assert trades1.iloc[0].open_date == Arrow(2017, 11, 14, 9, 41, 0).datetime
|
||||||
assert trades1.iloc[0].close_time == Arrow(2017, 11, 14, 10, 41, 0).datetime
|
assert trades1.iloc[0].close_date == Arrow(2017, 11, 14, 10, 41, 0).datetime
|
||||||
assert trades1.iloc[-1].open_time == Arrow(2017, 11, 14, 14, 20, 0).datetime
|
assert trades1.iloc[-1].open_date == Arrow(2017, 11, 14, 14, 20, 0).datetime
|
||||||
assert trades1.iloc[-1].close_time == Arrow(2017, 11, 14, 15, 25, 0).datetime
|
assert trades1.iloc[-1].close_date == Arrow(2017, 11, 14, 15, 25, 0).datetime
|
||||||
|
|
||||||
|
|
||||||
def test_analyze_trade_parallelism(default_conf, mocker, testdatadir):
|
def test_analyze_trade_parallelism(default_conf, mocker, testdatadir):
|
||||||
@ -105,7 +176,8 @@ def test_load_trades(default_conf, mocker):
|
|||||||
load_trades("DB",
|
load_trades("DB",
|
||||||
db_url=default_conf.get('db_url'),
|
db_url=default_conf.get('db_url'),
|
||||||
exportfilename=default_conf.get('exportfilename'),
|
exportfilename=default_conf.get('exportfilename'),
|
||||||
no_trades=False
|
no_trades=False,
|
||||||
|
strategy="DefaultStrategy",
|
||||||
)
|
)
|
||||||
|
|
||||||
assert db_mock.call_count == 1
|
assert db_mock.call_count == 1
|
||||||
@ -135,6 +207,14 @@ def test_load_trades(default_conf, mocker):
|
|||||||
assert bt_mock.call_count == 0
|
assert bt_mock.call_count == 0
|
||||||
|
|
||||||
|
|
||||||
|
def test_calculate_market_change(testdatadir):
|
||||||
|
pairs = ["ETH/BTC", "ADA/BTC"]
|
||||||
|
data = load_data(datadir=testdatadir, pairs=pairs, timeframe='5m')
|
||||||
|
result = calculate_market_change(data)
|
||||||
|
assert isinstance(result, float)
|
||||||
|
assert pytest.approx(result) == 0.00955514
|
||||||
|
|
||||||
|
|
||||||
def test_combine_dataframes_with_mean(testdatadir):
|
def test_combine_dataframes_with_mean(testdatadir):
|
||||||
pairs = ["ETH/BTC", "ADA/BTC"]
|
pairs = ["ETH/BTC", "ADA/BTC"]
|
||||||
data = load_data(datadir=testdatadir, pairs=pairs, timeframe='5m')
|
data = load_data(datadir=testdatadir, pairs=pairs, timeframe='5m')
|
||||||
@ -165,7 +245,7 @@ def test_create_cum_profit1(testdatadir):
|
|||||||
filename = testdatadir / "backtest-result_test.json"
|
filename = testdatadir / "backtest-result_test.json"
|
||||||
bt_data = load_backtest_data(filename)
|
bt_data = load_backtest_data(filename)
|
||||||
# Move close-time to "off" the candle, to make sure the logic still works
|
# Move close-time to "off" the candle, to make sure the logic still works
|
||||||
bt_data.loc[:, 'close_time'] = bt_data.loc[:, 'close_time'] + DateOffset(seconds=20)
|
bt_data.loc[:, 'close_date'] = bt_data.loc[:, 'close_date'] + DateOffset(seconds=20)
|
||||||
timerange = TimeRange.parse_timerange("20180110-20180112")
|
timerange = TimeRange.parse_timerange("20180110-20180112")
|
||||||
|
|
||||||
df = load_pair_history(pair="TRX/BTC", timeframe='5m',
|
df = load_pair_history(pair="TRX/BTC", timeframe='5m',
|
||||||
@ -204,11 +284,11 @@ def test_calculate_max_drawdown2():
|
|||||||
-0.033961, 0.010680, 0.010886, -0.029274, 0.011178, 0.010693, 0.010711]
|
-0.033961, 0.010680, 0.010886, -0.029274, 0.011178, 0.010693, 0.010711]
|
||||||
|
|
||||||
dates = [Arrow(2020, 1, 1).shift(days=i) for i in range(len(values))]
|
dates = [Arrow(2020, 1, 1).shift(days=i) for i in range(len(values))]
|
||||||
df = DataFrame(zip(values, dates), columns=['profit', 'open_time'])
|
df = DataFrame(zip(values, dates), columns=['profit', 'open_date'])
|
||||||
# sort by profit and reset index
|
# sort by profit and reset index
|
||||||
df = df.sort_values('profit').reset_index(drop=True)
|
df = df.sort_values('profit').reset_index(drop=True)
|
||||||
df1 = df.copy()
|
df1 = df.copy()
|
||||||
drawdown, h, low = calculate_max_drawdown(df, date_col='open_time', value_col='profit')
|
drawdown, h, low = calculate_max_drawdown(df, date_col='open_date', value_col='profit')
|
||||||
# Ensure df has not been altered.
|
# Ensure df has not been altered.
|
||||||
assert df.equals(df1)
|
assert df.equals(df1)
|
||||||
|
|
||||||
@ -217,6 +297,6 @@ def test_calculate_max_drawdown2():
|
|||||||
assert h < low
|
assert h < low
|
||||||
assert drawdown == 0.091755
|
assert drawdown == 0.091755
|
||||||
|
|
||||||
df = DataFrame(zip(values[:5], dates[:5]), columns=['profit', 'open_time'])
|
df = DataFrame(zip(values[:5], dates[:5]), columns=['profit', 'open_date'])
|
||||||
with pytest.raises(ValueError, match='No losing trade, therefore no drawdown.'):
|
with pytest.raises(ValueError, match='No losing trade, therefore no drawdown.'):
|
||||||
calculate_max_drawdown(df, date_col='open_time', value_col='profit')
|
calculate_max_drawdown(df, date_col='open_date', value_col='profit')
|
||||||
|
@@ -12,7 +12,9 @@ from pandas import DataFrame
from pandas.testing import assert_frame_equal

from freqtrade.configuration import TimeRange
+from freqtrade.constants import AVAILABLE_DATAHANDLERS
from freqtrade.data.converter import ohlcv_to_dataframe
+from freqtrade.data.history.hdf5datahandler import HDF5DataHandler
from freqtrade.data.history.history_utils import (
    _download_pair_history, _download_trades_history,
    _load_cached_data_for_updating, convert_trades_to_ohlcv, get_timerange,
@@ -36,7 +38,7 @@ def _backup_file(file: Path, copy_file: bool = False) -> None:
    """
    Backup existing file to avoid deleting the user file
    :param file: complete path to the file
-   :param touch_file: create an empty file in replacement
+   :param copy_file: keep file in place too.
    :return: None
    """
    file_swp = str(file) + '.swp'
@@ -620,7 +622,7 @@ def test_convert_trades_to_ohlcv(mocker, default_conf, testdatadir, caplog):
    _clean_test_file(file5)


-def test_jsondatahandler_ohlcv_get_pairs(testdatadir):
+def test_datahandler_ohlcv_get_pairs(testdatadir):
    pairs = JsonDataHandler.ohlcv_get_pairs(testdatadir, '5m')
    # Convert to set to avoid failures due to sorting
    assert set(pairs) == {'UNITTEST/BTC', 'XLM/BTC', 'ETH/BTC', 'TRX/BTC', 'LTC/BTC',
@@ -630,8 +632,11 @@ def test_jsondatahandler_ohlcv_get_pairs(testdatadir):
    pairs = JsonGzDataHandler.ohlcv_get_pairs(testdatadir, '8m')
    assert set(pairs) == {'UNITTEST/BTC'}

+   pairs = HDF5DataHandler.ohlcv_get_pairs(testdatadir, '5m')
+   assert set(pairs) == {'UNITTEST/BTC'}


-def test_jsondatahandler_ohlcv_get_available_data(testdatadir):
+def test_datahandler_ohlcv_get_available_data(testdatadir):
    paircombs = JsonDataHandler.ohlcv_get_available_data(testdatadir)
    # Convert to set to avoid failures due to sorting
    assert set(paircombs) == {('UNITTEST/BTC', '5m'), ('ETH/BTC', '5m'), ('XLM/BTC', '5m'),
@@ -643,6 +648,8 @@ def test_jsondatahandler_ohlcv_get_available_data(testdatadir):

    paircombs = JsonGzDataHandler.ohlcv_get_available_data(testdatadir)
    assert set(paircombs) == {('UNITTEST/BTC', '8m')}
+   paircombs = HDF5DataHandler.ohlcv_get_available_data(testdatadir)
+   assert set(paircombs) == {('UNITTEST/BTC', '5m')}


def test_jsondatahandler_trades_get_pairs(testdatadir):
@@ -653,15 +660,17 @@ def test_jsondatahandler_trades_get_pairs(testdatadir):

def test_jsondatahandler_ohlcv_purge(mocker, testdatadir):
    mocker.patch.object(Path, "exists", MagicMock(return_value=False))
-   mocker.patch.object(Path, "unlink", MagicMock())
+   unlinkmock = mocker.patch.object(Path, "unlink", MagicMock())
    dh = JsonGzDataHandler(testdatadir)
    assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m')
+   assert unlinkmock.call_count == 0

    mocker.patch.object(Path, "exists", MagicMock(return_value=True))
    assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m')
+   assert unlinkmock.call_count == 1


-def test_jsondatahandler_trades_load(mocker, testdatadir, caplog):
+def test_jsondatahandler_trades_load(testdatadir, caplog):
    dh = JsonGzDataHandler(testdatadir)
    logmsg = "Old trades format detected - converting"
    dh.trades_load('XRP/ETH')
@@ -674,26 +683,144 @@ def test_jsondatahandler_trades_load(mocker, testdatadir, caplog):

def test_jsondatahandler_trades_purge(mocker, testdatadir):
    mocker.patch.object(Path, "exists", MagicMock(return_value=False))
-   mocker.patch.object(Path, "unlink", MagicMock())
+   unlinkmock = mocker.patch.object(Path, "unlink", MagicMock())
    dh = JsonGzDataHandler(testdatadir)
    assert not dh.trades_purge('UNITTEST/NONEXIST')
+   assert unlinkmock.call_count == 0

    mocker.patch.object(Path, "exists", MagicMock(return_value=True))
    assert dh.trades_purge('UNITTEST/NONEXIST')
+   assert unlinkmock.call_count == 1


-def test_jsondatahandler_ohlcv_append(testdatadir):
-   dh = JsonGzDataHandler(testdatadir)
+@pytest.mark.parametrize('datahandler', AVAILABLE_DATAHANDLERS)
+def test_datahandler_ohlcv_append(datahandler, testdatadir, ):
+   dh = get_datahandler(testdatadir, datahandler)
    with pytest.raises(NotImplementedError):
        dh.ohlcv_append('UNITTEST/ETH', '5m', DataFrame())


-def test_jsondatahandler_trades_append(testdatadir):
-   dh = JsonGzDataHandler(testdatadir)
+@pytest.mark.parametrize('datahandler', AVAILABLE_DATAHANDLERS)
+def test_datahandler_trades_append(datahandler, testdatadir):
+   dh = get_datahandler(testdatadir, datahandler)
    with pytest.raises(NotImplementedError):
        dh.trades_append('UNITTEST/ETH', [])


+def test_hdf5datahandler_trades_get_pairs(testdatadir):
+   pairs = HDF5DataHandler.trades_get_pairs(testdatadir)
+   # Convert to set to avoid failures due to sorting
+   assert set(pairs) == {'XRP/ETH'}
+
+
+def test_hdf5datahandler_trades_load(testdatadir):
+   dh = HDF5DataHandler(testdatadir)
+   trades = dh.trades_load('XRP/ETH')
+   assert isinstance(trades, list)
+
+   trades1 = dh.trades_load('UNITTEST/NONEXIST')
+   assert trades1 == []
+   # data goes from 2019-10-11 - 2019-10-13
+   timerange = TimeRange.parse_timerange('20191011-20191012')
+
+   trades2 = dh._trades_load('XRP/ETH', timerange)
+   assert len(trades) > len(trades2)
+
+   # unfiltered load has trades before starttime
+   assert len([t for t in trades if t[0] < timerange.startts * 1000]) >= 0
+   # filtered list does not have trades before starttime
+   assert len([t for t in trades2 if t[0] < timerange.startts * 1000]) == 0
+   # unfiltered load has trades after endtime
+   assert len([t for t in trades if t[0] > timerange.stopts * 1000]) > 0
+   # filtered list does not have trades after endtime
+   assert len([t for t in trades2 if t[0] > timerange.stopts * 1000]) == 0
+
+
+def test_hdf5datahandler_trades_store(testdatadir):
+   dh = HDF5DataHandler(testdatadir)
+   trades = dh.trades_load('XRP/ETH')
+
+   dh.trades_store('XRP/NEW', trades)
+   file = testdatadir / 'XRP_NEW-trades.h5'
+   assert file.is_file()
+   # Load trades back
+   trades_new = dh.trades_load('XRP/NEW')
+
+   assert len(trades_new) == len(trades)
+   assert trades[0][0] == trades_new[0][0]
+   assert trades[0][1] == trades_new[0][1]
+   # assert trades[0][2] == trades_new[0][2] # This is nan - so comparison does not make sense
+   assert trades[0][3] == trades_new[0][3]
+   assert trades[0][4] == trades_new[0][4]
+   assert trades[0][5] == trades_new[0][5]
+   assert trades[0][6] == trades_new[0][6]
+   assert trades[-1][0] == trades_new[-1][0]
+   assert trades[-1][1] == trades_new[-1][1]
+   # assert trades[-1][2] == trades_new[-1][2] # This is nan - so comparison does not make sense
+   assert trades[-1][3] == trades_new[-1][3]
+   assert trades[-1][4] == trades_new[-1][4]
+   assert trades[-1][5] == trades_new[-1][5]
+   assert trades[-1][6] == trades_new[-1][6]
+
+   _clean_test_file(file)
+
+
+def test_hdf5datahandler_trades_purge(mocker, testdatadir):
+   mocker.patch.object(Path, "exists", MagicMock(return_value=False))
+   unlinkmock = mocker.patch.object(Path, "unlink", MagicMock())
+   dh = HDF5DataHandler(testdatadir)
+   assert not dh.trades_purge('UNITTEST/NONEXIST')
+   assert unlinkmock.call_count == 0
+
+   mocker.patch.object(Path, "exists", MagicMock(return_value=True))
+   assert dh.trades_purge('UNITTEST/NONEXIST')
+   assert unlinkmock.call_count == 1
+
+
+def test_hdf5datahandler_ohlcv_load_and_resave(testdatadir):
+   dh = HDF5DataHandler(testdatadir)
+   ohlcv = dh.ohlcv_load('UNITTEST/BTC', '5m')
+   assert isinstance(ohlcv, DataFrame)
+   assert len(ohlcv) > 0
+
+   file = testdatadir / 'UNITTEST_NEW-5m.h5'
+   assert not file.is_file()
+
+   dh.ohlcv_store('UNITTEST/NEW', '5m', ohlcv)
+   assert file.is_file()
+
+   assert not ohlcv[ohlcv['date'] < '2018-01-15'].empty
+
+   # Data goes from 2018-01-10 - 2018-01-30
+   timerange = TimeRange.parse_timerange('20180115-20180119')
+
+   # Call private function to ensure timerange is filtered in hdf5
+   ohlcv = dh._ohlcv_load('UNITTEST/BTC', '5m', timerange)
+   ohlcv1 = dh._ohlcv_load('UNITTEST/NEW', '5m', timerange)
+   assert len(ohlcv) == len(ohlcv1)
+   assert ohlcv.equals(ohlcv1)
+   assert ohlcv[ohlcv['date'] < '2018-01-15'].empty
+   assert ohlcv[ohlcv['date'] > '2018-01-19'].empty
+
+   _clean_test_file(file)
+
+   # Try loading inexisting file
+   ohlcv = dh.ohlcv_load('UNITTEST/NONEXIST', '5m')
+   assert ohlcv.empty
+
+
+def test_hdf5datahandler_ohlcv_purge(mocker, testdatadir):
+   mocker.patch.object(Path, "exists", MagicMock(return_value=False))
+   unlinkmock = mocker.patch.object(Path, "unlink", MagicMock())
+   dh = HDF5DataHandler(testdatadir)
+   assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m')
+   assert unlinkmock.call_count == 0
+
+   mocker.patch.object(Path, "exists", MagicMock(return_value=True))
+   assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m')
+   assert unlinkmock.call_count == 1
+
+
def test_gethandlerclass():
    cl = get_datahandlerclass('json')
    assert cl == JsonDataHandler
@@ -702,6 +829,9 @@ def test_gethandlerclass():
    assert cl == JsonGzDataHandler
    assert issubclass(cl, IDataHandler)
    assert issubclass(cl, JsonDataHandler)
+   cl = get_datahandlerclass('hdf5')
+   assert cl == HDF5DataHandler
+   assert issubclass(cl, IDataHandler)
    with pytest.raises(ValueError, match=r"No datahandler for .*"):
        get_datahandlerclass('DeadBeef')

@@ -713,3 +843,6 @@ def test_get_datahandler(testdatadir):
    assert type(dh) == JsonGzDataHandler
    dh1 = get_datahandler(testdatadir, 'jsongz', dh)
    assert id(dh1) == id(dh)

+   dh = get_datahandler(testdatadir, 'hdf5')
+   assert type(dh) == HDF5DataHandler
@@ -163,8 +163,8 @@ def test_edge_results(edge_conf, mocker, caplog, data) -> None:
    for c, trade in enumerate(data.trades):
        res = results.iloc[c]
        assert res.exit_type == trade.sell_reason
-       assert res.open_time == _get_frame_time_from_offset(trade.open_tick).replace(tzinfo=None)
+       assert res.open_date == _get_frame_time_from_offset(trade.open_tick).replace(tzinfo=None)
-       assert res.close_time == _get_frame_time_from_offset(trade.close_tick).replace(tzinfo=None)
+       assert res.close_date == _get_frame_time_from_offset(trade.close_tick).replace(tzinfo=None)


def test_adjust(mocker, edge_conf):
@@ -354,10 +354,8 @@ def test_process_expectancy(mocker, edge_conf, fee, risk_reward_ratio, expectanc
    'stoploss': -0.9,
    'profit_percent': '',
    'profit_abs': '',
-   'open_time': np.datetime64('2018-10-03T00:05:00.000000000'),
+   'open_date': np.datetime64('2018-10-03T00:05:00.000000000'),
-   'close_time': np.datetime64('2018-10-03T00:10:00.000000000'),
+   'close_date': np.datetime64('2018-10-03T00:10:00.000000000'),
-   'open_index': 1,
-   'close_index': 1,
    'trade_duration': '',
    'open_rate': 17,
    'close_rate': 17,
@@ -367,10 +365,8 @@ def test_process_expectancy(mocker, edge_conf, fee, risk_reward_ratio, expectanc
    'stoploss': -0.9,
    'profit_percent': '',
    'profit_abs': '',
-   'open_time': np.datetime64('2018-10-03T00:20:00.000000000'),
+   'open_date': np.datetime64('2018-10-03T00:20:00.000000000'),
-   'close_time': np.datetime64('2018-10-03T00:25:00.000000000'),
+   'close_date': np.datetime64('2018-10-03T00:25:00.000000000'),
-   'open_index': 4,
-   'close_index': 4,
    'trade_duration': '',
    'open_rate': 20,
    'close_rate': 20,
@@ -380,10 +376,8 @@ def test_process_expectancy(mocker, edge_conf, fee, risk_reward_ratio, expectanc
    'stoploss': -0.9,
    'profit_percent': '',
    'profit_abs': '',
-   'open_time': np.datetime64('2018-10-03T00:30:00.000000000'),
+   'open_date': np.datetime64('2018-10-03T00:30:00.000000000'),
-   'close_time': np.datetime64('2018-10-03T00:40:00.000000000'),
+   'close_date': np.datetime64('2018-10-03T00:40:00.000000000'),
-   'open_index': 6,
-   'close_index': 7,
    'trade_duration': '',
    'open_rate': 26,
    'close_rate': 34,
@@ -424,8 +418,8 @@ def test_process_expectancy_remove_pumps(mocker, edge_conf, fee,):
    'stoploss': -0.9,
    'profit_percent': '',
    'profit_abs': '',
-   'open_time': np.datetime64('2018-10-03T00:05:00.000000000'),
+   'open_date': np.datetime64('2018-10-03T00:05:00.000000000'),
-   'close_time': np.datetime64('2018-10-03T00:10:00.000000000'),
+   'close_date': np.datetime64('2018-10-03T00:10:00.000000000'),
    'open_index': 1,
    'close_index': 1,
    'trade_duration': '',
@@ -437,8 +431,8 @@ def test_process_expectancy_remove_pumps(mocker, edge_conf, fee,):
    'stoploss': -0.9,
    'profit_percent': '',
    'profit_abs': '',
-   'open_time': np.datetime64('2018-10-03T00:20:00.000000000'),
+   'open_date': np.datetime64('2018-10-03T00:20:00.000000000'),
-   'close_time': np.datetime64('2018-10-03T00:25:00.000000000'),
+   'close_date': np.datetime64('2018-10-03T00:25:00.000000000'),
    'open_index': 4,
    'close_index': 4,
    'trade_duration': '',
@@ -449,8 +443,8 @@ def test_process_expectancy_remove_pumps(mocker, edge_conf, fee,):
    'stoploss': -0.9,
    'profit_percent': '',
    'profit_abs': '',
-   'open_time': np.datetime64('2018-10-03T00:20:00.000000000'),
+   'open_date': np.datetime64('2018-10-03T00:20:00.000000000'),
-   'close_time': np.datetime64('2018-10-03T00:25:00.000000000'),
+   'close_date': np.datetime64('2018-10-03T00:25:00.000000000'),
    'open_index': 4,
    'close_index': 4,
    'trade_duration': '',
@@ -461,8 +455,8 @@ def test_process_expectancy_remove_pumps(mocker, edge_conf, fee,):
    'stoploss': -0.9,
    'profit_percent': '',
    'profit_abs': '',
-   'open_time': np.datetime64('2018-10-03T00:20:00.000000000'),
+   'open_date': np.datetime64('2018-10-03T00:20:00.000000000'),
-   'close_time': np.datetime64('2018-10-03T00:25:00.000000000'),
+   'close_date': np.datetime64('2018-10-03T00:25:00.000000000'),
    'open_index': 4,
    'close_index': 4,
    'trade_duration': '',
@@ -473,8 +467,8 @@ def test_process_expectancy_remove_pumps(mocker, edge_conf, fee,):
    'stoploss': -0.9,
    'profit_percent': '',
    'profit_abs': '',
-   'open_time': np.datetime64('2018-10-03T00:20:00.000000000'),
+   'open_date': np.datetime64('2018-10-03T00:20:00.000000000'),
-   'close_time': np.datetime64('2018-10-03T00:25:00.000000000'),
+   'close_date': np.datetime64('2018-10-03T00:25:00.000000000'),
    'open_index': 4,
    'close_index': 4,
    'trade_duration': '',
@@ -486,8 +480,8 @@ def test_process_expectancy_remove_pumps(mocker, edge_conf, fee,):
    'stoploss': -0.9,
    'profit_percent': '',
    'profit_abs': '',
-   'open_time': np.datetime64('2018-10-03T00:30:00.000000000'),
+   'open_date': np.datetime64('2018-10-03T00:30:00.000000000'),
-   'close_time': np.datetime64('2018-10-03T00:40:00.000000000'),
+   'close_date': np.datetime64('2018-10-03T00:40:00.000000000'),
    'open_index': 6,
    'close_index': 7,
    'trade_duration': '',
@@ -11,11 +11,12 @@ import ccxt
import pytest
from pandas import DataFrame

-from freqtrade.exceptions import (DependencyException, InvalidOrderException, DDosProtection,
-                                  OperationalException, TemporaryError)
+from freqtrade.exceptions import (DDosProtection, DependencyException,
+                                  InvalidOrderException, OperationalException,
+                                  TemporaryError)
from freqtrade.exchange import Binance, Exchange, Kraken
from freqtrade.exchange.common import API_RETRY_COUNT, calculate_backoff
-from freqtrade.exchange.exchange import (market_is_active, symbol_is_pair,
+from freqtrade.exchange.exchange import (market_is_active,
                                          timeframe_to_minutes,
                                          timeframe_to_msecs,
                                          timeframe_to_next_date,
@@ -1760,6 +1761,14 @@ def test_cancel_order_dry_run(default_conf, mocker, exchange_name):
    assert exchange.cancel_order(order_id='123', pair='TKN/BTC') == {}
    assert exchange.cancel_stoploss_order(order_id='123', pair='TKN/BTC') == {}

+   order = exchange.buy('ETH/BTC', 'limit', 5, 0.55, 'gtc')
+
+   cancel_order = exchange.cancel_order(order_id=order['id'], pair='ETH/BTC')
+   assert order['id'] == cancel_order['id']
+   assert order['amount'] == cancel_order['amount']
+   assert order['pair'] == cancel_order['pair']
+   assert cancel_order['status'] == 'canceled'


@pytest.mark.parametrize("exchange_name", EXCHANGES)
@pytest.mark.parametrize("order,result", [
@@ -2218,25 +2227,42 @@ def test_timeframe_to_next_date():
    assert timeframe_to_next_date("5m") > date


-@pytest.mark.parametrize("market_symbol,base_currency,quote_currency,expected_result", [
-   ("BTC/USDT", None, None, True),
-   ("USDT/BTC", None, None, True),
-   ("BTCUSDT", None, None, False),
-   ("BTC/USDT", None, "USDT", True),
-   ("USDT/BTC", None, "USDT", False),
-   ("BTCUSDT", None, "USDT", False),
-   ("BTC/USDT", "BTC", None, True),
-   ("USDT/BTC", "BTC", None, False),
-   ("BTCUSDT", "BTC", None, False),
-   ("BTC/USDT", "BTC", "USDT", True),
-   ("BTC/USDT", "USDT", "BTC", False),
-   ("BTC/USDT", "BTC", "USD", False),
-   ("BTCUSDT", "BTC", "USDT", False),
-   ("BTC/", None, None, False),
-   ("/USDT", None, None, False),
+@pytest.mark.parametrize("market_symbol,base,quote,exchange,add_dict,expected_result", [
+   ("BTC/USDT", 'BTC', 'USDT', "binance", {}, True),
+   ("USDT/BTC", 'USDT', 'BTC', "binance", {}, True),
+   ("USDT/BTC", 'BTC', 'USDT', "binance", {}, False),  # Reversed currencies
+   ("BTCUSDT", 'BTC', 'USDT', "binance", {}, False),  # No seperating /
+   ("BTCUSDT", None, "USDT", "binance", {}, False),  #
+   ("USDT/BTC", "BTC", None, "binance", {}, False),
+   ("BTCUSDT", "BTC", None, "binance", {}, False),
+   ("BTC/USDT", "BTC", "USDT", "binance", {}, True),
+   ("BTC/USDT", "USDT", "BTC", "binance", {}, False),  # reversed currencies
+   ("BTC/USDT", "BTC", "USD", "binance", {}, False),  # Wrong quote currency
+   ("BTC/", "BTC", 'UNK', "binance", {}, False),
+   ("/USDT", 'UNK', 'USDT', "binance", {}, False),
+   ("BTC/EUR", 'BTC', 'EUR', "kraken", {"darkpool": False}, True),
+   ("EUR/BTC", 'EUR', 'BTC', "kraken", {"darkpool": False}, True),
+   ("EUR/BTC", 'BTC', 'EUR', "kraken", {"darkpool": False}, False),  # Reversed currencies
+   ("BTC/EUR", 'BTC', 'USD', "kraken", {"darkpool": False}, False),  # wrong quote currency
+   ("BTC/EUR", 'BTC', 'EUR', "kraken", {"darkpool": True}, False),  # no darkpools
+   ("BTC/EUR.d", 'BTC', 'EUR', "kraken", {"darkpool": True}, False),  # no darkpools
+   ("BTC/USD", 'BTC', 'USD', "ftx", {'spot': True}, True),
+   ("USD/BTC", 'USD', 'BTC', "ftx", {'spot': True}, True),
+   ("BTC/USD", 'BTC', 'USDT', "ftx", {'spot': True}, False),  # Wrong quote currency
+   ("BTC/USD", 'USD', 'BTC', "ftx", {'spot': True}, False),  # Reversed currencies
+   ("BTC/USD", 'BTC', 'USD', "ftx", {'spot': False}, False),  # Can only trade spot markets
+   ("BTC-PERP", 'BTC', 'USD', "ftx", {'spot': False}, False),  # Can only trade spot markets
])
-def test_symbol_is_pair(market_symbol, base_currency, quote_currency, expected_result) -> None:
-   assert symbol_is_pair(market_symbol, base_currency, quote_currency) == expected_result
+def test_market_is_tradable(mocker, default_conf, market_symbol, base,
+                            quote, add_dict, exchange, expected_result) -> None:
+   ex = get_patched_exchange(mocker, default_conf, id=exchange)
+   market = {
+       'symbol': market_symbol,
+       'base': base,
+       'quote': quote,
+       **(add_dict),
+   }
+   assert ex.market_is_tradable(market) == expected_result


@pytest.mark.parametrize("market,expected_result", [
@@ -395,5 +395,5 @@ def test_backtest_results(default_conf, fee, mocker, caplog, data) -> None:
    for c, trade in enumerate(data.trades):
        res = results.iloc[c]
        assert res.sell_reason == trade.sell_reason
-       assert res.open_time == _get_frame_time_from_offset(trade.open_tick)
+       assert res.open_date == _get_frame_time_from_offset(trade.open_tick)
-       assert res.close_time == _get_frame_time_from_offset(trade.close_tick)
+       assert res.close_date == _get_frame_time_from_offset(trade.close_tick)
@@ -354,11 +354,12 @@ def test_backtesting_start(default_conf, mocker, testdatadir, caplog) -> None:
    exists = [
        'Using stake_currency: BTC ...',
        'Using stake_amount: 0.001 ...',
-       'Backtesting with data from 2017-11-14T21:17:00+00:00 '
-       'up to 2017-11-14T22:59:00+00:00 (0 days)..'
+       'Backtesting with data from 2017-11-14 21:17:00 '
+       'up to 2017-11-14 22:59:00 (0 days)..'
    ]
    for line in exists:
        assert log_has(line, caplog)
+   assert backtesting.strategy.dp._pairlists is not None


def test_backtesting_start_no_data(default_conf, mocker, caplog, testdatadir) -> None:
@@ -464,28 +465,29 @@ def test_backtest(default_conf, fee, mocker, testdatadir) -> None:
        {'pair': [pair, pair],
         'profit_percent': [0.0, 0.0],
         'profit_abs': [0.0, 0.0],
-        'open_time': pd.to_datetime([Arrow(2018, 1, 29, 18, 40, 0).datetime,
+        'open_date': pd.to_datetime([Arrow(2018, 1, 29, 18, 40, 0).datetime,
                                      Arrow(2018, 1, 30, 3, 30, 0).datetime], utc=True
                                     ),
-        'close_time': pd.to_datetime([Arrow(2018, 1, 29, 22, 35, 0).datetime,
+        'open_rate': [0.104445, 0.10302485],
+        'open_fee': [0.0025, 0.0025],
+        'close_date': pd.to_datetime([Arrow(2018, 1, 29, 22, 35, 0).datetime,
                                       Arrow(2018, 1, 30, 4, 10, 0).datetime], utc=True),
-        'open_index': [78, 184],
-        'close_index': [125, 192],
+        'close_rate': [0.104969, 0.103541],
+        'close_fee': [0.0025, 0.0025],
+        'amount': [0.00957442, 0.0097064],
         'trade_duration': [235, 40],
         'open_at_end': [False, False],
-        'open_rate': [0.104445, 0.10302485],
-        'close_rate': [0.104969, 0.103541],
         'sell_reason': [SellType.ROI, SellType.ROI]
         })
    pd.testing.assert_frame_equal(results, expected)
    data_pair = processed[pair]
    for _, t in results.iterrows():
-       ln = data_pair.loc[data_pair["date"] == t["open_time"]]
+       ln = data_pair.loc[data_pair["date"] == t["open_date"]]
        # Check open trade rate alignes to open rate
        assert ln is not None
        assert round(ln.iloc[0]["open"], 6) == round(t["open_rate"], 6)
        # check close trade rate alignes to close rate or is between high and low
-       ln = data_pair.loc[data_pair["date"] == t["close_time"]]
+       ln = data_pair.loc[data_pair["date"] == t["close_date"]]
        assert (round(ln.iloc[0]["open"], 6) == round(t["close_rate"], 6) or
                round(ln.iloc[0]["low"], 6) < round(
                    t["close_rate"], 6) < round(ln.iloc[0]["high"], 6))
@@ -677,10 +679,10 @@ def test_backtest_start_timerange(default_conf, mocker, caplog, testdatadir):
        f'Using data directory: {testdatadir} ...',
        'Using stake_currency: BTC ...',
        'Using stake_amount: 0.001 ...',
-       'Loading data from 2017-11-14T20:57:00+00:00 '
-       'up to 2017-11-14T22:58:00+00:00 (0 days)..',
+       'Loading data from 2017-11-14 20:57:00 '
+       'up to 2017-11-14 22:58:00 (0 days)..',
-       'Backtesting with data from 2017-11-14T21:17:00+00:00 '
-       'up to 2017-11-14T22:58:00+00:00 (0 days)..',
+       'Backtesting with data from 2017-11-14 21:17:00 '
+       'up to 2017-11-14 22:58:00 (0 days)..',
        'Parameter --enable-position-stacking detected ...'
    ]

@@ -707,6 +709,7 @@ def test_backtest_start_multi_strat(default_conf, mocker, caplog, testdatadir):
        generate_pair_metrics=MagicMock(),
        generate_sell_reason_stats=sell_reason_mock,
        generate_strategy_metrics=strat_summary,
+       generate_daily_stats=MagicMock(),
    )
    patched_configuration_load_config_file(mocker, default_conf)

@@ -740,10 +743,10 @@ def test_backtest_start_multi_strat(default_conf, mocker, caplog, testdatadir):
        f'Using data directory: {testdatadir} ...',
        'Using stake_currency: BTC ...',
        'Using stake_amount: 0.001 ...',
-       'Loading data from 2017-11-14T20:57:00+00:00 '
-       'up to 2017-11-14T22:58:00+00:00 (0 days)..',
+       'Loading data from 2017-11-14 20:57:00 '
+       'up to 2017-11-14 22:58:00 (0 days)..',
-       'Backtesting with data from 2017-11-14T21:17:00+00:00 '
-       'up to 2017-11-14T22:58:00+00:00 (0 days)..',
+       'Backtesting with data from 2017-11-14 21:17:00 '
+       'up to 2017-11-14 22:58:00 (0 days)..',
        'Parameter --enable-position-stacking detected ...',
        'Running backtesting for Strategy DefaultStrategy',
        'Running backtesting for Strategy TestStrategyLegacy',
@@ -761,13 +764,11 @@ def test_backtest_start_multi_strat_nomock(default_conf, mocker, caplog, testdat
        pd.DataFrame({'pair': ['XRP/BTC', 'LTC/BTC'],
                      'profit_percent': [0.0, 0.0],
                      'profit_abs': [0.0, 0.0],
-                     'open_time': pd.to_datetime(['2018-01-29 18:40:00',
+                     'open_date': pd.to_datetime(['2018-01-29 18:40:00',
                                                   '2018-01-30 03:30:00', ], utc=True
                                                  ),
-                     'close_time': pd.to_datetime(['2018-01-29 20:45:00',
+                     'close_date': pd.to_datetime(['2018-01-29 20:45:00',
                                                    '2018-01-30 05:35:00', ], utc=True),
-                     'open_index': [78, 184],
-                     'close_index': [125, 192],
                      'trade_duration': [235, 40],
                      'open_at_end': [False, False],
                      'open_rate': [0.104445, 0.10302485],
@@ -777,15 +778,13 @@ def test_backtest_start_multi_strat_nomock(default_conf, mocker, caplog, testdat
        pd.DataFrame({'pair': ['XRP/BTC', 'LTC/BTC', 'ETH/BTC'],
                      'profit_percent': [0.03, 0.01, 0.1],
                      'profit_abs': [0.01, 0.02, 0.2],
-                     'open_time': pd.to_datetime(['2018-01-29 18:40:00',
+                     'open_date': pd.to_datetime(['2018-01-29 18:40:00',
                                                   '2018-01-30 03:30:00',
                                                   '2018-01-30 05:30:00'], utc=True
                                                  ),
-                     'close_time': pd.to_datetime(['2018-01-29 20:45:00',
+                     'close_date': pd.to_datetime(['2018-01-29 20:45:00',
                                                    '2018-01-30 05:35:00',
                                                    '2018-01-30 08:30:00'], utc=True),
-                     'open_index': [78, 184, 185],
-                     'close_index': [125, 224, 205],
                      'trade_duration': [47, 40, 20],
                      'open_at_end': [False, False, False],
                      'open_rate': [0.104445, 0.10302485, 0.122541],
@@ -823,10 +822,10 @@ def test_backtest_start_multi_strat_nomock(default_conf, mocker, caplog, testdat
        f'Using data directory: {testdatadir} ...',
        'Using stake_currency: BTC ...',
        'Using stake_amount: 0.001 ...',
-       'Loading data from 2017-11-14T20:57:00+00:00 '
-       'up to 2017-11-14T22:58:00+00:00 (0 days)..',
+       'Loading data from 2017-11-14 20:57:00 '
+       'up to 2017-11-14 22:58:00 (0 days)..',
-       'Backtesting with data from 2017-11-14T21:17:00+00:00 '
-       'up to 2017-11-14T22:58:00+00:00 (0 days)..',
+       'Backtesting with data from 2017-11-14 21:17:00 '
+       'up to 2017-11-14 22:58:00 (0 days)..',
        'Parameter --enable-position-stacking detected ...',
        'Running backtesting for Strategy DefaultStrategy',
        'Running backtesting for Strategy TestStrategyLegacy',
|
|||||||
edge_cli = EdgeCli(edge_conf)
|
edge_cli = EdgeCli(edge_conf)
|
||||||
assert edge_cli.edge.fee == 0.1234
|
assert edge_cli.edge.fee == 0.1234
|
||||||
assert fee_mock.call_count == 0
|
assert fee_mock.call_count == 0
|
||||||
|
|
||||||
|
|
||||||
|
def test_edge_start(mocker, edge_conf) -> None:
|
||||||
|
mock_calculate = mocker.patch('freqtrade.edge.edge_positioning.Edge.calculate',
|
||||||
|
return_value=True)
|
||||||
|
table_mock = mocker.patch('freqtrade.optimize.edge_cli.generate_edge_table')
|
||||||
|
|
||||||
|
patch_exchange(mocker)
|
||||||
|
edge_conf['stake_amount'] = 20
|
||||||
|
|
||||||
|
edge_cli = EdgeCli(edge_conf)
|
||||||
|
edge_cli.start()
|
||||||
|
assert mock_calculate.call_count == 1
|
||||||
|
assert table_mock.call_count == 1
|
||||||
|
@@ -3,6 +3,7 @@ import locale
import logging
from datetime import datetime
from pathlib import Path
+from copy import deepcopy
from typing import Dict, List
from unittest.mock import MagicMock, PropertyMock

@@ -16,7 +17,6 @@ from freqtrade.commands.optimize_commands import (setup_optimize_configuration,
                                                  start_hyperopt)
from freqtrade.data.history import load_data
from freqtrade.exceptions import DependencyException, OperationalException
-from freqtrade.optimize.default_hyperopt import DefaultHyperOpt
from freqtrade.optimize.default_hyperopt_loss import DefaultHyperOptLoss
from freqtrade.optimize.hyperopt import Hyperopt
from freqtrade.resolvers.hyperopt_resolver import (HyperOptLossResolver,
@@ -26,15 +26,28 @@ from freqtrade.strategy.interface import SellType
from tests.conftest import (get_args, log_has, log_has_re, patch_exchange,
                            patched_configuration_load_config_file)

+from .hyperopts.default_hyperopt import DefaultHyperOpt


@pytest.fixture(scope='function')
-def hyperopt(default_conf, mocker):
-   default_conf.update({
-       'spaces': ['default'],
-       'hyperopt': 'DefaultHyperOpt',
-   })
+def hyperopt_conf(default_conf):
+   hyperconf = deepcopy(default_conf)
+   hyperconf.update({
+       'hyperopt': 'DefaultHyperOpt',
+       'hyperopt_path': str(Path(__file__).parent / 'hyperopts'),
+       'epochs': 1,
+       'timerange': None,
+       'spaces': ['default'],
+       'hyperopt_jobs': 1,
+   })
+   return hyperconf
+
+
+@pytest.fixture(scope='function')
+def hyperopt(hyperopt_conf, mocker):
    patch_exchange(mocker)
-   return Hyperopt(default_conf)
+   return Hyperopt(hyperopt_conf)


@pytest.fixture(scope='function')
@@ -46,7 +59,7 @@ def hyperopt_results():
    'profit_abs': [-0.2, 0.4, 0.6],
    'trade_duration': [10, 30, 10],
    'sell_reason': [SellType.STOP_LOSS, SellType.ROI, SellType.ROI],
-   'close_time':
+   'close_date':
    [
        datetime(2019, 1, 1, 9, 26, 3, 478039),
        datetime(2019, 2, 1, 9, 26, 3, 478039),
@@ -160,7 +173,7 @@ def test_setup_hyperopt_configuration_with_arguments(mocker, default_conf, caplo
    assert log_has('Parameter --print-all detected ...', caplog)


-def test_setup_hyperopt_configuration_unlimited_stake_amount(mocker, default_conf, caplog) -> None:
+def test_setup_hyperopt_configuration_unlimited_stake_amount(mocker, default_conf) -> None:
    default_conf['stake_amount'] = constants.UNLIMITED_STAKE_AMOUNT

    patched_configuration_load_config_file(mocker, default_conf)
@@ -201,7 +214,7 @@ def test_hyperoptresolver(mocker, default_conf, caplog) -> None:
    assert hasattr(x, "timeframe")


-def test_hyperoptresolver_wrongname(mocker, default_conf, caplog) -> None:
+def test_hyperoptresolver_wrongname(default_conf) -> None:
    default_conf.update({'hyperopt': "NonExistingHyperoptClass"})

    with pytest.raises(OperationalException, match=r'Impossible to load Hyperopt.*'):
@@ -216,7 +229,7 @@ def test_hyperoptresolver_noname(default_conf):
        HyperOptResolver.load_hyperopt(default_conf)


-def test_hyperoptlossresolver(mocker, default_conf, caplog) -> None:
+def test_hyperoptlossresolver(mocker, default_conf) -> None:

    hl = DefaultHyperOptLoss
    mocker.patch(
@@ -227,14 +240,14 @@ def test_hyperoptlossresolver(mocker, default_conf, caplog) -> None:
    assert hasattr(x, "hyperopt_loss_function")


-def test_hyperoptlossresolver_wrongname(mocker, default_conf, caplog) -> None:
+def test_hyperoptlossresolver_wrongname(default_conf) -> None:
    default_conf.update({'hyperopt_loss': "NonExistingLossClass"})

    with pytest.raises(OperationalException, match=r'Impossible to load HyperoptLoss.*'):
        HyperOptLossResolver.load_hyperoptloss(default_conf)


-def test_start_not_installed(mocker, default_conf, caplog, import_fails) -> None:
+def test_start_not_installed(mocker, default_conf, import_fails) -> None:
    start_mock = MagicMock()
    patched_configuration_load_config_file(mocker, default_conf)

@@ -245,6 +258,8 @@ def test_start_not_installed(mocker, default_conf, caplog, import_fails) -> None
        'hyperopt',
        '--config', 'config.json',
        '--hyperopt', 'DefaultHyperOpt',
+       '--hyperopt-path',
+       str(Path(__file__).parent / "hyperopts"),
        '--epochs', '5'
    ]
    pargs = get_args(args)
@@ -253,9 +268,9 @@ def test_start_not_installed(mocker, default_conf, caplog, import_fails) -> None
        start_hyperopt(pargs)


-def test_start(mocker, default_conf, caplog) -> None:
+def test_start(mocker, hyperopt_conf, caplog) -> None:
    start_mock = MagicMock()
-   patched_configuration_load_config_file(mocker, default_conf)
+   patched_configuration_load_config_file(mocker, hyperopt_conf)
    mocker.patch('freqtrade.optimize.hyperopt.Hyperopt.start', start_mock)
    patch_exchange(mocker)

@@ -272,8 +287,8 @@ def test_start(mocker, default_conf, caplog) -> None:
    assert start_mock.call_count == 1


-def test_start_no_data(mocker, default_conf, caplog) -> None:
+def test_start_no_data(mocker, hyperopt_conf) -> None:
-   patched_configuration_load_config_file(mocker, default_conf)
+   patched_configuration_load_config_file(mocker, hyperopt_conf)
    mocker.patch('freqtrade.data.history.load_pair_history', MagicMock(return_value=pd.DataFrame))
    mocker.patch(
        'freqtrade.optimize.hyperopt.get_timerange',
@@ -293,9 +308,9 @@ def test_start_no_data(mocker, default_conf, caplog) -> None:
        start_hyperopt(pargs)


-def test_start_filelock(mocker, default_conf, caplog) -> None:
+def test_start_filelock(mocker, hyperopt_conf, caplog) -> None:
-   start_mock = MagicMock(side_effect=Timeout(Hyperopt.get_lock_filename(default_conf)))
-   patched_configuration_load_config_file(mocker, default_conf)
+   start_mock = MagicMock(side_effect=Timeout(Hyperopt.get_lock_filename(hyperopt_conf)))
+   patched_configuration_load_config_file(mocker, hyperopt_conf)
    mocker.patch('freqtrade.optimize.hyperopt.Hyperopt.start', start_mock)
    patch_exchange(mocker)

@@ -519,7 +534,7 @@ def test_roi_table_generation(hyperopt) -> None:
    assert hyperopt.custom_hyperopt.generate_roi_table(params) == {0: 6, 15: 3, 25: 1, 30: 0}


-def test_start_calls_optimizer(mocker, default_conf, caplog, capsys) -> None:
+def test_start_calls_optimizer(mocker, hyperopt_conf, capsys) -> None:
    dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
    mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
                 MagicMock(return_value=(MagicMock(), None)))
@@ -545,15 +560,9 @@ def test_start_calls_optimizer(mocker, default_conf, caplog, capsys) -> None:
    )
    patch_exchange(mocker)
    # Co-test loading timeframe from strategy
-   del default_conf['timeframe']
+   del hyperopt_conf['timeframe']
-   default_conf.update({'config': 'config.json.example',
-                        'hyperopt': 'DefaultHyperOpt',
-                        'epochs': 1,
-                        'timerange': None,
-                        'spaces': 'default',
-                        'hyperopt_jobs': 1, })

-   hyperopt = Hyperopt(default_conf)
+   hyperopt = Hyperopt(hyperopt_conf)
    hyperopt.backtesting.strategy.ohlcvdata_to_dataframe = MagicMock()
    hyperopt.custom_hyperopt.generate_roi_table = MagicMock(return_value={})

@@ -569,7 +578,7 @@ def test_start_calls_optimizer(mocker, default_conf, caplog, capsys) -> None:
    assert hasattr(hyperopt.backtesting.strategy, "advise_sell")
    assert hasattr(hyperopt.backtesting.strategy, "advise_buy")
    assert hasattr(hyperopt, "max_open_trades")
-   assert hyperopt.max_open_trades == default_conf['max_open_trades']
+   assert hyperopt.max_open_trades == hyperopt_conf['max_open_trades']
    assert hasattr(hyperopt, "position_stacking")
|
|||||||
assert 1 in result['buy']
|
assert 1 in result['buy']
|
||||||
|
|
||||||
|
|
||||||
def test_generate_optimizer(mocker, default_conf) -> None:
|
def test_sell_strategy_generator(hyperopt, testdatadir) -> None:
|
||||||
default_conf.update({'config': 'config.json.example',
|
data = load_data(testdatadir, '1m', ['UNITTEST/BTC'], fill_up_missing=True)
|
||||||
'hyperopt': 'DefaultHyperOpt',
|
dataframes = hyperopt.backtesting.strategy.ohlcvdata_to_dataframe(data)
|
||||||
'timerange': None,
|
dataframe = hyperopt.custom_hyperopt.populate_indicators(dataframes['UNITTEST/BTC'],
|
||||||
'spaces': 'all',
|
{'pair': 'UNITTEST/BTC'})
|
||||||
'hyperopt_min_trades': 1,
|
|
||||||
})
|
populate_sell_trend = hyperopt.custom_hyperopt.sell_strategy_generator(
|
||||||
|
{
|
||||||
|
'sell-adx-value': 20,
|
||||||
|
'sell-fastd-value': 75,
|
||||||
|
'sell-mfi-value': 80,
|
||||||
|
'sell-rsi-value': 20,
|
||||||
|
'sell-adx-enabled': True,
|
||||||
|
'sell-fastd-enabled': True,
|
||||||
|
'sell-mfi-enabled': True,
|
||||||
|
'sell-rsi-enabled': True,
|
||||||
|
'sell-trigger': 'sell-bb_upper'
|
||||||
|
}
|
||||||
|
)
|
||||||
|
result = populate_sell_trend(dataframe, {'pair': 'UNITTEST/BTC'})
|
||||||
|
# Check if some indicators are generated. We will not test all of them
|
||||||
|
print(result)
|
||||||
|
assert 'sell' in result
|
||||||
|
assert 1 in result['sell']
|
||||||
|
|
||||||
|
|
||||||
|
def test_generate_optimizer(mocker, hyperopt_conf) -> None:
|
||||||
|
hyperopt_conf.update({'spaces': 'all',
|
||||||
|
'hyperopt_min_trades': 1,
|
||||||
|
})
|
||||||
|
|
||||||
trades = [
|
trades = [
|
||||||
('TRX/BTC', 0.023117, 0.000233, 100)
|
('TRX/BTC', 0.023117, 0.000233, 100)
|
||||||
@ -744,8 +776,10 @@ def test_generate_optimizer(mocker, default_conf) -> None:
|
|||||||
}
|
}
|
||||||
response_expected = {
|
response_expected = {
|
||||||
'loss': 1.9840569076926293,
|
'loss': 1.9840569076926293,
|
||||||
'results_explanation': (' 1 trades. Avg profit 2.31%. Total profit 0.00023300 BTC '
|
'results_explanation': (' 1 trades. 1/0/0 Wins/Draws/Losses. '
|
||||||
'( 2.31\N{GREEK CAPITAL LETTER SIGMA}%). Avg duration 100.0 min.'
|
'Avg profit 2.31%. Median profit 2.31%. Total profit '
|
||||||
|
'0.00023300 BTC ( 2.31\N{GREEK CAPITAL LETTER SIGMA}%). '
|
||||||
|
'Avg duration 100.0 min.'
|
||||||
).encode(locale.getpreferredencoding(), 'replace').decode('utf-8'),
|
).encode(locale.getpreferredencoding(), 'replace').decode('utf-8'),
|
||||||
'params_details': {'buy': {'adx-enabled': False,
|
'params_details': {'buy': {'adx-enabled': False,
|
||||||
'adx-value': 0,
|
'adx-value': 0,
|
||||||
@ -776,55 +810,47 @@ def test_generate_optimizer(mocker, default_conf) -> None:
|
|||||||
'trailing_stop_positive_offset': 0.07}},
|
'trailing_stop_positive_offset': 0.07}},
|
||||||
'params_dict': optimizer_param,
|
'params_dict': optimizer_param,
|
||||||
'results_metrics': {'avg_profit': 2.3117,
|
'results_metrics': {'avg_profit': 2.3117,
|
||||||
|
'draws': 0,
|
||||||
'duration': 100.0,
|
'duration': 100.0,
|
||||||
|
'losses': 0,
|
||||||
|
'winsdrawslosses': '1/0/0',
|
||||||
|
'median_profit': 2.3117,
|
||||||
'profit': 2.3117,
|
'profit': 2.3117,
|
||||||
'total_profit': 0.000233,
|
'total_profit': 0.000233,
|
||||||
'trade_count': 1},
|
'trade_count': 1,
|
||||||
|
'wins': 1},
|
||||||
'total_profit': 0.00023300
|
'total_profit': 0.00023300
|
||||||
}
|
}
|
||||||
|
|
||||||
hyperopt = Hyperopt(default_conf)
|
hyperopt = Hyperopt(hyperopt_conf)
|
||||||
hyperopt.dimensions = hyperopt.hyperopt_space()
|
hyperopt.dimensions = hyperopt.hyperopt_space()
|
||||||
generate_optimizer_value = hyperopt.generate_optimizer(list(optimizer_param.values()))
|
generate_optimizer_value = hyperopt.generate_optimizer(list(optimizer_param.values()))
|
||||||
assert generate_optimizer_value == response_expected
|
assert generate_optimizer_value == response_expected
|
||||||
|
|
||||||
|
|
||||||
def test_clean_hyperopt(mocker, default_conf, caplog):
|
def test_clean_hyperopt(mocker, hyperopt_conf, caplog):
|
||||||
patch_exchange(mocker)
|
patch_exchange(mocker)
|
||||||
default_conf.update({'config': 'config.json.example',
|
|
||||||
'hyperopt': 'DefaultHyperOpt',
|
|
||||||
'epochs': 1,
|
|
||||||
'timerange': None,
|
|
||||||
'spaces': 'default',
|
|
||||||
'hyperopt_jobs': 1,
|
|
||||||
})
|
|
||||||
mocker.patch("freqtrade.optimize.hyperopt.Path.is_file", MagicMock(return_value=True))
|
mocker.patch("freqtrade.optimize.hyperopt.Path.is_file", MagicMock(return_value=True))
|
||||||
unlinkmock = mocker.patch("freqtrade.optimize.hyperopt.Path.unlink", MagicMock())
|
unlinkmock = mocker.patch("freqtrade.optimize.hyperopt.Path.unlink", MagicMock())
|
||||||
h = Hyperopt(default_conf)
|
h = Hyperopt(hyperopt_conf)
|
||||||
|
|
||||||
assert unlinkmock.call_count == 2
|
assert unlinkmock.call_count == 2
|
||||||
assert log_has(f"Removing `{h.data_pickle_file}`.", caplog)
|
assert log_has(f"Removing `{h.data_pickle_file}`.", caplog)
|
||||||
|
|
||||||
|
|
||||||
def test_continue_hyperopt(mocker, default_conf, caplog):
|
def test_continue_hyperopt(mocker, hyperopt_conf, caplog):
|
||||||
patch_exchange(mocker)
|
patch_exchange(mocker)
|
||||||
default_conf.update({'config': 'config.json.example',
|
hyperopt_conf.update({'hyperopt_continue': True})
|
||||||
'hyperopt': 'DefaultHyperOpt',
|
|
||||||
'epochs': 1,
|
|
||||||
'timerange': None,
|
|
||||||
'spaces': 'default',
|
|
||||||
'hyperopt_jobs': 1,
|
|
||||||
'hyperopt_continue': True
|
|
||||||
})
|
|
||||||
mocker.patch("freqtrade.optimize.hyperopt.Path.is_file", MagicMock(return_value=True))
|
mocker.patch("freqtrade.optimize.hyperopt.Path.is_file", MagicMock(return_value=True))
|
||||||
unlinkmock = mocker.patch("freqtrade.optimize.hyperopt.Path.unlink", MagicMock())
|
unlinkmock = mocker.patch("freqtrade.optimize.hyperopt.Path.unlink", MagicMock())
|
||||||
Hyperopt(default_conf)
|
Hyperopt(hyperopt_conf)
|
||||||
|
|
||||||
assert unlinkmock.call_count == 0
|
assert unlinkmock.call_count == 0
|
||||||
assert log_has("Continuing on previous hyperopt results.", caplog)
|
assert log_has("Continuing on previous hyperopt results.", caplog)
|
||||||
|
|
||||||
|
|
||||||
def test_print_json_spaces_all(mocker, default_conf, caplog, capsys) -> None:
|
def test_print_json_spaces_all(mocker, hyperopt_conf, capsys) -> None:
|
||||||
dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
|
dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
|
||||||
mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
|
mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
|
||||||
MagicMock(return_value=(MagicMock(), None)))
|
MagicMock(return_value=(MagicMock(), None)))
|
||||||
@ -855,16 +881,12 @@ def test_print_json_spaces_all(mocker, default_conf, caplog, capsys) -> None:
|
|||||||
)
|
)
|
||||||
patch_exchange(mocker)
|
patch_exchange(mocker)
|
||||||
|
|
||||||
default_conf.update({'config': 'config.json.example',
|
hyperopt_conf.update({'spaces': 'all',
|
||||||
'hyperopt': 'DefaultHyperOpt',
|
'hyperopt_jobs': 1,
|
||||||
'epochs': 1,
|
'print_json': True,
|
||||||
'timerange': None,
|
})
|
||||||
'spaces': 'all',
|
|
||||||
'hyperopt_jobs': 1,
|
|
||||||
'print_json': True,
|
|
||||||
})
|
|
||||||
|
|
||||||
hyperopt = Hyperopt(default_conf)
|
hyperopt = Hyperopt(hyperopt_conf)
|
||||||
hyperopt.backtesting.strategy.ohlcvdata_to_dataframe = MagicMock()
|
hyperopt.backtesting.strategy.ohlcvdata_to_dataframe = MagicMock()
|
||||||
hyperopt.custom_hyperopt.generate_roi_table = MagicMock(return_value={})
|
hyperopt.custom_hyperopt.generate_roi_table = MagicMock(return_value={})
|
||||||
|
|
||||||
@ -883,7 +905,7 @@ def test_print_json_spaces_all(mocker, default_conf, caplog, capsys) -> None:
|
|||||||
assert dumper.call_count == 2
|
assert dumper.call_count == 2
|
||||||
|
|
||||||
|
|
||||||
def test_print_json_spaces_default(mocker, default_conf, caplog, capsys) -> None:
|
def test_print_json_spaces_default(mocker, hyperopt_conf, capsys) -> None:
|
||||||
dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
|
dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
|
||||||
mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
|
mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
|
||||||
MagicMock(return_value=(MagicMock(), None)))
|
MagicMock(return_value=(MagicMock(), None)))
|
||||||
@ -913,16 +935,9 @@ def test_print_json_spaces_default(mocker, default_conf, caplog, capsys) -> None
|
|||||||
)
|
)
|
||||||
patch_exchange(mocker)
|
patch_exchange(mocker)
|
||||||
|
|
||||||
default_conf.update({'config': 'config.json.example',
|
hyperopt_conf.update({'print_json': True})
|
||||||
'hyperopt': 'DefaultHyperOpt',
|
|
||||||
'epochs': 1,
|
|
||||||
'timerange': None,
|
|
||||||
'spaces': 'default',
|
|
||||||
'hyperopt_jobs': 1,
|
|
||||||
'print_json': True,
|
|
||||||
})
|
|
||||||
|
|
||||||
hyperopt = Hyperopt(default_conf)
|
hyperopt = Hyperopt(hyperopt_conf)
|
||||||
hyperopt.backtesting.strategy.ohlcvdata_to_dataframe = MagicMock()
|
hyperopt.backtesting.strategy.ohlcvdata_to_dataframe = MagicMock()
|
||||||
hyperopt.custom_hyperopt.generate_roi_table = MagicMock(return_value={})
|
hyperopt.custom_hyperopt.generate_roi_table = MagicMock(return_value={})
|
||||||
|
|
||||||
@ -937,7 +952,7 @@ def test_print_json_spaces_default(mocker, default_conf, caplog, capsys) -> None
|
|||||||
assert dumper.call_count == 2
|
assert dumper.call_count == 2
|
||||||
|
|
||||||
|
|
||||||
def test_print_json_spaces_roi_stoploss(mocker, default_conf, caplog, capsys) -> None:
|
def test_print_json_spaces_roi_stoploss(mocker, hyperopt_conf, capsys) -> None:
|
||||||
dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
|
dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
|
||||||
mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
|
mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
|
||||||
MagicMock(return_value=(MagicMock(), None)))
|
MagicMock(return_value=(MagicMock(), None)))
|
||||||
@ -963,16 +978,12 @@ def test_print_json_spaces_roi_stoploss(mocker, default_conf, caplog, capsys) ->
|
|||||||
)
|
)
|
||||||
patch_exchange(mocker)
|
patch_exchange(mocker)
|
||||||
|
|
||||||
default_conf.update({'config': 'config.json.example',
|
hyperopt_conf.update({'spaces': 'roi stoploss',
|
||||||
'hyperopt': 'DefaultHyperOpt',
|
'hyperopt_jobs': 1,
|
||||||
'epochs': 1,
|
'print_json': True,
|
||||||
'timerange': None,
|
})
|
||||||
'spaces': 'roi stoploss',
|
|
||||||
'hyperopt_jobs': 1,
|
|
||||||
'print_json': True,
|
|
||||||
})
|
|
||||||
|
|
||||||
hyperopt = Hyperopt(default_conf)
|
hyperopt = Hyperopt(hyperopt_conf)
|
||||||
hyperopt.backtesting.strategy.ohlcvdata_to_dataframe = MagicMock()
|
hyperopt.backtesting.strategy.ohlcvdata_to_dataframe = MagicMock()
|
||||||
hyperopt.custom_hyperopt.generate_roi_table = MagicMock(return_value={})
|
hyperopt.custom_hyperopt.generate_roi_table = MagicMock(return_value={})
|
||||||
|
|
||||||
@ -987,7 +998,7 @@ def test_print_json_spaces_roi_stoploss(mocker, default_conf, caplog, capsys) ->
|
|||||||
assert dumper.call_count == 2
|
assert dumper.call_count == 2
|
||||||
|
|
||||||
|
|
||||||
def test_simplified_interface_roi_stoploss(mocker, default_conf, caplog, capsys) -> None:
|
def test_simplified_interface_roi_stoploss(mocker, hyperopt_conf, capsys) -> None:
|
||||||
dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
|
dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
|
||||||
mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
|
mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
|
||||||
MagicMock(return_value=(MagicMock(), None)))
|
MagicMock(return_value=(MagicMock(), None)))
|
||||||
@ -1012,14 +1023,9 @@ def test_simplified_interface_roi_stoploss(mocker, default_conf, caplog, capsys)
|
|||||||
)
|
)
|
||||||
patch_exchange(mocker)
|
patch_exchange(mocker)
|
||||||
|
|
||||||
default_conf.update({'config': 'config.json.example',
|
hyperopt_conf.update({'spaces': 'roi stoploss'})
|
||||||
'hyperopt': 'DefaultHyperOpt',
|
|
||||||
'epochs': 1,
|
|
||||||
'timerange': None,
|
|
||||||
'spaces': 'roi stoploss',
|
|
||||||
'hyperopt_jobs': 1, })
|
|
||||||
|
|
||||||
hyperopt = Hyperopt(default_conf)
|
hyperopt = Hyperopt(hyperopt_conf)
|
||||||
hyperopt.backtesting.strategy.ohlcvdata_to_dataframe = MagicMock()
|
hyperopt.backtesting.strategy.ohlcvdata_to_dataframe = MagicMock()
|
||||||
hyperopt.custom_hyperopt.generate_roi_table = MagicMock(return_value={})
|
hyperopt.custom_hyperopt.generate_roi_table = MagicMock(return_value={})
|
||||||
|
|
||||||
@ -1040,11 +1046,11 @@ def test_simplified_interface_roi_stoploss(mocker, default_conf, caplog, capsys)
|
|||||||
assert hasattr(hyperopt.backtesting.strategy, "advise_sell")
|
assert hasattr(hyperopt.backtesting.strategy, "advise_sell")
|
||||||
assert hasattr(hyperopt.backtesting.strategy, "advise_buy")
|
assert hasattr(hyperopt.backtesting.strategy, "advise_buy")
|
||||||
assert hasattr(hyperopt, "max_open_trades")
|
assert hasattr(hyperopt, "max_open_trades")
|
||||||
assert hyperopt.max_open_trades == default_conf['max_open_trades']
|
assert hyperopt.max_open_trades == hyperopt_conf['max_open_trades']
|
||||||
assert hasattr(hyperopt, "position_stacking")
|
assert hasattr(hyperopt, "position_stacking")
|
||||||
|
|
||||||
|
|
||||||
def test_simplified_interface_all_failed(mocker, default_conf, caplog, capsys) -> None:
|
def test_simplified_interface_all_failed(mocker, hyperopt_conf) -> None:
|
||||||
mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
|
mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
|
||||||
mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
|
mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
|
||||||
MagicMock(return_value=(MagicMock(), None)))
|
MagicMock(return_value=(MagicMock(), None)))
|
||||||
@ -1055,14 +1061,9 @@ def test_simplified_interface_all_failed(mocker, default_conf, caplog, capsys) -
|
|||||||
|
|
||||||
patch_exchange(mocker)
|
patch_exchange(mocker)
|
||||||
|
|
||||||
default_conf.update({'config': 'config.json.example',
|
hyperopt_conf.update({'spaces': 'all', })
|
||||||
'hyperopt': 'DefaultHyperOpt',
|
|
||||||
'epochs': 1,
|
|
||||||
'timerange': None,
|
|
||||||
'spaces': 'all',
|
|
||||||
'hyperopt_jobs': 1, })
|
|
||||||
|
|
||||||
hyperopt = Hyperopt(default_conf)
|
hyperopt = Hyperopt(hyperopt_conf)
|
||||||
hyperopt.backtesting.strategy.ohlcvdata_to_dataframe = MagicMock()
|
hyperopt.backtesting.strategy.ohlcvdata_to_dataframe = MagicMock()
|
||||||
hyperopt.custom_hyperopt.generate_roi_table = MagicMock(return_value={})
|
hyperopt.custom_hyperopt.generate_roi_table = MagicMock(return_value={})
|
||||||
|
|
||||||
@ -1075,7 +1076,7 @@ def test_simplified_interface_all_failed(mocker, default_conf, caplog, capsys) -
|
|||||||
hyperopt.start()
|
hyperopt.start()
|
||||||
|
|
||||||
|
|
||||||
def test_simplified_interface_buy(mocker, default_conf, caplog, capsys) -> None:
|
def test_simplified_interface_buy(mocker, hyperopt_conf, capsys) -> None:
|
||||||
dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
|
dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
|
||||||
mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
|
mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
|
||||||
MagicMock(return_value=(MagicMock(), None)))
|
MagicMock(return_value=(MagicMock(), None)))
|
||||||
@ -1100,14 +1101,9 @@ def test_simplified_interface_buy(mocker, default_conf, caplog, capsys) -> None:
|
|||||||
)
|
)
|
||||||
patch_exchange(mocker)
|
patch_exchange(mocker)
|
||||||
|
|
||||||
default_conf.update({'config': 'config.json.example',
|
hyperopt_conf.update({'spaces': 'buy'})
|
||||||
'hyperopt': 'DefaultHyperOpt',
|
|
||||||
'epochs': 1,
|
|
||||||
'timerange': None,
|
|
||||||
'spaces': 'buy',
|
|
||||||
'hyperopt_jobs': 1, })
|
|
||||||
|
|
||||||
hyperopt = Hyperopt(default_conf)
|
hyperopt = Hyperopt(hyperopt_conf)
|
||||||
hyperopt.backtesting.strategy.ohlcvdata_to_dataframe = MagicMock()
|
hyperopt.backtesting.strategy.ohlcvdata_to_dataframe = MagicMock()
|
||||||
hyperopt.custom_hyperopt.generate_roi_table = MagicMock(return_value={})
|
hyperopt.custom_hyperopt.generate_roi_table = MagicMock(return_value={})
|
||||||
|
|
||||||
@ -1128,11 +1124,11 @@ def test_simplified_interface_buy(mocker, default_conf, caplog, capsys) -> None:
|
|||||||
assert hasattr(hyperopt.backtesting.strategy, "advise_sell")
|
assert hasattr(hyperopt.backtesting.strategy, "advise_sell")
|
||||||
assert hasattr(hyperopt.backtesting.strategy, "advise_buy")
|
assert hasattr(hyperopt.backtesting.strategy, "advise_buy")
|
||||||
assert hasattr(hyperopt, "max_open_trades")
|
assert hasattr(hyperopt, "max_open_trades")
|
||||||
assert hyperopt.max_open_trades == default_conf['max_open_trades']
|
assert hyperopt.max_open_trades == hyperopt_conf['max_open_trades']
|
||||||
assert hasattr(hyperopt, "position_stacking")
|
assert hasattr(hyperopt, "position_stacking")
|
||||||
|
|
||||||
|
|
||||||
def test_simplified_interface_sell(mocker, default_conf, caplog, capsys) -> None:
|
def test_simplified_interface_sell(mocker, hyperopt_conf, capsys) -> None:
|
||||||
dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
|
dumper = mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
|
||||||
mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
|
mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
|
||||||
MagicMock(return_value=(MagicMock(), None)))
|
MagicMock(return_value=(MagicMock(), None)))
|
||||||
@ -1157,14 +1153,9 @@ def test_simplified_interface_sell(mocker, default_conf, caplog, capsys) -> None
|
|||||||
)
|
)
|
||||||
patch_exchange(mocker)
|
patch_exchange(mocker)
|
||||||
|
|
||||||
default_conf.update({'config': 'config.json.example',
|
hyperopt_conf.update({'spaces': 'sell', })
|
||||||
'hyperopt': 'DefaultHyperOpt',
|
|
||||||
'epochs': 1,
|
|
||||||
'timerange': None,
|
|
||||||
'spaces': 'sell',
|
|
||||||
'hyperopt_jobs': 1, })
|
|
||||||
|
|
||||||
hyperopt = Hyperopt(default_conf)
|
hyperopt = Hyperopt(hyperopt_conf)
|
||||||
hyperopt.backtesting.strategy.ohlcvdata_to_dataframe = MagicMock()
|
hyperopt.backtesting.strategy.ohlcvdata_to_dataframe = MagicMock()
|
||||||
hyperopt.custom_hyperopt.generate_roi_table = MagicMock(return_value={})
|
hyperopt.custom_hyperopt.generate_roi_table = MagicMock(return_value={})
|
||||||
|
|
||||||
@ -1185,7 +1176,7 @@ def test_simplified_interface_sell(mocker, default_conf, caplog, capsys) -> None
|
|||||||
assert hasattr(hyperopt.backtesting.strategy, "advise_sell")
|
assert hasattr(hyperopt.backtesting.strategy, "advise_sell")
|
||||||
assert hasattr(hyperopt.backtesting.strategy, "advise_buy")
|
assert hasattr(hyperopt.backtesting.strategy, "advise_buy")
|
||||||
assert hasattr(hyperopt, "max_open_trades")
|
assert hasattr(hyperopt, "max_open_trades")
|
||||||
assert hyperopt.max_open_trades == default_conf['max_open_trades']
|
assert hyperopt.max_open_trades == hyperopt_conf['max_open_trades']
|
||||||
assert hasattr(hyperopt, "position_stacking")
|
assert hasattr(hyperopt, "position_stacking")
|
||||||
|
|
||||||
|
|
||||||
@ -1195,7 +1186,7 @@ def test_simplified_interface_sell(mocker, default_conf, caplog, capsys) -> None
|
|||||||
('sell_strategy_generator', 'sell'),
|
('sell_strategy_generator', 'sell'),
|
||||||
('sell_indicator_space', 'sell'),
|
('sell_indicator_space', 'sell'),
|
||||||
])
|
])
|
||||||
def test_simplified_interface_failed(mocker, default_conf, caplog, capsys, method, space) -> None:
|
def test_simplified_interface_failed(mocker, hyperopt_conf, method, space) -> None:
|
||||||
mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
|
mocker.patch('freqtrade.optimize.hyperopt.dump', MagicMock())
|
||||||
mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
|
mocker.patch('freqtrade.optimize.backtesting.Backtesting.load_bt_data',
|
||||||
MagicMock(return_value=(MagicMock(), None)))
|
MagicMock(return_value=(MagicMock(), None)))
|
||||||
@ -1206,14 +1197,9 @@ def test_simplified_interface_failed(mocker, default_conf, caplog, capsys, metho
|
|||||||
|
|
||||||
patch_exchange(mocker)
|
patch_exchange(mocker)
|
||||||
|
|
||||||
default_conf.update({'config': 'config.json.example',
|
hyperopt_conf.update({'spaces': space})
|
||||||
'hyperopt': 'DefaultHyperOpt',
|
|
||||||
'epochs': 1,
|
|
||||||
'timerange': None,
|
|
||||||
'spaces': space,
|
|
||||||
'hyperopt_jobs': 1, })
|
|
||||||
|
|
||||||
hyperopt = Hyperopt(default_conf)
|
hyperopt = Hyperopt(hyperopt_conf)
|
||||||
hyperopt.backtesting.strategy.ohlcvdata_to_dataframe = MagicMock()
|
hyperopt.backtesting.strategy.ohlcvdata_to_dataframe = MagicMock()
|
||||||
hyperopt.custom_hyperopt.generate_roi_table = MagicMock(return_value={})
|
hyperopt.custom_hyperopt.generate_roi_table = MagicMock(return_value={})
|
||||||
|
|
||||||
|
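The hunks above swap the per-test `default_conf.update({...})` blocks for a shared `hyperopt_conf` fixture. The fixture definition is not part of this section; as a rough sketch only (assuming it simply layers the common hyperopt settings on top of the existing `default_conf` test fixture), it could look like this:

from copy import deepcopy

import pytest


@pytest.fixture(scope='function')
def hyperopt_conf(default_conf):
    # Sketch only: layer the settings every hyperopt test used to set by hand
    # on top of the existing default_conf test fixture.
    hyperconf = deepcopy(default_conf)
    hyperconf.update({
        'hyperopt': 'DefaultHyperOpt',
        'epochs': 1,
        'timerange': None,
        'spaces': 'default',
        'hyperopt_jobs': 1,
    })
    return hyperconf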
@ -1,16 +1,29 @@
|
|||||||
|
import re
|
||||||
|
from datetime import timedelta
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
import pandas as pd
|
import pandas as pd
|
||||||
import pytest
|
import pytest
|
||||||
from arrow import Arrow
|
from arrow import Arrow
|
||||||
|
|
||||||
|
from freqtrade.configuration import TimeRange
|
||||||
|
from freqtrade.constants import LAST_BT_RESULT_FN
|
||||||
|
from freqtrade.data import history
|
||||||
|
from freqtrade.data.btanalysis import (get_latest_backtest_filename,
|
||||||
|
load_backtest_data)
|
||||||
from freqtrade.edge import PairInfo
|
from freqtrade.edge import PairInfo
|
||||||
from freqtrade.optimize.optimize_reports import (
|
from freqtrade.optimize.optimize_reports import (generate_backtest_stats,
|
||||||
generate_pair_metrics, generate_edge_table, generate_sell_reason_stats,
|
generate_daily_stats,
|
||||||
text_table_bt_results, text_table_sell_reason, generate_strategy_metrics,
|
generate_edge_table,
|
||||||
text_table_strategy, store_backtest_result)
|
generate_pair_metrics,
|
||||||
|
generate_sell_reason_stats,
|
||||||
|
generate_strategy_metrics,
|
||||||
|
store_backtest_stats,
|
||||||
|
text_table_bt_results,
|
||||||
|
text_table_sell_reason,
|
||||||
|
text_table_strategy)
|
||||||
from freqtrade.strategy.interface import SellType
|
from freqtrade.strategy.interface import SellType
|
||||||
from tests.conftest import patch_exchange
|
from tests.data.test_history import _backup_file, _clean_test_file
|
||||||
|
|
||||||
|
|
||||||
def test_text_table_bt_results(default_conf, mocker):
|
def test_text_table_bt_results(default_conf, mocker):
|
||||||
@ -43,6 +56,115 @@ def test_text_table_bt_results(default_conf, mocker):
|
|||||||
assert text_table_bt_results(pair_results, stake_currency='BTC') == result_str
|
assert text_table_bt_results(pair_results, stake_currency='BTC') == result_str
|
||||||
|
|
||||||
|
|
||||||
|
def test_generate_backtest_stats(default_conf, testdatadir):
|
||||||
|
results = {'DefStrat': pd.DataFrame({"pair": ["UNITTEST/BTC", "UNITTEST/BTC",
|
||||||
|
"UNITTEST/BTC", "UNITTEST/BTC"],
|
||||||
|
"profit_percent": [0.003312, 0.010801, 0.013803, 0.002780],
|
||||||
|
"profit_abs": [0.000003, 0.000011, 0.000014, 0.000003],
|
||||||
|
"open_date": [Arrow(2017, 11, 14, 19, 32, 00).datetime,
|
||||||
|
Arrow(2017, 11, 14, 21, 36, 00).datetime,
|
||||||
|
Arrow(2017, 11, 14, 22, 12, 00).datetime,
|
||||||
|
Arrow(2017, 11, 14, 22, 44, 00).datetime],
|
||||||
|
"close_date": [Arrow(2017, 11, 14, 21, 35, 00).datetime,
|
||||||
|
Arrow(2017, 11, 14, 22, 10, 00).datetime,
|
||||||
|
Arrow(2017, 11, 14, 22, 43, 00).datetime,
|
||||||
|
Arrow(2017, 11, 14, 22, 58, 00).datetime],
|
||||||
|
"open_rate": [0.002543, 0.003003, 0.003089, 0.003214],
|
||||||
|
"close_rate": [0.002546, 0.003014, 0.003103, 0.003217],
|
||||||
|
"trade_duration": [123, 34, 31, 14],
|
||||||
|
"open_at_end": [False, False, False, True],
|
||||||
|
"sell_reason": [SellType.ROI, SellType.STOP_LOSS,
|
||||||
|
SellType.ROI, SellType.FORCE_SELL]
|
||||||
|
})}
|
||||||
|
timerange = TimeRange.parse_timerange('1510688220-1510700340')
|
||||||
|
min_date = Arrow.fromtimestamp(1510688220)
|
||||||
|
max_date = Arrow.fromtimestamp(1510700340)
|
||||||
|
btdata = history.load_data(testdatadir, '1m', ['UNITTEST/BTC'], timerange=timerange,
|
||||||
|
fill_up_missing=True)
|
||||||
|
|
||||||
|
stats = generate_backtest_stats(default_conf, btdata, results, min_date, max_date)
|
||||||
|
assert isinstance(stats, dict)
|
||||||
|
assert 'strategy' in stats
|
||||||
|
assert 'DefStrat' in stats['strategy']
|
||||||
|
assert 'strategy_comparison' in stats
|
||||||
|
strat_stats = stats['strategy']['DefStrat']
|
||||||
|
assert strat_stats['backtest_start'] == min_date.datetime
|
||||||
|
assert strat_stats['backtest_end'] == max_date.datetime
|
||||||
|
assert strat_stats['total_trades'] == len(results['DefStrat'])
|
||||||
|
# Above sample had no losing trade
|
||||||
|
assert strat_stats['max_drawdown'] == 0.0
|
||||||
|
|
||||||
|
results = {'DefStrat': pd.DataFrame(
|
||||||
|
{"pair": ["UNITTEST/BTC", "UNITTEST/BTC", "UNITTEST/BTC", "UNITTEST/BTC"],
|
||||||
|
"profit_percent": [0.003312, 0.010801, -0.013803, 0.002780],
|
||||||
|
"profit_abs": [0.000003, 0.000011, -0.000014, 0.000003],
|
||||||
|
"open_date": [Arrow(2017, 11, 14, 19, 32, 00).datetime,
|
||||||
|
Arrow(2017, 11, 14, 21, 36, 00).datetime,
|
||||||
|
Arrow(2017, 11, 14, 22, 12, 00).datetime,
|
||||||
|
Arrow(2017, 11, 14, 22, 44, 00).datetime],
|
||||||
|
"close_date": [Arrow(2017, 11, 14, 21, 35, 00).datetime,
|
||||||
|
Arrow(2017, 11, 14, 22, 10, 00).datetime,
|
||||||
|
Arrow(2017, 11, 14, 22, 43, 00).datetime,
|
||||||
|
Arrow(2017, 11, 14, 22, 58, 00).datetime],
|
||||||
|
"open_rate": [0.002543, 0.003003, 0.003089, 0.003214],
|
||||||
|
"close_rate": [0.002546, 0.003014, 0.0032903, 0.003217],
|
||||||
|
"trade_duration": [123, 34, 31, 14],
|
||||||
|
"open_at_end": [False, False, False, True],
|
||||||
|
"sell_reason": [SellType.ROI, SellType.STOP_LOSS,
|
||||||
|
SellType.ROI, SellType.FORCE_SELL]
|
||||||
|
})}
|
||||||
|
|
||||||
|
assert strat_stats['max_drawdown'] == 0.0
|
||||||
|
assert strat_stats['drawdown_start'] == Arrow.fromtimestamp(0).datetime
|
||||||
|
assert strat_stats['drawdown_end'] == Arrow.fromtimestamp(0).datetime
|
||||||
|
assert strat_stats['drawdown_end_ts'] == 0
|
||||||
|
assert strat_stats['drawdown_start_ts'] == 0
|
||||||
|
assert strat_stats['pairlist'] == ['UNITTEST/BTC']
|
||||||
|
|
||||||
|
# Test storing stats
|
||||||
|
filename = Path(testdatadir / 'btresult.json')
|
||||||
|
filename_last = Path(testdatadir / LAST_BT_RESULT_FN)
|
||||||
|
_backup_file(filename_last, copy_file=True)
|
||||||
|
assert not filename.is_file()
|
||||||
|
|
||||||
|
store_backtest_stats(filename, stats)
|
||||||
|
|
||||||
|
# get the real filename (it's btresult-<date>.json)
|
||||||
|
last_fn = get_latest_backtest_filename(filename_last.parent)
|
||||||
|
assert re.match(r"btresult-.*\.json", last_fn)
|
||||||
|
|
||||||
|
filename1 = (testdatadir / last_fn)
|
||||||
|
assert filename1.is_file()
|
||||||
|
content = filename1.read_text()
|
||||||
|
assert 'max_drawdown' in content
|
||||||
|
assert 'strategy' in content
|
||||||
|
assert 'pairlist' in content
|
||||||
|
|
||||||
|
assert filename_last.is_file()
|
||||||
|
|
||||||
|
_clean_test_file(filename_last)
|
||||||
|
filename1.unlink()
|
||||||
|
|
||||||
|
|
||||||
|
def test_store_backtest_stats(testdatadir, mocker):
|
||||||
|
|
||||||
|
dump_mock = mocker.patch('freqtrade.optimize.optimize_reports.file_dump_json')
|
||||||
|
|
||||||
|
store_backtest_stats(testdatadir, {})
|
||||||
|
|
||||||
|
assert dump_mock.call_count == 2
|
||||||
|
assert isinstance(dump_mock.call_args_list[0][0][0], Path)
|
||||||
|
assert str(dump_mock.call_args_list[0][0][0]).startswith(str(testdatadir/'backtest-result'))
|
||||||
|
|
||||||
|
dump_mock.reset_mock()
|
||||||
|
filename = testdatadir / 'testresult.json'
|
||||||
|
store_backtest_stats(filename, {})
|
||||||
|
assert dump_mock.call_count == 2
|
||||||
|
assert isinstance(dump_mock.call_args_list[0][0][0], Path)
|
||||||
|
# result will be testdatadir / testresult-<timestamp>.json
|
||||||
|
assert str(dump_mock.call_args_list[0][0][0]).startswith(str(testdatadir / 'testresult'))
|
||||||
|
|
||||||
|
|
||||||
def test_generate_pair_metrics(default_conf, mocker):
|
def test_generate_pair_metrics(default_conf, mocker):
|
||||||
|
|
||||||
results = pd.DataFrame(
|
results = pd.DataFrame(
|
||||||
@ -68,6 +190,29 @@ def test_generate_pair_metrics(default_conf, mocker):
|
|||||||
pytest.approx(pair_results[-1]['profit_sum_pct']) == pair_results[-1]['profit_sum'] * 100)
|
pytest.approx(pair_results[-1]['profit_sum_pct']) == pair_results[-1]['profit_sum'] * 100)
|
||||||
|
|
||||||
|
|
||||||
|
def test_generate_daily_stats(testdatadir):
|
||||||
|
|
||||||
|
filename = testdatadir / "backtest-result_new.json"
|
||||||
|
bt_data = load_backtest_data(filename)
|
||||||
|
res = generate_daily_stats(bt_data)
|
||||||
|
assert isinstance(res, dict)
|
||||||
|
assert round(res['backtest_best_day'], 4) == 0.1796
|
||||||
|
assert round(res['backtest_worst_day'], 4) == -0.1468
|
||||||
|
assert res['winning_days'] == 14
|
||||||
|
assert res['draw_days'] == 4
|
||||||
|
assert res['losing_days'] == 3
|
||||||
|
assert res['winner_holding_avg'] == timedelta(seconds=1440)
|
||||||
|
assert res['loser_holding_avg'] == timedelta(days=1, seconds=21420)
|
||||||
|
|
||||||
|
# Select empty dataframe!
|
||||||
|
res = generate_daily_stats(bt_data.loc[bt_data['open_date'] == '2000-01-01', :])
|
||||||
|
assert isinstance(res, dict)
|
||||||
|
assert round(res['backtest_best_day'], 4) == 0.0
|
||||||
|
assert res['winning_days'] == 0
|
||||||
|
assert res['draw_days'] == 0
|
||||||
|
assert res['losing_days'] == 0
|
||||||
|
|
||||||
|
|
||||||
def test_text_table_sell_reason(default_conf):
|
def test_text_table_sell_reason(default_conf):
|
||||||
|
|
||||||
results = pd.DataFrame(
|
results = pd.DataFrame(
|
||||||
@ -188,77 +333,3 @@ def test_generate_edge_table(edge_conf, mocker):
|
|||||||
assert generate_edge_table(results).count('| ETH/BTC |') == 1
|
assert generate_edge_table(results).count('| ETH/BTC |') == 1
|
||||||
assert generate_edge_table(results).count(
|
assert generate_edge_table(results).count(
|
||||||
'| Risk Reward Ratio | Required Risk Reward | Expectancy |') == 1
|
'| Risk Reward Ratio | Required Risk Reward | Expectancy |') == 1
|
||||||
|
|
||||||
|
|
||||||
def test_backtest_record(default_conf, fee, mocker):
|
|
||||||
names = []
|
|
||||||
records = []
|
|
||||||
patch_exchange(mocker)
|
|
||||||
mocker.patch('freqtrade.exchange.Exchange.get_fee', fee)
|
|
||||||
mocker.patch(
|
|
||||||
'freqtrade.optimize.optimize_reports.file_dump_json',
|
|
||||||
new=lambda n, r: (names.append(n), records.append(r))
|
|
||||||
)
|
|
||||||
|
|
||||||
results = {'DefStrat': pd.DataFrame({"pair": ["UNITTEST/BTC", "UNITTEST/BTC",
|
|
||||||
"UNITTEST/BTC", "UNITTEST/BTC"],
|
|
||||||
"profit_percent": [0.003312, 0.010801, 0.013803, 0.002780],
|
|
||||||
"profit_abs": [0.000003, 0.000011, 0.000014, 0.000003],
|
|
||||||
"open_time": [Arrow(2017, 11, 14, 19, 32, 00).datetime,
|
|
||||||
Arrow(2017, 11, 14, 21, 36, 00).datetime,
|
|
||||||
Arrow(2017, 11, 14, 22, 12, 00).datetime,
|
|
||||||
Arrow(2017, 11, 14, 22, 44, 00).datetime],
|
|
||||||
"close_time": [Arrow(2017, 11, 14, 21, 35, 00).datetime,
|
|
||||||
Arrow(2017, 11, 14, 22, 10, 00).datetime,
|
|
||||||
Arrow(2017, 11, 14, 22, 43, 00).datetime,
|
|
||||||
Arrow(2017, 11, 14, 22, 58, 00).datetime],
|
|
||||||
"open_rate": [0.002543, 0.003003, 0.003089, 0.003214],
|
|
||||||
"close_rate": [0.002546, 0.003014, 0.003103, 0.003217],
|
|
||||||
"open_index": [1, 119, 153, 185],
|
|
||||||
"close_index": [118, 151, 184, 199],
|
|
||||||
"trade_duration": [123, 34, 31, 14],
|
|
||||||
"open_at_end": [False, False, False, True],
|
|
||||||
"sell_reason": [SellType.ROI, SellType.STOP_LOSS,
|
|
||||||
SellType.ROI, SellType.FORCE_SELL]
|
|
||||||
})}
|
|
||||||
store_backtest_result(Path("backtest-result.json"), results)
|
|
||||||
# Assert file_dump_json was only called once
|
|
||||||
assert names == [Path('backtest-result.json')]
|
|
||||||
records = records[0]
|
|
||||||
# Ensure records are of correct type
|
|
||||||
assert len(records) == 4
|
|
||||||
|
|
||||||
# reset test to test with strategy name
|
|
||||||
names = []
|
|
||||||
records = []
|
|
||||||
results['Strat'] = results['DefStrat']
|
|
||||||
results['Strat2'] = results['DefStrat']
|
|
||||||
store_backtest_result(Path("backtest-result.json"), results)
|
|
||||||
assert names == [
|
|
||||||
Path('backtest-result-DefStrat.json'),
|
|
||||||
Path('backtest-result-Strat.json'),
|
|
||||||
Path('backtest-result-Strat2.json'),
|
|
||||||
]
|
|
||||||
records = records[0]
|
|
||||||
# Ensure records are of correct type
|
|
||||||
assert len(records) == 4
|
|
||||||
|
|
||||||
# ('UNITTEST/BTC', 0.00331158, '1510684320', '1510691700', 0, 117)
|
|
||||||
# Below follows just a typecheck of the schema/type of trade-records
|
|
||||||
oix = None
|
|
||||||
for (pair, profit, date_buy, date_sell, buy_index, dur,
|
|
||||||
openr, closer, open_at_end, sell_reason) in records:
|
|
||||||
assert pair == 'UNITTEST/BTC'
|
|
||||||
assert isinstance(profit, float)
|
|
||||||
# FIX: buy/sell should be converted to ints
|
|
||||||
assert isinstance(date_buy, float)
|
|
||||||
assert isinstance(date_sell, float)
|
|
||||||
assert isinstance(openr, float)
|
|
||||||
assert isinstance(closer, float)
|
|
||||||
assert isinstance(open_at_end, bool)
|
|
||||||
assert isinstance(sell_reason, str)
|
|
||||||
isinstance(buy_index, pd._libs.tslib.Timestamp)
|
|
||||||
if oix:
|
|
||||||
assert buy_index > oix
|
|
||||||
oix = buy_index
|
|
||||||
assert dur > 0
|
|
||||||
|
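The new tests above expect a timestamped result file (btresult-<date>.json) plus a last-result marker, and two dump calls per invocation of store_backtest_stats. A minimal sketch of that naming scheme, under the assumption that the marker is a small JSON pointer file written next to the results, could be:

import json
from datetime import datetime
from pathlib import Path


def store_stats_sketch(recordfilename: Path, stats: dict) -> Path:
    # Sketch only: derive a timestamped filename from the requested name
    # (directory -> backtest-result-<ts>.json, file -> <stem>-<ts><suffix>).
    ts = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    if recordfilename.is_dir():
        filename = recordfilename / f'backtest-result-{ts}.json'
    else:
        filename = recordfilename.parent / f'{recordfilename.stem}-{ts}{recordfilename.suffix}'
    filename.write_text(json.dumps(stats, default=str))
    # Second write: remember which file holds the latest result.
    latest = filename.parent / '.last_result.json'
    latest.write_text(json.dumps({'latest_backtest': filename.name}))
    return filename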
@ -231,9 +231,6 @@ def test_VolumePairList_refresh_empty(mocker, markets_empty, whitelist_conf):
|
|||||||
# VolumePairList only
|
# VolumePairList only
|
||||||
([{"method": "VolumePairList", "number_assets": 5, "sort_key": "quoteVolume"}],
|
([{"method": "VolumePairList", "number_assets": 5, "sort_key": "quoteVolume"}],
|
||||||
"BTC", ['ETH/BTC', 'TKN/BTC', 'LTC/BTC', 'XRP/BTC', 'HOT/BTC']),
|
"BTC", ['ETH/BTC', 'TKN/BTC', 'LTC/BTC', 'XRP/BTC', 'HOT/BTC']),
|
||||||
# Different sorting depending on quote or bid volume
|
|
||||||
([{"method": "VolumePairList", "number_assets": 5, "sort_key": "bidVolume"}],
|
|
||||||
"BTC", ['HOT/BTC', 'FUEL/BTC', 'XRP/BTC', 'LTC/BTC', 'TKN/BTC']),
|
|
||||||
([{"method": "VolumePairList", "number_assets": 5, "sort_key": "quoteVolume"}],
|
([{"method": "VolumePairList", "number_assets": 5, "sort_key": "quoteVolume"}],
|
||||||
"USDT", ['ETH/USDT', 'NANO/USDT', 'ADAHALF/USDT', 'ADADOUBLE/USDT']),
|
"USDT", ['ETH/USDT', 'NANO/USDT', 'ADAHALF/USDT', 'ADADOUBLE/USDT']),
|
||||||
# No pair for ETH, VolumePairList
|
# No pair for ETH, VolumePairList
|
||||||
@ -263,10 +260,6 @@ def test_VolumePairList_refresh_empty(mocker, markets_empty, whitelist_conf):
|
|||||||
([{"method": "VolumePairList", "number_assets": 5, "sort_key": "quoteVolume"},
|
([{"method": "VolumePairList", "number_assets": 5, "sort_key": "quoteVolume"},
|
||||||
{"method": "PrecisionFilter"}],
|
{"method": "PrecisionFilter"}],
|
||||||
"BTC", ['ETH/BTC', 'TKN/BTC', 'LTC/BTC', 'XRP/BTC']),
|
"BTC", ['ETH/BTC', 'TKN/BTC', 'LTC/BTC', 'XRP/BTC']),
|
||||||
# Precisionfilter bid
|
|
||||||
([{"method": "VolumePairList", "number_assets": 5, "sort_key": "bidVolume"},
|
|
||||||
{"method": "PrecisionFilter"}],
|
|
||||||
"BTC", ['FUEL/BTC', 'XRP/BTC', 'LTC/BTC', 'TKN/BTC']),
|
|
||||||
# PriceFilter and VolumePairList
|
# PriceFilter and VolumePairList
|
||||||
([{"method": "VolumePairList", "number_assets": 5, "sort_key": "quoteVolume"},
|
([{"method": "VolumePairList", "number_assets": 5, "sort_key": "quoteVolume"},
|
||||||
{"method": "PriceFilter", "low_price_ratio": 0.03}],
|
{"method": "PriceFilter", "low_price_ratio": 0.03}],
|
||||||
@ -293,9 +286,6 @@ def test_VolumePairList_refresh_empty(mocker, markets_empty, whitelist_conf):
|
|||||||
([{"method": "StaticPairList"}],
|
([{"method": "StaticPairList"}],
|
||||||
"BTC", ['ETH/BTC', 'TKN/BTC', 'HOT/BTC']),
|
"BTC", ['ETH/BTC', 'TKN/BTC', 'HOT/BTC']),
|
||||||
# Static Pairlist before VolumePairList - sorting changes
|
# Static Pairlist before VolumePairList - sorting changes
|
||||||
([{"method": "StaticPairList"},
|
|
||||||
{"method": "VolumePairList", "number_assets": 5, "sort_key": "bidVolume"}],
|
|
||||||
"BTC", ['HOT/BTC', 'TKN/BTC', 'ETH/BTC']),
|
|
||||||
# SpreadFilter
|
# SpreadFilter
|
||||||
([{"method": "VolumePairList", "number_assets": 5, "sort_key": "quoteVolume"},
|
([{"method": "VolumePairList", "number_assets": 5, "sort_key": "quoteVolume"},
|
||||||
{"method": "SpreadFilter", "max_spread_ratio": 0.005}],
|
{"method": "SpreadFilter", "max_spread_ratio": 0.005}],
|
||||||
@ -344,9 +334,9 @@ def test_VolumePairList_refresh_empty(mocker, markets_empty, whitelist_conf):
|
|||||||
([{"method": "SpreadFilter", "max_spread_ratio": 0.005}],
|
([{"method": "SpreadFilter", "max_spread_ratio": 0.005}],
|
||||||
"BTC", 'filter_at_the_beginning'), # OperationalException expected
|
"BTC", 'filter_at_the_beginning'), # OperationalException expected
|
||||||
# Static Pairlist after VolumePairList, on a non-first position
|
# Static Pairlist after VolumePairList, on a non-first position
|
||||||
([{"method": "VolumePairList", "number_assets": 5, "sort_key": "bidVolume"},
|
([{"method": "VolumePairList", "number_assets": 5, "sort_key": "quoteVolume"},
|
||||||
{"method": "StaticPairList"}],
|
{"method": "StaticPairList"}],
|
||||||
"BTC", 'static_in_the_middle'),
|
"BTC", 'static_in_the_middle'),
|
||||||
([{"method": "VolumePairList", "number_assets": 20, "sort_key": "quoteVolume"},
|
([{"method": "VolumePairList", "number_assets": 20, "sort_key": "quoteVolume"},
|
||||||
{"method": "PriceFilter", "low_price_ratio": 0.02}],
|
{"method": "PriceFilter", "low_price_ratio": 0.02}],
|
||||||
"USDT", ['ETH/USDT', 'NANO/USDT']),
|
"USDT", ['ETH/USDT', 'NANO/USDT']),
|
||||||
@ -468,7 +458,9 @@ def test_pairlist_class(mocker, whitelist_conf, markets, pairlist):
|
|||||||
# BCH/BTC not available
|
# BCH/BTC not available
|
||||||
(['ETH/BTC', 'TKN/BTC', 'BCH/BTC'], "is not compatible with exchange"),
|
(['ETH/BTC', 'TKN/BTC', 'BCH/BTC'], "is not compatible with exchange"),
|
||||||
# BTT/BTC is inactive
|
# BTT/BTC is inactive
|
||||||
(['ETH/BTC', 'TKN/BTC', 'BTT/BTC'], "Market is not active")
|
(['ETH/BTC', 'TKN/BTC', 'BTT/BTC'], "Market is not active"),
|
||||||
|
# XLTCUSDT is not a valid pair
|
||||||
|
(['ETH/BTC', 'TKN/BTC', 'XLTCUSDT'], "is not tradable with Freqtrade"),
|
||||||
])
|
])
|
||||||
def test__whitelist_for_active_markets(mocker, whitelist_conf, markets, pairlist, whitelist, caplog,
|
def test__whitelist_for_active_markets(mocker, whitelist_conf, markets, pairlist, whitelist, caplog,
|
||||||
log_message, tickers):
|
log_message, tickers):
|
||||||
@ -547,7 +539,7 @@ def test_agefilter_min_days_listed_too_small(mocker, default_conf, markets, tick
|
|||||||
)
|
)
|
||||||
|
|
||||||
with pytest.raises(OperationalException,
|
with pytest.raises(OperationalException,
|
||||||
match=r'AgeFilter requires min_days_listed must be >= 1'):
|
match=r'AgeFilter requires min_days_listed to be >= 1'):
|
||||||
get_patched_freqtradebot(mocker, default_conf)
|
get_patched_freqtradebot(mocker, default_conf)
|
||||||
|
|
||||||
|
|
||||||
@ -562,7 +554,7 @@ def test_agefilter_min_days_listed_too_large(mocker, default_conf, markets, tick
|
|||||||
)
|
)
|
||||||
|
|
||||||
with pytest.raises(OperationalException,
|
with pytest.raises(OperationalException,
|
||||||
match=r'AgeFilter requires min_days_listed must not exceed '
|
match=r'AgeFilter requires min_days_listed to not exceed '
|
||||||
r'exchange max request size \([0-9]+\)'):
|
r'exchange max request size \([0-9]+\)'):
|
||||||
get_patched_freqtradebot(mocker, default_conf)
|
get_patched_freqtradebot(mocker, default_conf)
|
||||||
|
|
||||||
@ -590,34 +582,58 @@ def test_agefilter_caching(mocker, markets, whitelist_conf_3, tickers, ohlcv_his
|
|||||||
assert freqtrade.exchange.get_historic_ohlcv.call_count == previous_call_count
|
assert freqtrade.exchange.get_historic_ohlcv.call_count == previous_call_count
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("pairlistconfig,expected", [
|
@pytest.mark.parametrize("pairlistconfig,desc_expected,exception_expected", [
|
||||||
({"method": "PriceFilter", "low_price_ratio": 0.001, "min_price": 0.00000010,
|
({"method": "PriceFilter", "low_price_ratio": 0.001, "min_price": 0.00000010,
|
||||||
"max_price": 1.0}, "[{'PriceFilter': 'PriceFilter - Filtering pairs priced below "
|
"max_price": 1.0},
|
||||||
"0.1% or below 0.00000010 or above 1.00000000.'}]"
|
"[{'PriceFilter': 'PriceFilter - Filtering pairs priced below "
|
||||||
|
"0.1% or below 0.00000010 or above 1.00000000.'}]",
|
||||||
|
None
|
||||||
),
|
),
|
||||||
({"method": "PriceFilter", "low_price_ratio": 0.001, "min_price": 0.00000010},
|
({"method": "PriceFilter", "low_price_ratio": 0.001, "min_price": 0.00000010},
|
||||||
"[{'PriceFilter': 'PriceFilter - Filtering pairs priced below 0.1% or below 0.00000010.'}]"
|
"[{'PriceFilter': 'PriceFilter - Filtering pairs priced below 0.1% or below 0.00000010.'}]",
|
||||||
|
None
|
||||||
),
|
),
|
||||||
({"method": "PriceFilter", "low_price_ratio": 0.001, "max_price": 1.00010000},
|
({"method": "PriceFilter", "low_price_ratio": 0.001, "max_price": 1.00010000},
|
||||||
"[{'PriceFilter': 'PriceFilter - Filtering pairs priced below 0.1% or above 1.00010000.'}]"
|
"[{'PriceFilter': 'PriceFilter - Filtering pairs priced below 0.1% or above 1.00010000.'}]",
|
||||||
|
None
|
||||||
),
|
),
|
||||||
({"method": "PriceFilter", "min_price": 0.00002000},
|
({"method": "PriceFilter", "min_price": 0.00002000},
|
||||||
"[{'PriceFilter': 'PriceFilter - Filtering pairs priced below 0.00002000.'}]"
|
"[{'PriceFilter': 'PriceFilter - Filtering pairs priced below 0.00002000.'}]",
|
||||||
|
None
|
||||||
),
|
),
|
||||||
({"method": "PriceFilter"},
|
({"method": "PriceFilter"},
|
||||||
"[{'PriceFilter': 'PriceFilter - No price filters configured.'}]"
|
"[{'PriceFilter': 'PriceFilter - No price filters configured.'}]",
|
||||||
|
None
|
||||||
),
|
),
|
||||||
|
({"method": "PriceFilter", "low_price_ratio": -0.001},
|
||||||
|
None,
|
||||||
|
"PriceFilter requires low_price_ratio to be >= 0"
|
||||||
|
), # OperationalException expected
|
||||||
|
({"method": "PriceFilter", "min_price": -0.00000010},
|
||||||
|
None,
|
||||||
|
"PriceFilter requires min_price to be >= 0"
|
||||||
|
), # OperationalException expected
|
||||||
|
({"method": "PriceFilter", "max_price": -1.00010000},
|
||||||
|
None,
|
||||||
|
"PriceFilter requires max_price to be >= 0"
|
||||||
|
), # OperationalException expected
|
||||||
])
|
])
|
||||||
def test_pricefilter_desc(mocker, whitelist_conf, markets, pairlistconfig, expected):
|
def test_pricefilter_desc(mocker, whitelist_conf, markets, pairlistconfig,
|
||||||
|
desc_expected, exception_expected):
|
||||||
mocker.patch.multiple('freqtrade.exchange.Exchange',
|
mocker.patch.multiple('freqtrade.exchange.Exchange',
|
||||||
markets=PropertyMock(return_value=markets),
|
markets=PropertyMock(return_value=markets),
|
||||||
exchange_has=MagicMock(return_value=True)
|
exchange_has=MagicMock(return_value=True)
|
||||||
)
|
)
|
||||||
whitelist_conf['pairlists'] = [pairlistconfig]
|
whitelist_conf['pairlists'] = [pairlistconfig]
|
||||||
|
|
||||||
freqtrade = get_patched_freqtradebot(mocker, whitelist_conf)
|
if desc_expected is not None:
|
||||||
short_desc = str(freqtrade.pairlists.short_desc())
|
freqtrade = get_patched_freqtradebot(mocker, whitelist_conf)
|
||||||
assert short_desc == expected
|
short_desc = str(freqtrade.pairlists.short_desc())
|
||||||
|
assert short_desc == desc_expected
|
||||||
|
else: # OperationalException expected
|
||||||
|
with pytest.raises(OperationalException,
|
||||||
|
match=exception_expected):
|
||||||
|
freqtrade = get_patched_freqtradebot(mocker, whitelist_conf)
|
||||||
|
|
||||||
|
|
||||||
def test_pairlistmanager_no_pairlist(mocker, markets, whitelist_conf, caplog):
|
def test_pairlistmanager_no_pairlist(mocker, markets, whitelist_conf, caplog):
|
||||||
|
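The reworked test_pricefilter_desc above parametrizes both a description check and an expected OperationalException in one test. A self-contained sketch of that pattern (with a toy validator and a stand-in exception class, not freqtrade's real code) looks like this:

import pytest


class OperationalException(Exception):
    # Stand-in for freqtrade's exception type, so the sketch is self-contained.
    pass


def build_desc(config):
    # Toy validator mirroring the behaviour the test cases expect.
    if config.get('low_price_ratio', 0) < 0:
        raise OperationalException('PriceFilter requires low_price_ratio to be >= 0')
    return f"PriceFilter - low_price_ratio {config.get('low_price_ratio')}"


@pytest.mark.parametrize('config,desc_expected,exception_expected', [
    ({'low_price_ratio': 0.001}, 'PriceFilter - low_price_ratio 0.001', None),
    ({'low_price_ratio': -0.001}, None, 'low_price_ratio to be >= 0'),
])
def test_desc_or_exception(config, desc_expected, exception_expected):
    if desc_expected is not None:
        assert build_desc(config) == desc_expected
    else:
        with pytest.raises(OperationalException, match=exception_expected):
            build_desc(config)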
@ -79,7 +79,8 @@ def test_rpc_trade_status(default_conf, ticker, fee, mocker) -> None:
|
|||||||
'open_rate': 1.098e-05,
|
'open_rate': 1.098e-05,
|
||||||
'close_rate': None,
|
'close_rate': None,
|
||||||
'current_rate': 1.099e-05,
|
'current_rate': 1.099e-05,
|
||||||
'amount': 91.07468124,
|
'amount': 91.07468123,
|
||||||
|
'amount_requested': 91.07468123,
|
||||||
'stake_amount': 0.001,
|
'stake_amount': 0.001,
|
||||||
'close_profit': None,
|
'close_profit': None,
|
||||||
'close_profit_pct': None,
|
'close_profit_pct': None,
|
||||||
@ -100,6 +101,7 @@ def test_rpc_trade_status(default_conf, ticker, fee, mocker) -> None:
|
|||||||
'initial_stop_loss_ratio': -0.1,
|
'initial_stop_loss_ratio': -0.1,
|
||||||
'stoploss_current_dist': -1.1080000000000002e-06,
|
'stoploss_current_dist': -1.1080000000000002e-06,
|
||||||
'stoploss_current_dist_ratio': -0.10081893,
|
'stoploss_current_dist_ratio': -0.10081893,
|
||||||
|
'stoploss_current_dist_pct': -10.08,
|
||||||
'stoploss_entry_dist': -0.00010475,
|
'stoploss_entry_dist': -0.00010475,
|
||||||
'stoploss_entry_dist_ratio': -0.10448878,
|
'stoploss_entry_dist_ratio': -0.10448878,
|
||||||
'open_order': None,
|
'open_order': None,
|
||||||
@ -142,7 +144,8 @@ def test_rpc_trade_status(default_conf, ticker, fee, mocker) -> None:
|
|||||||
'open_rate': 1.098e-05,
|
'open_rate': 1.098e-05,
|
||||||
'close_rate': None,
|
'close_rate': None,
|
||||||
'current_rate': ANY,
|
'current_rate': ANY,
|
||||||
'amount': 91.07468124,
|
'amount': 91.07468123,
|
||||||
|
'amount_requested': 91.07468123,
|
||||||
'stake_amount': 0.001,
|
'stake_amount': 0.001,
|
||||||
'close_profit': None,
|
'close_profit': None,
|
||||||
'close_profit_pct': None,
|
'close_profit_pct': None,
|
||||||
@ -163,6 +166,7 @@ def test_rpc_trade_status(default_conf, ticker, fee, mocker) -> None:
|
|||||||
'initial_stop_loss_ratio': -0.1,
|
'initial_stop_loss_ratio': -0.1,
|
||||||
'stoploss_current_dist': ANY,
|
'stoploss_current_dist': ANY,
|
||||||
'stoploss_current_dist_ratio': ANY,
|
'stoploss_current_dist_ratio': ANY,
|
||||||
|
'stoploss_current_dist_pct': ANY,
|
||||||
'stoploss_entry_dist': -0.00010475,
|
'stoploss_entry_dist': -0.00010475,
|
||||||
'stoploss_entry_dist_ratio': -0.10448878,
|
'stoploss_entry_dist_ratio': -0.10448878,
|
||||||
'open_order': None,
|
'open_order': None,
|
||||||
@ -253,11 +257,11 @@ def test_rpc_daily_profit(default_conf, update, ticker, fee,
|
|||||||
assert days['fiat_display_currency'] == default_conf['fiat_display_currency']
|
assert days['fiat_display_currency'] == default_conf['fiat_display_currency']
|
||||||
for day in days['data']:
|
for day in days['data']:
|
||||||
# [datetime.date(2018, 1, 11), '0.00000000 BTC', '0.000 USD']
|
# [datetime.date(2018, 1, 11), '0.00000000 BTC', '0.000 USD']
|
||||||
assert (day['abs_profit'] == '0.00000000' or
|
assert (day['abs_profit'] == 0.0 or
|
||||||
day['abs_profit'] == '0.00006217')
|
day['abs_profit'] == 0.00006217)
|
||||||
|
|
||||||
assert (day['fiat_value'] == '0.000' or
|
assert (day['fiat_value'] == 0.0 or
|
||||||
day['fiat_value'] == '0.767')
|
day['fiat_value'] == 0.76748865)
|
||||||
# ensure first day is current date
|
# ensure first day is current date
|
||||||
assert str(days['data'][0]['date']) == str(datetime.utcnow().date())
|
assert str(days['data'][0]['date']) == str(datetime.utcnow().date())
|
||||||
|
|
||||||
@ -665,7 +669,8 @@ def test_rpc_forcesell(default_conf, ticker, fee, mocker) -> None:
|
|||||||
return_value={
|
return_value={
|
||||||
'status': 'closed',
|
'status': 'closed',
|
||||||
'type': 'limit',
|
'type': 'limit',
|
||||||
'side': 'buy'
|
'side': 'buy',
|
||||||
|
'filled': 0.0,
|
||||||
}
|
}
|
||||||
),
|
),
|
||||||
get_fee=fee,
|
get_fee=fee,
|
||||||
@ -691,6 +696,7 @@ def test_rpc_forcesell(default_conf, ticker, fee, mocker) -> None:
|
|||||||
msg = rpc._rpc_forcesell('all')
|
msg = rpc._rpc_forcesell('all')
|
||||||
assert msg == {'result': 'Created sell orders for all open trades.'}
|
assert msg == {'result': 'Created sell orders for all open trades.'}
|
||||||
|
|
||||||
|
freqtradebot.enter_positions()
|
||||||
msg = rpc._rpc_forcesell('1')
|
msg = rpc._rpc_forcesell('1')
|
||||||
assert msg == {'result': 'Created sell order for trade 1.'}
|
assert msg == {'result': 'Created sell order for trade 1.'}
|
||||||
|
|
||||||
@ -703,17 +709,24 @@ def test_rpc_forcesell(default_conf, ticker, fee, mocker) -> None:
|
|||||||
|
|
||||||
freqtradebot.state = State.RUNNING
|
freqtradebot.state = State.RUNNING
|
||||||
assert cancel_order_mock.call_count == 0
|
assert cancel_order_mock.call_count == 0
|
||||||
|
freqtradebot.enter_positions()
|
||||||
# make a limit-buy open trade
|
# make a limit-buy open trade
|
||||||
trade = Trade.query.filter(Trade.id == '1').first()
|
trade = Trade.query.filter(Trade.id == '1').first()
|
||||||
filled_amount = trade.amount / 2
|
filled_amount = trade.amount / 2
|
||||||
|
# Fetch order - it's open first, and closed after cancel_order is called.
|
||||||
mocker.patch(
|
mocker.patch(
|
||||||
'freqtrade.exchange.Exchange.fetch_order',
|
'freqtrade.exchange.Exchange.fetch_order',
|
||||||
return_value={
|
side_effect=[{
|
||||||
'status': 'open',
|
'status': 'open',
|
||||||
'type': 'limit',
|
'type': 'limit',
|
||||||
'side': 'buy',
|
'side': 'buy',
|
||||||
'filled': filled_amount
|
'filled': filled_amount
|
||||||
}
|
}, {
|
||||||
|
'status': 'closed',
|
||||||
|
'type': 'limit',
|
||||||
|
'side': 'buy',
|
||||||
|
'filled': filled_amount
|
||||||
|
}]
|
||||||
)
|
)
|
||||||
# check that the trade is called, which is done by ensuring exchange.cancel_order is called
|
# check that the trade is called, which is done by ensuring exchange.cancel_order is called
|
||||||
# and trade amount is updated
|
# and trade amount is updated
|
||||||
@ -721,6 +734,16 @@ def test_rpc_forcesell(default_conf, ticker, fee, mocker) -> None:
|
|||||||
assert cancel_order_mock.call_count == 1
|
assert cancel_order_mock.call_count == 1
|
||||||
assert trade.amount == filled_amount
|
assert trade.amount == filled_amount
|
||||||
|
|
||||||
|
mocker.patch(
|
||||||
|
'freqtrade.exchange.Exchange.fetch_order',
|
||||||
|
return_value={
|
||||||
|
'status': 'open',
|
||||||
|
'type': 'limit',
|
||||||
|
'side': 'buy',
|
||||||
|
'filled': filled_amount
|
||||||
|
})
|
||||||
|
|
||||||
|
freqtradebot.config['max_open_trades'] = 3
|
||||||
freqtradebot.enter_positions()
|
freqtradebot.enter_positions()
|
||||||
trade = Trade.query.filter(Trade.id == '2').first()
|
trade = Trade.query.filter(Trade.id == '2').first()
|
||||||
amount = trade.amount
|
amount = trade.amount
|
||||||
@ -740,20 +763,22 @@ def test_rpc_forcesell(default_conf, ticker, fee, mocker) -> None:
|
|||||||
assert cancel_order_mock.call_count == 2
|
assert cancel_order_mock.call_count == 2
|
||||||
assert trade.amount == amount
|
assert trade.amount == amount
|
||||||
|
|
||||||
freqtradebot.enter_positions()
|
|
||||||
# make an limit-sell open trade
|
# make an limit-sell open trade
|
||||||
mocker.patch(
|
mocker.patch(
|
||||||
'freqtrade.exchange.Exchange.fetch_order',
|
'freqtrade.exchange.Exchange.fetch_order',
|
||||||
return_value={
|
return_value={
|
||||||
'status': 'open',
|
'status': 'open',
|
||||||
'type': 'limit',
|
'type': 'limit',
|
||||||
'side': 'sell'
|
'side': 'sell',
|
||||||
|
'amount': amount,
|
||||||
|
'remaining': amount,
|
||||||
|
'filled': 0.0
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
msg = rpc._rpc_forcesell('3')
|
msg = rpc._rpc_forcesell('3')
|
||||||
assert msg == {'result': 'Created sell order for trade 3.'}
|
assert msg == {'result': 'Created sell order for trade 3.'}
|
||||||
# status quo, no exchange calls
|
# status quo, no exchange calls
|
||||||
assert cancel_order_mock.call_count == 2
|
assert cancel_order_mock.call_count == 3
|
||||||
|
|
||||||
|
|
||||||
def test_performance_handle(default_conf, ticker, limit_buy_order, fee,
|
def test_performance_handle(default_conf, ticker, limit_buy_order, fee,
|
||||||
|
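The fetch_order mock above now uses side_effect with a list, which is the standard unittest.mock way to return a different value on each call. A minimal, runnable illustration (values made up for the sketch):

from unittest.mock import MagicMock

# Successive calls to a mock with side_effect=[...] return successive items,
# so the patched fetch_order can report 'open' on the first call and 'closed'
# on the next one.
fetch_order = MagicMock(side_effect=[
    {'status': 'open', 'type': 'limit', 'side': 'buy', 'filled': 45.5},
    {'status': 'closed', 'type': 'limit', 'side': 'buy', 'filled': 45.5},
])

assert fetch_order()['status'] == 'open'
assert fetch_order()['status'] == 'closed'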
@ -10,10 +10,12 @@ from flask import Flask
|
|||||||
from requests.auth import _basic_auth_str
|
from requests.auth import _basic_auth_str
|
||||||
|
|
||||||
from freqtrade.__init__ import __version__
|
from freqtrade.__init__ import __version__
|
||||||
|
from freqtrade.loggers import setup_logging, setup_logging_pre
|
||||||
from freqtrade.persistence import Trade
|
from freqtrade.persistence import Trade
|
||||||
from freqtrade.rpc.api_server import BASE_URI, ApiServer
|
from freqtrade.rpc.api_server import BASE_URI, ApiServer
|
||||||
from freqtrade.state import State
|
from freqtrade.state import State
|
||||||
from tests.conftest import get_patched_freqtradebot, log_has, patch_get_signal, create_mock_trades
|
from tests.conftest import (create_mock_trades, get_patched_freqtradebot,
|
||||||
|
log_has, patch_get_signal)
|
||||||
|
|
||||||
_TEST_USER = "FreqTrader"
|
_TEST_USER = "FreqTrader"
|
||||||
_TEST_PASS = "SuperSecurePassword1!"
|
_TEST_PASS = "SuperSecurePassword1!"
|
||||||
@ -21,6 +23,9 @@ _TEST_PASS = "SuperSecurePassword1!"
|
|||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def botclient(default_conf, mocker):
|
def botclient(default_conf, mocker):
|
||||||
|
setup_logging_pre()
|
||||||
|
setup_logging(default_conf)
|
||||||
|
|
||||||
default_conf.update({"api_server": {"enabled": True,
|
default_conf.update({"api_server": {"enabled": True,
|
||||||
"listen_ip_address": "127.0.0.1",
|
"listen_ip_address": "127.0.0.1",
|
||||||
"listen_port": 8080,
|
"listen_port": 8080,
|
||||||
@ -87,20 +92,20 @@ def test_api_unauthorized(botclient):
|
|||||||
assert rc.json == {'error': 'Unauthorized'}
|
assert rc.json == {'error': 'Unauthorized'}
|
||||||
|
|
||||||
# Change only username
|
# Change only username
|
||||||
ftbot.config['api_server']['username'] = "Ftrader"
|
ftbot.config['api_server']['username'] = 'Ftrader'
|
||||||
rc = client_get(client, f"{BASE_URI}/version")
|
rc = client_get(client, f"{BASE_URI}/version")
|
||||||
assert_response(rc, 401)
|
assert_response(rc, 401)
|
||||||
assert rc.json == {'error': 'Unauthorized'}
|
assert rc.json == {'error': 'Unauthorized'}
|
||||||
|
|
||||||
# Change only password
|
# Change only password
|
||||||
ftbot.config['api_server']['username'] = _TEST_USER
|
ftbot.config['api_server']['username'] = _TEST_USER
|
||||||
ftbot.config['api_server']['password'] = "WrongPassword"
|
ftbot.config['api_server']['password'] = 'WrongPassword'
|
||||||
rc = client_get(client, f"{BASE_URI}/version")
|
rc = client_get(client, f"{BASE_URI}/version")
|
||||||
assert_response(rc, 401)
|
assert_response(rc, 401)
|
||||||
assert rc.json == {'error': 'Unauthorized'}
|
assert rc.json == {'error': 'Unauthorized'}
|
||||||
|
|
||||||
ftbot.config['api_server']['username'] = "Ftrader"
|
ftbot.config['api_server']['username'] = 'Ftrader'
|
||||||
ftbot.config['api_server']['password'] = "WrongPassword"
|
ftbot.config['api_server']['password'] = 'WrongPassword'
|
||||||
|
|
||||||
rc = client_get(client, f"{BASE_URI}/version")
|
rc = client_get(client, f"{BASE_URI}/version")
|
||||||
assert_response(rc, 401)
|
assert_response(rc, 401)
|
||||||
@ -261,7 +266,7 @@ def test_api_reloadconf(botclient):
|
|||||||
|
|
||||||
rc = client_post(client, f"{BASE_URI}/reload_config")
|
rc = client_post(client, f"{BASE_URI}/reload_config")
|
||||||
assert_response(rc)
|
assert_response(rc)
|
||||||
assert rc.json == {'status': 'reloading config ...'}
|
assert rc.json == {'status': 'Reloading config ...'}
|
||||||
assert ftbot.state == State.RELOAD_CONFIG
|
assert ftbot.state == State.RELOAD_CONFIG
|
||||||
|
|
||||||
|
|
||||||
@ -423,6 +428,34 @@ def test_api_delete_trade(botclient, mocker, fee, markets):
|
|||||||
assert stoploss_mock.call_count == 1
|
assert stoploss_mock.call_count == 1
|
||||||
|
|
||||||
|
|
||||||
|
def test_api_logs(botclient):
|
||||||
|
ftbot, client = botclient
|
||||||
|
rc = client_get(client, f"{BASE_URI}/logs")
|
||||||
|
assert_response(rc)
|
||||||
|
assert len(rc.json) == 2
|
||||||
|
assert 'logs' in rc.json
|
||||||
|
# Using a fixed comparison here would make this test fail!
|
||||||
|
assert rc.json['log_count'] > 1
|
||||||
|
assert len(rc.json['logs']) == rc.json['log_count']
|
||||||
|
|
||||||
|
assert isinstance(rc.json['logs'][0], list)
|
||||||
|
# date
|
||||||
|
assert isinstance(rc.json['logs'][0][0], str)
|
||||||
|
# created_timestamp
|
||||||
|
assert isinstance(rc.json['logs'][0][1], float)
|
||||||
|
assert isinstance(rc.json['logs'][0][2], str)
|
||||||
|
assert isinstance(rc.json['logs'][0][3], str)
|
||||||
|
assert isinstance(rc.json['logs'][0][4], str)
|
||||||
|
|
||||||
|
rc = client_get(client, f"{BASE_URI}/logs?limit=5")
|
||||||
|
assert_response(rc)
|
||||||
|
assert len(rc.json) == 2
|
||||||
|
assert 'logs' in rc.json
|
||||||
|
# Using a fixed comparison here would make this test fail!
|
||||||
|
assert rc.json['log_count'] == 5
|
||||||
|
assert len(rc.json['logs']) == rc.json['log_count']
|
||||||
|
|
||||||
|
|
||||||
def test_api_edge_disabled(botclient, mocker, ticker, fee, markets):
|
def test_api_edge_disabled(botclient, mocker, ticker, fee, markets):
|
||||||
ftbot, client = botclient
|
ftbot, client = botclient
|
||||||
patch_get_signal(ftbot, (True, False))
|
patch_get_signal(ftbot, (True, False))
|
||||||
@ -438,6 +471,7 @@ def test_api_edge_disabled(botclient, mocker, ticker, fee, markets):
|
|||||||
assert rc.json == {"error": "Error querying _edge: Edge is not enabled."}
|
assert rc.json == {"error": "Error querying _edge: Edge is not enabled."}
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.usefixtures("init_persistence")
|
||||||
def test_api_profit(botclient, mocker, ticker, fee, markets, limit_buy_order, limit_sell_order):
|
def test_api_profit(botclient, mocker, ticker, fee, markets, limit_buy_order, limit_sell_order):
|
||||||
ftbot, client = botclient
|
ftbot, client = botclient
|
||||||
patch_get_signal(ftbot, (True, False))
|
patch_get_signal(ftbot, (True, False))
|
||||||
@ -465,6 +499,7 @@ def test_api_profit(botclient, mocker, ticker, fee, markets, limit_buy_order, li
|
|||||||
assert rc.json['best_pair'] == ''
|
assert rc.json['best_pair'] == ''
|
||||||
assert rc.json['best_rate'] == 0
|
assert rc.json['best_rate'] == 0
|
||||||
|
|
||||||
|
trade = Trade.query.first()
|
||||||
trade.update(limit_sell_order)
|
trade.update(limit_sell_order)
|
||||||
|
|
||||||
trade.close_date = datetime.utcnow()
|
trade.close_date = datetime.utcnow()
|
||||||
@ -566,7 +601,8 @@ def test_api_status(botclient, mocker, ticker, fee, markets):
|
|||||||
rc = client_get(client, f"{BASE_URI}/status")
|
rc = client_get(client, f"{BASE_URI}/status")
|
||||||
assert_response(rc)
|
assert_response(rc)
|
||||||
assert len(rc.json) == 1
|
assert len(rc.json) == 1
|
||||||
assert rc.json == [{'amount': 91.07468124,
|
assert rc.json == [{'amount': 91.07468123,
|
||||||
|
'amount_requested': 91.07468123,
|
||||||
'base_currency': 'BTC',
|
'base_currency': 'BTC',
|
||||||
'close_date': None,
|
'close_date': None,
|
||||||
'close_date_hum': None,
|
'close_date_hum': None,
|
||||||
@ -599,6 +635,7 @@ def test_api_status(botclient, mocker, ticker, fee, markets):
|
|||||||
'initial_stop_loss_ratio': -0.1,
|
'initial_stop_loss_ratio': -0.1,
|
||||||
'stoploss_current_dist': -1.1080000000000002e-06,
|
'stoploss_current_dist': -1.1080000000000002e-06,
|
||||||
'stoploss_current_dist_ratio': -0.10081893,
|
'stoploss_current_dist_ratio': -0.10081893,
|
||||||
|
'stoploss_current_dist_pct': -10.08,
|
||||||
'stoploss_entry_dist': -0.00010475,
|
'stoploss_entry_dist': -0.00010475,
|
||||||
'stoploss_entry_dist_ratio': -0.10448878,
|
'stoploss_entry_dist_ratio': -0.10448878,
|
||||||
'trade_id': 1,
|
'trade_id': 1,
|
||||||
@ -675,7 +712,7 @@ def test_api_forcebuy(botclient, mocker, fee):
|
|||||||
assert rc.json == {"error": "Error querying _forcebuy: Forcebuy not enabled."}
|
assert rc.json == {"error": "Error querying _forcebuy: Forcebuy not enabled."}
|
||||||
|
|
||||||
# enable forcebuy
|
# enable forcebuy
|
||||||
ftbot.config["forcebuy_enable"] = True
|
ftbot.config['forcebuy_enable'] = True
|
||||||
|
|
||||||
fbuy_mock = MagicMock(return_value=None)
|
fbuy_mock = MagicMock(return_value=None)
|
||||||
mocker.patch("freqtrade.rpc.RPC._rpc_forcebuy", fbuy_mock)
|
mocker.patch("freqtrade.rpc.RPC._rpc_forcebuy", fbuy_mock)
|
||||||
@ -688,6 +725,7 @@ def test_api_forcebuy(botclient, mocker, fee):
|
|||||||
fbuy_mock = MagicMock(return_value=Trade(
|
fbuy_mock = MagicMock(return_value=Trade(
|
||||||
pair='ETH/ETH',
|
pair='ETH/ETH',
|
||||||
amount=1,
|
amount=1,
|
||||||
|
amount_requested=1,
|
||||||
exchange='bittrex',
|
exchange='bittrex',
|
||||||
stake_amount=1,
|
stake_amount=1,
|
||||||
open_rate=0.245441,
|
open_rate=0.245441,
|
||||||
@ -704,6 +742,7 @@ def test_api_forcebuy(botclient, mocker, fee):
|
|||||||
data='{"pair": "ETH/BTC"}')
|
data='{"pair": "ETH/BTC"}')
|
||||||
assert_response(rc)
|
assert_response(rc)
|
||||||
assert rc.json == {'amount': 1,
|
assert rc.json == {'amount': 1,
|
||||||
|
'amount_requested': 1,
|
||||||
'trade_id': None,
|
'trade_id': None,
|
||||||
'close_date': None,
|
'close_date': None,
|
||||||
'close_date_hum': None,
|
'close_date_hum': None,
|
||||||
@ -740,7 +779,7 @@ def test_api_forcebuy(botclient, mocker, fee):
|
|||||||
'min_rate': None,
|
'min_rate': None,
|
||||||
'open_order_id': '123456',
|
'open_order_id': '123456',
|
||||||
'open_rate_requested': None,
|
'open_rate_requested': None,
|
||||||
'open_trade_price': 0.2460546025,
|
'open_trade_price': 0.24605460,
|
||||||
'sell_reason': None,
|
'sell_reason': None,
|
||||||
'sell_order_status': None,
|
'sell_order_status': None,
|
||||||
'strategy': None,
|
'strategy': None,
|
||||||
|
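The new test_api_logs above depends on log records being captured before the tests run, which is presumably why the botclient fixture now calls setup_logging_pre() and setup_logging(). As a rough, self-contained illustration of that idea only (not the project's actual logging setup), an in-memory buffering handler can hold the recent records an endpoint would serve:

import logging
from logging.handlers import BufferingHandler

# Keep recent records in memory so an API endpoint could return them later.
buffer_handler = BufferingHandler(capacity=1000)
logging.getLogger().addHandler(buffer_handler)

log = logging.getLogger('sketch')
log.setLevel(logging.INFO)
log.info('bot started')

recent = [
    (logging.Formatter().formatTime(rec), rec.created, rec.name,
     rec.levelname, rec.getMessage())
    for rec in buffer_handler.buffer
]
print(recent[-5:])  # similar in shape to the entries the /logs endpoint returns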
@ -14,8 +14,10 @@ from telegram import Chat, Message, Update
|
|||||||
from telegram.error import NetworkError
|
from telegram.error import NetworkError
|
||||||
|
|
||||||
from freqtrade import __version__
|
from freqtrade import __version__
|
||||||
|
from freqtrade.constants import CANCEL_REASON
|
||||||
from freqtrade.edge import PairInfo
|
from freqtrade.edge import PairInfo
|
||||||
from freqtrade.freqtradebot import FreqtradeBot
|
from freqtrade.freqtradebot import FreqtradeBot
|
||||||
|
from freqtrade.loggers import setup_logging
|
||||||
from freqtrade.persistence import Trade
|
from freqtrade.persistence import Trade
|
||||||
from freqtrade.rpc import RPCMessageType
|
from freqtrade.rpc import RPCMessageType
|
||||||
from freqtrade.rpc.telegram import Telegram, authorized_only
|
from freqtrade.rpc.telegram import Telegram, authorized_only
|
||||||
@ -76,7 +78,7 @@ def test_telegram_init(default_conf, mocker, caplog) -> None:
|
|||||||
"['balance'], ['start'], ['stop'], ['forcesell'], ['forcebuy'], ['trades'], "
|
"['balance'], ['start'], ['stop'], ['forcesell'], ['forcebuy'], ['trades'], "
|
||||||
"['delete'], ['performance'], ['daily'], ['count'], ['reload_config', "
|
"['delete'], ['performance'], ['daily'], ['count'], ['reload_config', "
|
||||||
"'reload_conf'], ['show_config', 'show_conf'], ['stopbuy'], "
|
"'reload_conf'], ['show_config', 'show_conf'], ['stopbuy'], "
|
||||||
"['whitelist'], ['blacklist'], ['edge'], ['help'], ['version']]")
|
"['whitelist'], ['blacklist'], ['logs'], ['edge'], ['help'], ['version']]")
|
||||||
|
|
||||||
assert log_has(message_str, caplog)
|
assert log_has(message_str, caplog)
|
||||||
|
|
||||||
@ -145,7 +147,7 @@ def test_authorized_only_exception(default_conf, mocker, caplog) -> None:
|
|||||||
assert log_has('Exception occurred within Telegram module', caplog)
|
assert log_has('Exception occurred within Telegram module', caplog)
|
||||||
|
|
||||||
|
|
||||||
def test_status(default_conf, update, mocker, fee, ticker,) -> None:
|
def test_telegram_status(default_conf, update, mocker, fee, ticker,) -> None:
|
||||||
update.message.chat.id = "123"
|
update.message.chat.id = "123"
|
||||||
default_conf['telegram']['enabled'] = False
|
default_conf['telegram']['enabled'] = False
|
||||||
default_conf['telegram']['chat_id'] = "123"
|
default_conf['telegram']['chat_id'] = "123"
|
||||||
@ -175,6 +177,8 @@ def test_status(default_conf, update, mocker, fee, ticker,) -> None:
|
|||||||
'stop_loss': 1.099e-05,
|
'stop_loss': 1.099e-05,
|
||||||
'sell_order_status': None,
|
'sell_order_status': None,
|
||||||
'initial_stop_loss_pct': -0.05,
|
'initial_stop_loss_pct': -0.05,
|
||||||
|
'stoploss_current_dist': 1e-08,
|
||||||
|
'stoploss_current_dist_pct': -0.02,
|
||||||
'stop_loss_pct': -0.01,
|
'stop_loss_pct': -0.01,
|
||||||
'open_order': '(limit buy rem=0.00000000)'
|
'open_order': '(limit buy rem=0.00000000)'
|
||||||
}]),
|
}]),
|
||||||
@ -688,11 +692,11 @@ def test_reload_config_handle(default_conf, update, mocker) -> None:
|
|||||||
telegram._reload_config(update=update, context=MagicMock())
|
telegram._reload_config(update=update, context=MagicMock())
|
||||||
assert freqtradebot.state == State.RELOAD_CONFIG
|
assert freqtradebot.state == State.RELOAD_CONFIG
|
||||||
assert msg_mock.call_count == 1
|
assert msg_mock.call_count == 1
|
||||||
assert 'reloading config' in msg_mock.call_args_list[0][0][0]
|
assert 'Reloading config' in msg_mock.call_args_list[0][0][0]
|
||||||
|
|
||||||
|
|
||||||
def test_forcesell_handle(default_conf, update, ticker, fee,
|
def test_telegram_forcesell_handle(default_conf, update, ticker, fee,
|
||||||
ticker_sell_up, mocker) -> None:
|
ticker_sell_up, mocker) -> None:
|
||||||
mocker.patch('freqtrade.rpc.rpc.CryptoToFiatConverter._find_price', return_value=15000.0)
|
mocker.patch('freqtrade.rpc.rpc.CryptoToFiatConverter._find_price', return_value=15000.0)
|
||||||
rpc_mock = mocker.patch('freqtrade.rpc.telegram.Telegram.send_msg', MagicMock())
|
rpc_mock = mocker.patch('freqtrade.rpc.telegram.Telegram.send_msg', MagicMock())
|
||||||
mocker.patch('freqtrade.rpc.telegram.Telegram._init', MagicMock())
|
mocker.patch('freqtrade.rpc.telegram.Telegram._init', MagicMock())
|
||||||
@ -722,7 +726,7 @@ def test_forcesell_handle(default_conf, update, ticker, fee,
|
|||||||
context.args = ["1"]
|
context.args = ["1"]
|
||||||
telegram._forcesell(update=update, context=context)
|
telegram._forcesell(update=update, context=context)
|
||||||
|
|
||||||
assert rpc_mock.call_count == 2
|
assert rpc_mock.call_count == 3
|
||||||
last_msg = rpc_mock.call_args_list[-1][0][0]
|
last_msg = rpc_mock.call_args_list[-1][0][0]
|
||||||
assert {
|
assert {
|
||||||
'type': RPCMessageType.SELL_NOTIFICATION,
|
'type': RPCMessageType.SELL_NOTIFICATION,
|
||||||
@ -731,7 +735,7 @@ def test_forcesell_handle(default_conf, update, ticker, fee,
|
|||||||
'pair': 'ETH/BTC',
|
'pair': 'ETH/BTC',
|
||||||
'gain': 'profit',
|
'gain': 'profit',
|
||||||
'limit': 1.173e-05,
|
'limit': 1.173e-05,
|
||||||
'amount': 91.07468123861567,
|
'amount': 91.07468123,
|
||||||
'order_type': 'limit',
|
'order_type': 'limit',
|
||||||
'open_rate': 1.098e-05,
|
'open_rate': 1.098e-05,
|
||||||
'current_rate': 1.173e-05,
|
'current_rate': 1.173e-05,
|
||||||
@ -745,8 +749,8 @@ def test_forcesell_handle(default_conf, update, ticker, fee,
|
|||||||
} == last_msg
|
} == last_msg
|
||||||
|
|
||||||
|
|
||||||
def test_forcesell_down_handle(default_conf, update, ticker, fee,
|
def test_telegram_forcesell_down_handle(default_conf, update, ticker, fee,
|
||||||
ticker_sell_down, mocker) -> None:
|
ticker_sell_down, mocker) -> None:
|
||||||
mocker.patch('freqtrade.rpc.fiat_convert.CryptoToFiatConverter._find_price',
|
mocker.patch('freqtrade.rpc.fiat_convert.CryptoToFiatConverter._find_price',
|
||||||
return_value=15000.0)
|
return_value=15000.0)
|
||||||
rpc_mock = mocker.patch('freqtrade.rpc.telegram.Telegram.send_msg', MagicMock())
|
rpc_mock = mocker.patch('freqtrade.rpc.telegram.Telegram.send_msg', MagicMock())
|
||||||
@ -781,7 +785,7 @@ def test_forcesell_down_handle(default_conf, update, ticker, fee,
|
|||||||
context.args = ["1"]
|
context.args = ["1"]
|
||||||
telegram._forcesell(update=update, context=context)
|
telegram._forcesell(update=update, context=context)
|
||||||
|
|
||||||
assert rpc_mock.call_count == 2
|
assert rpc_mock.call_count == 3
|
||||||
|
|
||||||
last_msg = rpc_mock.call_args_list[-1][0][0]
|
last_msg = rpc_mock.call_args_list[-1][0][0]
|
||||||
assert {
|
assert {
|
||||||
@ -791,7 +795,7 @@ def test_forcesell_down_handle(default_conf, update, ticker, fee,
|
|||||||
'pair': 'ETH/BTC',
|
'pair': 'ETH/BTC',
|
||||||
'gain': 'loss',
|
'gain': 'loss',
|
||||||
'limit': 1.043e-05,
|
'limit': 1.043e-05,
|
||||||
'amount': 91.07468123861567,
|
'amount': 91.07468123,
|
||||||
'order_type': 'limit',
|
'order_type': 'limit',
|
||||||
'open_rate': 1.098e-05,
|
'open_rate': 1.098e-05,
|
||||||
'current_rate': 1.043e-05,
|
'current_rate': 1.043e-05,
|
||||||
@ -831,8 +835,9 @@ def test_forcesell_all_handle(default_conf, update, ticker, fee, mocker) -> None
|
|||||||
context.args = ["all"]
|
context.args = ["all"]
|
||||||
telegram._forcesell(update=update, context=context)
|
telegram._forcesell(update=update, context=context)
|
||||||
|
|
||||||
assert rpc_mock.call_count == 4
|
# Called for each trade 3 times
|
||||||
msg = rpc_mock.call_args_list[0][0][0]
|
assert rpc_mock.call_count == 8
|
||||||
|
msg = rpc_mock.call_args_list[1][0][0]
|
||||||
assert {
|
assert {
|
||||||
'type': RPCMessageType.SELL_NOTIFICATION,
|
'type': RPCMessageType.SELL_NOTIFICATION,
|
||||||
'trade_id': 1,
|
'trade_id': 1,
|
||||||
@ -840,7 +845,7 @@ def test_forcesell_all_handle(default_conf, update, ticker, fee, mocker) -> None
|
|||||||
'pair': 'ETH/BTC',
|
'pair': 'ETH/BTC',
|
||||||
'gain': 'loss',
|
'gain': 'loss',
|
||||||
'limit': 1.099e-05,
|
'limit': 1.099e-05,
|
||||||
'amount': 91.07468123861567,
|
'amount': 91.07468123,
|
||||||
'order_type': 'limit',
|
'order_type': 'limit',
|
||||||
'open_rate': 1.098e-05,
|
'open_rate': 1.098e-05,
|
||||||
'current_rate': 1.099e-05,
|
'current_rate': 1.099e-05,
|
||||||
@ -1107,6 +1112,40 @@ def test_blacklist_static(default_conf, update, mocker) -> None:
|
|||||||
assert freqtradebot.pairlists.blacklist == ["DOGE/BTC", "HOT/BTC", "ETH/BTC"]
|
assert freqtradebot.pairlists.blacklist == ["DOGE/BTC", "HOT/BTC", "ETH/BTC"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_telegram_logs(default_conf, update, mocker) -> None:
|
||||||
|
msg_mock = MagicMock()
|
||||||
|
mocker.patch.multiple(
|
||||||
|
'freqtrade.rpc.telegram.Telegram',
|
||||||
|
_init=MagicMock(),
|
||||||
|
_send_msg=msg_mock
|
||||||
|
)
|
||||||
|
setup_logging(default_conf)
|
||||||
|
|
||||||
|
freqtradebot = get_patched_freqtradebot(mocker, default_conf)
|
||||||
|
|
||||||
|
telegram = Telegram(freqtradebot)
|
||||||
|
context = MagicMock()
|
||||||
|
context.args = []
|
||||||
|
telegram._logs(update=update, context=context)
|
||||||
|
assert msg_mock.call_count == 1
|
||||||
|
assert "freqtrade\\.rpc\\.telegram" in msg_mock.call_args_list[0][0][0]
|
||||||
|
|
||||||
|
msg_mock.reset_mock()
|
||||||
|
context.args = ["1"]
|
||||||
|
telegram._logs(update=update, context=context)
|
||||||
|
assert msg_mock.call_count == 1
|
||||||
|
|
||||||
|
msg_mock.reset_mock()
|
||||||
|
# Test with changed MaxMessageLength
|
||||||
|
mocker.patch('freqtrade.rpc.telegram.MAX_TELEGRAM_MESSAGE_LENGTH', 200)
|
||||||
|
context = MagicMock()
|
||||||
|
context.args = []
|
||||||
|
telegram._logs(update=update, context=context)
|
||||||
|
# Called at least 3 times. Exact times will change with unrelated changes to setup messages
|
||||||
|
# Therefore we don't test for this explicitly.
|
||||||
|
assert msg_mock.call_count > 3
|
||||||
|
|
||||||
|
|
||||||
def test_edge_disabled(default_conf, update, mocker) -> None:
|
def test_edge_disabled(default_conf, update, mocker) -> None:
|
||||||
msg_mock = MagicMock()
|
msg_mock = MagicMock()
|
||||||
mocker.patch.multiple(
|
mocker.patch.multiple(
|
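The new test_telegram_logs above drives the /logs command through telegram._logs and then patches MAX_TELEGRAM_MESSAGE_LENGTH down to 200 to force the buffered log records to be split across several _send_msg calls. The sketch below only illustrates that splitting idea; the helper name and the 4096-character default are assumptions (4096 is Telegram's own per-message limit), not freqtrade's actual implementation:

MAX_TELEGRAM_MESSAGE_LENGTH = 4096  # assumed default; the test patches this down to 200


def send_log_records(records, send_msg, limit=MAX_TELEGRAM_MESSAGE_LENGTH):
    # Hypothetical helper: batch log records so no single message exceeds `limit`.
    buffer = ""
    for record in records:
        line = record + "\n"
        if buffer and len(buffer) + len(line) > limit:
            send_msg(buffer)
            buffer = ""
        buffer += line
    if buffer:
        send_msg(buffer)


sent = []
records = [f"2020-06-01 12:00:0{i} - freqtrade.rpc.telegram - INFO - message {i}" for i in range(10)]
send_log_records(records, sent.append, limit=200)
assert len(sent) > 1  # a small limit forces multiple messages, which is what the test checks for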
@ -1306,9 +1345,10 @@ def test_send_msg_buy_cancel_notification(default_conf, mocker) -> None:
 'type': RPCMessageType.BUY_CANCEL_NOTIFICATION,
 'exchange': 'Bittrex',
 'pair': 'ETH/BTC',
+'reason': CANCEL_REASON['TIMEOUT']
 })
-assert msg_mock.call_args[0][0] \
-== ('\N{WARNING SIGN} *Bittrex:* Cancelling Open Buy Order for ETH/BTC')
+assert (msg_mock.call_args[0][0] == '\N{WARNING SIGN} *Bittrex:* '
+'Cancelling open buy Order for ETH/BTC. Reason: cancelled due to timeout.')


 def test_send_msg_sell_notification(default_conf, mocker) -> None:
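The buy-cancel notification now carries a reason field taken from CANCEL_REASON, and the expected Telegram text gains a "Reason: …" suffix. A minimal sketch of composing that text from the notification dict follows; the formatter function is hypothetical and the CANCEL_REASON value is inferred from the expected string, so this is not the actual freqtrade.rpc.telegram template code:

CANCEL_REASON = {'TIMEOUT': 'cancelled due to timeout'}  # value inferred from the expected message


def format_buy_cancel(msg: dict) -> str:
    # Hypothetical formatter reproducing the string the test now expects.
    return ("\N{WARNING SIGN} *{exchange}:* "
            "Cancelling open buy Order for {pair}. Reason: {reason}.").format(**msg)


text = format_buy_cancel({'exchange': 'Bittrex', 'pair': 'ETH/BTC',
                          'reason': CANCEL_REASON['TIMEOUT']})
assert text == ('\N{WARNING SIGN} *Bittrex:* '
                'Cancelling open buy Order for ETH/BTC. Reason: cancelled due to timeout.')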
@ -1,6 +1,7 @@
 # pragma pylint: disable=missing-docstring, C0103

 import logging
+from datetime import datetime, timedelta, timezone
 from unittest.mock import MagicMock

 import arrow
@ -8,12 +9,12 @@ import pytest
 from pandas import DataFrame

 from freqtrade.configuration import TimeRange
+from freqtrade.data.dataprovider import DataProvider
 from freqtrade.data.history import load_data
 from freqtrade.exceptions import StrategyError
 from freqtrade.persistence import Trade
 from freqtrade.resolvers import StrategyResolver
 from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper
-from freqtrade.data.dataprovider import DataProvider
 from tests.conftest import log_has, log_has_re

 from .strats.default_strategy import DefaultStrategy
@ -261,14 +262,14 @@ def test_min_roi_reached3(default_conf, fee) -> None:
 strategy = StrategyResolver.load_strategy(default_conf)
 strategy.minimal_roi = min_roi
 trade = Trade(
 pair='ETH/BTC',
 stake_amount=0.001,
 amount=5,
 open_date=arrow.utcnow().shift(hours=-1).datetime,
 fee_open=fee.return_value,
 fee_close=fee.return_value,
 exchange='bittrex',
 open_rate=1,
 )

 assert not strategy.min_roi_reached(trade, 0.02, arrow.utcnow().shift(minutes=-56).datetime)
@ -387,6 +388,31 @@ def test_is_pair_locked(default_conf):
 strategy.unlock_pair(pair)
 assert not strategy.is_pair_locked(pair)

+pair = 'BTC/USDT'
+# Lock until 14:30
+lock_time = datetime(2020, 5, 1, 14, 30, 0, tzinfo=timezone.utc)
+strategy.lock_pair(pair, lock_time)
+# Lock is in the past ...
+assert not strategy.is_pair_locked(pair)
+# latest candle is from 14:20, lock goes to 14:30
+assert strategy.is_pair_locked(pair, lock_time + timedelta(minutes=-10))
+assert strategy.is_pair_locked(pair, lock_time + timedelta(minutes=-50))
+
+# latest candle is from 14:25 (lock should be lifted)
+# Since this is the "new candle" available at 14:30
+assert not strategy.is_pair_locked(pair, lock_time + timedelta(minutes=-4))
+
+# Should not be locked after time expired
+assert not strategy.is_pair_locked(pair, lock_time + timedelta(minutes=10))
+
+# Change timeframe to 15m
+strategy.timeframe = '15m'
+# Candle from 14:14 - lock goes until 14:30
+assert strategy.is_pair_locked(pair, lock_time + timedelta(minutes=-16))
+assert strategy.is_pair_locked(pair, lock_time + timedelta(minutes=-15, seconds=-2))
+# Candle from 14:15 - lock goes until 14:30
+assert not strategy.is_pair_locked(pair, lock_time + timedelta(minutes=-15))
+

 def test_is_informative_pairs_callback(default_conf):
 default_conf.update({'strategy': 'TestStrategyLegacy'})
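The new assertions in test_is_pair_locked pin down how is_pair_locked(pair, candle_date) treats the candle boundary: the pair stays locked as long as the candle following candle_date would still open before the lock's end time, so with the default 5m timeframe a 14:25 candle is no longer blocked by a lock running until 14:30, while with a 15m timeframe a 14:14 candle still is. The sketch below only reproduces that boundary rule as inferred from the asserts above; it is not the Strategy implementation, and only the timeframe_to_minutes helper (imported elsewhere in this diff) is real freqtrade API:

from datetime import datetime, timedelta, timezone

from freqtrade.strategy import timeframe_to_minutes


def still_locked(lock_until: datetime, candle_date: datetime, timeframe: str) -> bool:
    # Assumed rule: locked if the candle *following* `candle_date` would still
    # start before the lock expires.
    next_candle_open = candle_date + timedelta(minutes=timeframe_to_minutes(timeframe))
    return next_candle_open < lock_until


lock_time = datetime(2020, 5, 1, 14, 30, tzinfo=timezone.utc)
# 5m timeframe: candle from 14:20 is still locked, candle from 14:26 is not.
assert still_locked(lock_time, lock_time + timedelta(minutes=-10), '5m')
assert not still_locked(lock_time, lock_time + timedelta(minutes=-4), '5m')
# 15m timeframe: candle from 14:14 is locked, candle from 14:15 is not.
assert still_locked(lock_time, lock_time + timedelta(minutes=-16), '15m')
assert not still_locked(lock_time, lock_time + timedelta(minutes=-15), '15m')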
88 tests/strategy/test_strategy_helpers.py Normal file
@ -0,0 +1,88 @@
+import pandas as pd
+import numpy as np
+
+from freqtrade.strategy import merge_informative_pair, timeframe_to_minutes
+
+
+def generate_test_data(timeframe: str, size: int):
+np.random.seed(42)
+tf_mins = timeframe_to_minutes(timeframe)
+
+base = np.random.normal(20, 2, size=size)
+
+date = pd.period_range('2020-07-05', periods=size, freq=f'{tf_mins}min').to_timestamp()
+df = pd.DataFrame({
+'date': date,
+'open': base,
+'high': base + np.random.normal(2, 1, size=size),
+'low': base - np.random.normal(2, 1, size=size),
+'close': base + np.random.normal(0, 1, size=size),
+'volume': np.random.normal(200, size=size)
+}
+)
+df = df.dropna()
+return df
+
+
+def test_merge_informative_pair():
+data = generate_test_data('15m', 40)
+informative = generate_test_data('1h', 40)
+
+result = merge_informative_pair(data, informative, '15m', '1h', ffill=True)
+assert isinstance(result, pd.DataFrame)
+assert len(result) == len(data)
+assert 'date' in result.columns
+assert result['date'].equals(data['date'])
+assert 'date_1h' in result.columns
+
+assert 'open' in result.columns
+assert 'open_1h' in result.columns
+assert result['open'].equals(data['open'])
+
+assert 'close' in result.columns
+assert 'close_1h' in result.columns
+assert result['close'].equals(data['close'])
+
+assert 'volume' in result.columns
+assert 'volume_1h' in result.columns
+assert result['volume'].equals(data['volume'])
+
+# First 4 rows are empty
+assert result.iloc[0]['date_1h'] is pd.NaT
+assert result.iloc[1]['date_1h'] is pd.NaT
+assert result.iloc[2]['date_1h'] is pd.NaT
+assert result.iloc[3]['date_1h'] is pd.NaT
+# Next 4 rows contain the starting date (0:00)
+assert result.iloc[4]['date_1h'] == result.iloc[0]['date']
+assert result.iloc[5]['date_1h'] == result.iloc[0]['date']
+assert result.iloc[6]['date_1h'] == result.iloc[0]['date']
+assert result.iloc[7]['date_1h'] == result.iloc[0]['date']
+# Next 4 rows contain the next Hourly date original date row 4
+assert result.iloc[8]['date_1h'] == result.iloc[4]['date']
+
+
+def test_merge_informative_pair_same():
+data = generate_test_data('15m', 40)
+informative = generate_test_data('15m', 40)
+
+result = merge_informative_pair(data, informative, '15m', '15m', ffill=True)
+assert isinstance(result, pd.DataFrame)
+assert len(result) == len(data)
+assert 'date' in result.columns
+assert result['date'].equals(data['date'])
+assert 'date_15m' in result.columns
+
+assert 'open' in result.columns
+assert 'open_15m' in result.columns
+assert result['open'].equals(data['open'])
+
+assert 'close' in result.columns
+assert 'close_15m' in result.columns
+assert result['close'].equals(data['close'])
+
+assert 'volume' in result.columns
+assert 'volume_15m' in result.columns
+assert result['volume'].equals(data['volume'])
+
+# Dates match 1:1
+assert result['date_15m'].equals(result['date'])
Some files were not shown because too many files have changed in this diff.