Merge remote-tracking branch 'origin/develop' into feat/add-pytorch-model-support

robcaulk 2023-04-08 13:22:25 +02:00
commit 69b9b35a08
123 changed files with 7298 additions and 3422 deletions

View File

@ -16,7 +16,8 @@ on:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions:
repository-projects: read
jobs:
build_linux:
@ -321,7 +322,6 @@ jobs:
build_linux_online:
# Run pytest with "live" checks
runs-on: ubuntu-22.04
# permissions:
steps:
- uses: actions/checkout@v3
@ -425,7 +425,7 @@ jobs:
python setup.py sdist bdist_wheel
- name: Publish to PyPI (Test)
uses: pypa/gh-action-pypi-publish@v1.6.4
uses: pypa/gh-action-pypi-publish@v1.8.4
if: (github.event_name == 'release')
with:
user: __token__
@ -433,7 +433,7 @@ jobs:
repository_url: https://test.pypi.org/legacy/
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@v1.6.4
uses: pypa/gh-action-pypi-publish@v1.8.4
if: (github.event_name == 'release')
with:
user: __token__
@ -466,12 +466,13 @@ jobs:
- name: Build and test and push docker images
env:
IMAGE_NAME: freqtradeorg/freqtrade
BRANCH_NAME: ${{ steps.extract_branch.outputs.branch }}
run: |
build_helpers/publish_docker_multi.sh
deploy_arm:
permissions:
packages: write
needs: [ deploy ]
# Only run on 64bit machines
runs-on: [self-hosted, linux, ARM64]
@ -494,8 +495,9 @@ jobs:
- name: Build and test and push docker images
env:
IMAGE_NAME: freqtradeorg/freqtrade
BRANCH_NAME: ${{ steps.extract_branch.outputs.branch }}
GHCR_USERNAME: ${{ github.actor }}
GHCR_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
build_helpers/publish_docker_arm64.sh

View File

@ -13,12 +13,12 @@ repos:
- id: mypy
exclude: build_helpers
additional_dependencies:
- types-cachetools==5.3.0.4
- types-cachetools==5.3.0.5
- types-filelock==3.2.7
- types-requests==2.28.11.15
- types-tabulate==0.9.0.1
- types-python-dateutil==2.8.19.9
- SQLAlchemy==2.0.4
- types-requests==2.28.11.17
- types-tabulate==0.9.0.2
- types-python-dateutil==2.8.19.11
- SQLAlchemy==2.0.8
# stages: [push]
- repo: https://github.com/pycqa/isort
@ -30,7 +30,7 @@ repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: 'v0.0.251'
rev: 'v0.0.255'
hooks:
- id: ruff

View File

@ -1,4 +1,4 @@
FROM python:3.10.10-slim-bullseye as base
FROM python:3.10.11-slim-bullseye as base
# Setup env
ENV LANG C.UTF-8

View File

@ -8,8 +8,8 @@ if [ -n "$2" ] || [ ! -f "${INSTALL_LOC}/lib/libta_lib.a" ]; then
tar zxvf ta-lib-0.4.0-src.tar.gz
cd ta-lib \
&& sed -i.bak "s|0.00000001|0.000000000000000001 |g" src/ta_func/ta_utility.h \
&& curl 'http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD' -o config.guess \
&& curl 'http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD' -o config.sub \
&& curl 'https://raw.githubusercontent.com/gcc-mirror/gcc/master/config.guess' -o config.guess \
&& curl 'https://raw.githubusercontent.com/gcc-mirror/gcc/master/config.sub' -o config.sub \
&& ./configure --prefix=${INSTALL_LOC}/ \
&& make
if [ $? -ne 0 ]; then

View File

@ -3,6 +3,10 @@
# Use BuildKit, otherwise building on ARM fails
export DOCKER_BUILDKIT=1
IMAGE_NAME=freqtradeorg/freqtrade
CACHE_IMAGE=freqtradeorg/freqtrade_cache
GHCR_IMAGE_NAME=ghcr.io/freqtrade/freqtrade
# Replace / with _ to create a valid tag
TAG=$(echo "${BRANCH_NAME}" | sed -e "s/\//_/g")
TAG_PLOT=${TAG}_plot
@ -14,7 +18,6 @@ TAG_ARM=${TAG}_arm
TAG_PLOT_ARM=${TAG_PLOT}_arm
TAG_FREQAI_ARM=${TAG_FREQAI}_arm
TAG_FREQAI_RL_ARM=${TAG_FREQAI_RL}_arm
CACHE_IMAGE=freqtradeorg/freqtrade_cache
echo "Running for ${TAG}"
@ -38,13 +41,13 @@ if [ $? -ne 0 ]; then
echo "failed building multiarch images"
return 1
fi
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_PLOT_ARM} -f docker/Dockerfile.plot .
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_FREQAI_ARM} -f docker/Dockerfile.freqai .
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG_FREQAI_ARM} -t freqtrade:${TAG_FREQAI_RL_ARM} -f docker/Dockerfile.freqai_rl .
# Tag image for upload and next build step
docker tag freqtrade:$TAG_ARM ${CACHE_IMAGE}:$TAG_ARM
docker build --cache-from freqtrade:${TAG_ARM} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_PLOT_ARM} -f docker/Dockerfile.plot .
docker build --cache-from freqtrade:${TAG_ARM} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_FREQAI_ARM} -f docker/Dockerfile.freqai .
docker build --cache-from freqtrade:${TAG_ARM} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_FREQAI_RL_ARM} -f docker/Dockerfile.freqai_rl .
docker tag freqtrade:$TAG_PLOT_ARM ${CACHE_IMAGE}:$TAG_PLOT_ARM
docker tag freqtrade:$TAG_FREQAI_ARM ${CACHE_IMAGE}:$TAG_FREQAI_ARM
docker tag freqtrade:$TAG_FREQAI_RL_ARM ${CACHE_IMAGE}:$TAG_FREQAI_RL_ARM
@ -59,7 +62,6 @@ fi
docker images
# docker push ${IMAGE_NAME}
docker push ${CACHE_IMAGE}:$TAG_PLOT_ARM
docker push ${CACHE_IMAGE}:$TAG_FREQAI_ARM
docker push ${CACHE_IMAGE}:$TAG_FREQAI_RL_ARM
@ -82,14 +84,30 @@ docker manifest push -p ${IMAGE_NAME}:${TAG_FREQAI}
docker manifest create ${IMAGE_NAME}:${TAG_FREQAI_RL} ${CACHE_IMAGE}:${TAG_FREQAI_RL} ${CACHE_IMAGE}:${TAG_FREQAI_RL_ARM}
docker manifest push -p ${IMAGE_NAME}:${TAG_FREQAI_RL}
# copy images to ghcr.io
alias crane="docker run --rm -i -v $(pwd)/.crane:/home/nonroot/.docker/ gcr.io/go-containerregistry/crane"
mkdir .crane
chmod a+rwx .crane
echo "${GHCR_TOKEN}" | crane auth login ghcr.io -u "${GHCR_USERNAME}" --password-stdin
crane copy ${IMAGE_NAME}:${TAG_FREQAI_RL} ${GHCR_IMAGE_NAME}:${TAG_FREQAI_RL}
crane copy ${IMAGE_NAME}:${TAG_FREQAI} ${GHCR_IMAGE_NAME}:${TAG_FREQAI}
crane copy ${IMAGE_NAME}:${TAG_PLOT} ${GHCR_IMAGE_NAME}:${TAG_PLOT}
crane copy ${IMAGE_NAME}:${TAG} ${GHCR_IMAGE_NAME}:${TAG}
# Tag as latest for develop builds
if [ "${TAG}" = "develop" ]; then
echo 'Tagging image as latest'
docker manifest create ${IMAGE_NAME}:latest ${CACHE_IMAGE}:${TAG_ARM} ${IMAGE_NAME}:${TAG_PI} ${CACHE_IMAGE}:${TAG}
docker manifest push -p ${IMAGE_NAME}:latest
crane copy ${IMAGE_NAME}:latest ${GHCR_IMAGE_NAME}:latest
fi
docker images
rm -rf .crane
# Cleanup old images from arm64 node.
docker image prune -a --force --filter "until=24h"

View File

@ -2,6 +2,8 @@
# The below assumes a correctly setup docker buildx environment
IMAGE_NAME=freqtradeorg/freqtrade
CACHE_IMAGE=freqtradeorg/freqtrade_cache
# Replace / with _ to create a valid tag
TAG=$(echo "${BRANCH_NAME}" | sed -e "s/\//_/g")
TAG_PLOT=${TAG}_plot
@ -12,7 +14,6 @@ TAG_PI="${TAG}_pi"
PI_PLATFORM="linux/arm/v7"
echo "Running for ${TAG}"
CACHE_IMAGE=freqtradeorg/freqtrade_cache
CACHE_TAG=${CACHE_IMAGE}:${TAG_PI}_cache
# Add commit and commit_message to docker container
@ -58,9 +59,9 @@ fi
# Tag image for upload and next build step
docker tag freqtrade:$TAG ${CACHE_IMAGE}:$TAG
docker build --cache-from freqtrade:${TAG} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG} -t freqtrade:${TAG_PLOT} -f docker/Dockerfile.plot .
docker build --cache-from freqtrade:${TAG} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG} -t freqtrade:${TAG_FREQAI} -f docker/Dockerfile.freqai .
docker build --cache-from freqtrade:${TAG_FREQAI} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG_FREQAI} -t freqtrade:${TAG_FREQAI_RL} -f docker/Dockerfile.freqai_rl .
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG} -t freqtrade:${TAG_PLOT} -f docker/Dockerfile.plot .
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG} -t freqtrade:${TAG_FREQAI} -f docker/Dockerfile.freqai .
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG_FREQAI} -t freqtrade:${TAG_FREQAI_RL} -f docker/Dockerfile.freqai_rl .
docker tag freqtrade:$TAG_PLOT ${CACHE_IMAGE}:$TAG_PLOT
docker tag freqtrade:$TAG_FREQAI ${CACHE_IMAGE}:$TAG_FREQAI

View File

@ -12,6 +12,9 @@ This page provides you some basic concepts on how Freqtrade works and operates.
* **Indicators**: Technical indicators (SMA, EMA, RSI, ...).
* **Limit order**: Limit orders which execute at the defined limit price or better.
* **Market order**: Guaranteed to fill, may move price depending on the order size.
* **Current Profit**: Currently pending (unrealized) profit for this trade. This is mainly used throughout the bot and UI.
* **Realized Profit**: Already realized profit. Only relevant in combination with [partial exits](strategy-callbacks.md#adjust-trade-position) - which also explains the calculation logic for this.
* **Total Profit**: Combined realized and unrealized profit. The relative number (%) is calculated against the total investment in this trade.
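To make the relationship between these three numbers concrete, here is a small illustrative sketch (the values are hypothetical and not taken from this commit): it combines the realized profit of a partial exit with the currently pending profit of the remaining position, and expresses the total relative to the full investment in the trade.

```python
# Hypothetical trade with one partial exit (illustrative values only)
realized_profit = 5.0     # USDT already banked by the partial exit
unrealized_profit = 3.0   # USDT currently pending on the remaining position
total_investment = 200.0  # total stake committed to this trade

total_profit_abs = realized_profit + unrealized_profit    # 8.0 USDT
total_profit_ratio = total_profit_abs / total_investment  # 0.04 -> 4 %
print(f"Total profit: {total_profit_abs} USDT ({total_profit_ratio:.2%})")
```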
## Fee handling
@ -57,10 +60,10 @@ This loop will be repeated again and again until the bot is stopped.
* Load historic data for configured pairlist.
* Calls `bot_start()` once.
* Calls `bot_loop_start()` once.
* Calculate indicators (calls `populate_indicators()` once per pair).
* Calculate entry / exit signals (calls `populate_entry_trend()` and `populate_exit_trend()` once per pair).
* Loops per candle simulating entry and exit points.
* Calls `bot_loop_start()` strategy callback.
* Check for Order timeouts, either via the `unfilledtimeout` configuration, or via `check_entry_timeout()` / `check_exit_timeout()` strategy callbacks.
* Calls `adjust_entry_price()` strategy callback for open entry orders.
* Check for trade entry signals (`enter_long` / `enter_short` columns).

View File

@ -6,8 +6,8 @@ Low level feature engineering is performed in the user strategy within a set of
| Function | Description |
|---------------|-------------|
| `feature_engineering__expand_all()` | This optional function will automatically expand the defined features on the config defined `indicator_periods_candles`, `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`.
| `feature_engineering__expand_basic()` | This optional function will automatically expand the defined features on the config defined `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`. Note: this function does *not* expand across `include_periods_candles`.
| `feature_engineering_expand_all()` | This optional function will automatically expand the defined features on the config defined `indicator_periods_candles`, `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`.
| `feature_engineering_expand_basic()` | This optional function will automatically expand the defined features on the config defined `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`. Note: this function does *not* expand across `include_periods_candles`.
| `feature_engineering_standard()` | This optional function will be called once with the dataframe of the base timeframe. This is the final function to be called, which means that the dataframe entering this function will contain all the features and columns from the base asset created by the other `feature_engineering_expand` functions. This function is a good place to do custom exotic feature extractions (e.g. tsfresh). This function is also a good place for any feature that should not be auto-expanded upon (e.g., day of the week).
| `set_freqai_targets()` | Required function to set the targets for the model. All targets must be prepended with `&` to be recognized by the FreqAI internals.
@ -186,7 +186,7 @@ In total, the number of features the user of the presented example strat has cre
All `feature_engineering_*` and `set_freqai_targets()` functions are passed a `metadata` dictionary which contains information about the `pair`, `tf` (timeframe), and `period` that FreqAI is automating for feature building. As such, a user can use `metadata` inside `feature_engineering_*` functions as criteria for blocking/reserving features for certain timeframes, periods, pairs etc.
```py
```python
def feature_engineering_expand_all(self, dataframe, period, metadata, **kwargs):
if metadata["tf"] == "1h":
dataframe["%-roc-period"] = ta.ROC(dataframe, timeperiod=period)

View File

@ -46,7 +46,7 @@ Mandatory parameters are marked as **Required** and have to be set in one of the
| `outlier_protection_percentage` | Enable to prevent outlier detection methods from discarding too much data. If more than `outlier_protection_percentage` % of points are detected as outliers by the SVM or DBSCAN, FreqAI will log a warning message and ignore outlier detection, i.e., the original dataset will be kept intact. If the outlier protection is triggered, no predictions will be made based on the training dataset. <br> **Datatype:** Float. <br> Default: `30`.
| `reverse_train_test_order` | Split the feature dataset (see below) and use the latest data split for training and test on historical split of the data. This allows the model to be trained up to the most recent data point, while avoiding overfitting. However, you should be careful to understand the unorthodox nature of this parameter before employing it. <br> **Datatype:** Boolean. <br> Default: `False` (no reversal).
| `shuffle_after_split` | Split the data into train and test sets, and then shuffle both sets individually. <br> **Datatype:** Boolean. <br> Default: `False`.
| `buffer_train_data_candles` | Cut `buffer_train_data_candles` off the beginning and end of the training data *after* the indicators were populated. The main example use is when predicting maxima and minima, the argrelextrema function cannot know the maxima/minima at the edges of the timerange. To improve model accuracy, it is best to compute argrelextrema on the full timerange and then use this function to cut off the edges (buffer) by the kernel. In another case, if the targets are set to a shifted price movement, this buffer is unnecessary because the shifted candles at the end of the timerange will be NaN and FreqAI will automatically cut those off of the training dataset.<br> **Datatype:** Boolean. <br> Default: `False`.
| `buffer_train_data_candles` | Cut `buffer_train_data_candles` off the beginning and end of the training data *after* the indicators were populated. The main example use is when predicting maxima and minima, the argrelextrema function cannot know the maxima/minima at the edges of the timerange. To improve model accuracy, it is best to compute argrelextrema on the full timerange and then use this function to cut off the edges (buffer) by the kernel. In another case, if the targets are set to a shifted price movement, this buffer is unnecessary because the shifted candles at the end of the timerange will be NaN and FreqAI will automatically cut those off of the training dataset.<br> **Datatype:** Integer. <br> Default: `0`.
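The reasoning behind this buffer can be illustrated with a minimal sketch (not freqtrade code; it assumes `scipy` and synthetic prices): extrema are detected on the full timerange first, and only afterwards are `buffer_train_data_candles` candles trimmed from both ends, where `argrelextrema` cannot see enough neighbours to judge a maximum or minimum reliably.

```python
import numpy as np
from scipy.signal import argrelextrema

# Synthetic close prices standing in for the full training timerange
close = np.cumsum(np.random.default_rng(42).normal(0, 1, 500)) + 100

# 1) Detect local maxima/minima on the FULL timerange
maxima_idx = argrelextrema(close, np.greater, order=20)[0]
minima_idx = argrelextrema(close, np.less, order=20)[0]

# 2) Only then cut the unreliable edges off the training data
buffer_candles = 20  # plays the role of buffer_train_data_candles
trimmed_close = close[buffer_candles:len(close) - buffer_candles]
```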
### Data split parameters
@ -84,6 +84,7 @@ Mandatory parameters are marked as **Required** and have to be set in one of the
| `add_state_info` | Tell FreqAI to include state information in the feature set for training and inferencing. The current state variables include trade duration, current profit, trade position. This is only available in dry/live runs, and is automatically switched to false for backtesting. <br> **Datatype:** bool. <br> Default: `False`.
| `net_arch` | Network architecture which is well described in [`stable_baselines3` doc](https://stable-baselines3.readthedocs.io/en/master/guide/custom_policy.html#examples). In summary: `[<shared layers>, dict(vf=[<non-shared value network layers>], pi=[<non-shared policy network layers>])]`. By default this is set to `[128, 128]`, which defines 2 shared hidden layers with 128 units each.
| `randomize_starting_position` | Randomize the starting point of each episode to avoid overfitting. <br> **Datatype:** bool. <br> Default: `False`.
| `drop_ohlc_from_features` | Do not include the normalized ohlc data in the feature set passed to the agent during training (ohlc will still be used for driving the environment in all cases) <br> **Datatype:** Boolean. <br> **Default:** `False`
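As a quick orientation, the reinforcement-learning keys from this table could sit together in the `rl_config` section roughly as follows (a hedged sketch rendered as a Python dict; the values are illustrative, not recommendations):

```python
# Illustrative excerpt of the freqai "rl_config" section (values are examples only)
rl_config = {
    "add_state_info": True,               # include trade duration / profit / position (dry & live only)
    "net_arch": [128, 128],               # two shared hidden layers with 128 units each
    "randomize_starting_position": True,  # randomize episode start points to avoid overfitting
    "drop_ohlc_from_features": False,     # keep normalized OHLC in the agent's feature set
}
```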
### PyTorch parameters

View File

@ -55,7 +55,7 @@ where `ReinforcementLearner` will use the templated `ReinforcementLearner` from
dataframe["&-action"] = 0
```
Most of the function remains the same as for typical Regressors, however, the function above shows how the strategy must pass the raw price data to the agent so that it has access to raw OHLCV in the training environment:
Most of the function remains the same as for typical Regressors, however, the function below shows how the strategy must pass the raw price data to the agent so that it has access to raw OHLCV in the training environment:
```python
def feature_engineering_standard(self, dataframe, **kwargs):
@ -176,9 +176,11 @@ As you begin to modify the strategy and the prediction model, you will quickly r
factor = 100
pair = self.pair.replace(':', '')
# you can use feature values from dataframe
# Assumes the shifted RSI indicator has been generated in the strategy.
rsi_now = self.raw_features[f"%-rsi-period-10_shift-1_{self.pair}_"
rsi_now = self.raw_features[f"%-rsi-period_10_shift-1_{pair}_"
f"{self.config['timeframe']}"].iloc[self._current_tick]
# reward agent for entering trades
@ -246,13 +248,13 @@ FreqAI also provides a built in episodic summary logger called `self.tensorboard
"""
def calculate_reward(self, action: int) -> float:
if not self._is_valid(action):
self.tensorboard_log("is_valid")
self.tensorboard_log("invalid")
return -2
```
!!! Note
The `self.tensorboard_log()` function is designed for tracking incremented objects only i.e. events, actions inside the training environment. If the event of interest is a float, the float can be passed as the second argument e.g. `self.tensorboard_log("float_metric1", 0.23)` would add 0.23 to `float_metric`. In this case you can also disable incrementing using `inc=False` parameter.
The `self.tensorboard_log()` function is designed for tracking incremented objects only i.e. events, actions inside the training environment. If the event of interest is a float, the float can be passed as the second argument e.g. `self.tensorboard_log("float_metric1", 0.23)`. In this case the metric values are not incremented.
### Choosing a base environment

View File

@ -128,6 +128,9 @@ The FreqAI specific parameter `label_period_candles` defines the offset (number
You can choose to adopt a continual learning scheme by setting `"continual_learning": true` in the config. By enabling `continual_learning`, after training an initial model from scratch, subsequent trainings will start from the final model state of the preceding training. This gives the new model a "memory" of the previous state. By default, this is set to `False` which means that all new models are trained from scratch, without input from previous models.
???+ danger "Continual learning enforces a constant parameter space"
Since `continual_learning` means that the model parameter space *cannot* change between trainings, `principal_component_analysis` is automatically disabled when `continual_learning` is enabled. Hint: PCA changes the parameter space and the number of features, learn more about PCA [here](freqai-feature-engineering.md#data-dimensionality-reduction-with-principal-component-analysis).
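For orientation, a minimal configuration sketch for this scheme might look as follows (shown as a Python dict for brevity; placing `principal_component_analysis` under `feature_parameters` follows the feature-engineering docs and is an assumption here):

```python
# Illustrative freqai configuration excerpt for continual learning
freqai = {
    "continual_learning": True,                 # warm-start each training from the previous model state
    "feature_parameters": {
        "principal_component_analysis": False,  # PCA is disabled automatically when continual_learning is on
    },
}
```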
## Hyperopt
You can hyperopt using the same command as for [typical Freqtrade hyperopt](hyperopt.md):

View File

@ -149,7 +149,7 @@ The below example assumes a timeframe of 1 hour:
* Locks each pair after selling for an additional 5 candles (`CooldownPeriod`), giving other pairs a chance to get filled.
* Stops trading for 4 hours (`4 * 1h candles`) if the last 2 days (`48 * 1h candles`) had 20 trades, which caused a max-drawdown of more than 20%. (`MaxDrawdown`).
* Stops trading if more than 4 stoploss occur for all pairs within a 1 day (`24 * 1h candles`) limit (`StoplossGuard`).
* Locks all pairs that had 4 Trades within the last 6 hours (`6 * 1h candles`) with a combined profit ratio of below 0.02 (<2%) (`LowProfitPairs`).
* Locks all pairs that had 2 Trades within the last 6 hours (`6 * 1h candles`) with a combined profit ratio of below 0.02 (<2%) (`LowProfitPairs`).
* Locks all pairs for 2 candles that had a profit of below 0.01 (<1%) within the last 24h (`24 * 1h candles`), a minimum of 4 trades.
``` python

View File

@ -42,14 +42,14 @@ Enable subscribing to an instance by adding the `external_message_consumer` sect
| `producers` | **Required.** List of producers <br> **Datatype:** Array.
| `producers.name` | **Required.** Name of this producer. This name must be used in calls to `get_producer_pairs()` and `get_producer_df()` if more than one producer is used.<br> **Datatype:** string
| `producers.host` | **Required.** The hostname or IP address from your producer.<br> **Datatype:** string
| `producers.port` | **Required.** The port matching the above host.<br> **Datatype:** string
| `producers.port` | **Required.** The port matching the above host.<br>*Defaults to `8080`.*<br> **Datatype:** Integer
| `producers.secure` | **Optional.** Use ssl in websockets connection. Default False.<br> **Datatype:** string
| `producers.ws_token` | **Required.** `ws_token` as configured on the producer.<br> **Datatype:** string
| | **Optional settings**
| `wait_timeout` | Timeout until we ping again if no message is received. <br>*Defaults to `300`.*<br> **Datatype:** Integer - in seconds.
| `wait_timeout` | Ping timeout <br>*Defaults to `10`.*<br> **Datatype:** Integer - in seconds.
| `ping_timeout` | Ping timeout <br>*Defaults to `10`.*<br> **Datatype:** Integer - in seconds.
| `sleep_time` | Sleep time before retrying to connect.<br>*Defaults to `10`.*<br> **Datatype:** Integer - in seconds.
| `remove_entry_exit_signals` | Remove signal columns from the dataframe (set them to 0) on dataframe receipt.<br>*Defaults to `10`.*<br> **Datatype:** Integer - in seconds.
| `remove_entry_exit_signals` | Remove signal columns from the dataframe (set them to 0) on dataframe receipt.<br>*Defaults to `False`.*<br> **Datatype:** Boolean.
| `message_size_limit` | Size limit per message<br>*Defaults to `8`.*<br> **Datatype:** Integer - Megabytes.
Instead of (or as well as) calculating indicators in `populate_indicators()` the follower instance listens on the connection to a producer instance's messages (or multiple producer instances in advanced configurations) and requests the producer's most recently analyzed dataframes for each pair in the active whitelist.
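Putting the table together, a consumer-side configuration could look roughly like the sketch below (values are illustrative; the surrounding `enabled` key is assumed from the section introduction, which is not part of this hunk):

```python
# Illustrative "external_message_consumer" section, mirroring the table above
external_message_consumer = {
    "enabled": True,                       # assumed key from the section intro (not shown in this hunk)
    "producers": [
        {
            "name": "default",             # used in get_producer_pairs() / get_producer_df()
            "host": "127.0.0.1",
            "port": 8080,                  # defaults to 8080
            "secure": False,
            "ws_token": "mysecretwstoken"  # must match the ws_token configured on the producer
        }
    ],
    "wait_timeout": 300,                   # seconds until we ping again without a message
    "ping_timeout": 10,                    # seconds
    "sleep_time": 10,                      # seconds before retrying a connection
    "remove_entry_exit_signals": False,
    "message_size_limit": 8                # megabytes
}
```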

View File

@ -1,6 +1,6 @@
markdown==3.3.7
mkdocs==1.4.2
mkdocs-material==9.0.15
mkdocs-material==9.1.5
mdx_truly_sane_lists==1.3
pymdown-extensions==9.9.2
pymdown-extensions==9.10
jinja2==3.1.2

View File

@ -51,7 +51,8 @@ During hyperopt, this runs only once at startup.
## Bot loop start
A simple callback which is called once at the start of every bot throttling iteration (roughly every 5 seconds, unless configured differently).
A simple callback which is called once at the start of every bot throttling iteration in dry/live mode (roughly every 5
seconds, unless configured differently) or once per candle in backtest/hyperopt mode.
This can be used to perform calculations which are pair independent (apply to all pairs), loading of external data, etc.
``` python
@ -61,11 +62,12 @@ class AwesomeStrategy(IStrategy):
# ... populate_* methods
def bot_loop_start(self, **kwargs) -> None:
def bot_loop_start(self, current_time: datetime, **kwargs) -> None:
"""
Called at the start of the bot iteration (one loop).
Might be used to perform pair-independent tasks
(e.g. gather some remote resource for comparison)
:param current_time: datetime object, containing the current datetime
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
"""
if self.config['runmode'].value in ('live', 'dry_run'):
@ -316,11 +318,11 @@ class AwesomeStrategy(IStrategy):
# evaluate highest to lowest, so that highest possible stop is used
if current_profit > 0.40:
return stoploss_from_open(0.25, current_profit, is_short=trade.is_short)
return stoploss_from_open(0.25, current_profit, is_short=trade.is_short, leverage=trade.leverage)
elif current_profit > 0.25:
return stoploss_from_open(0.15, current_profit, is_short=trade.is_short)
return stoploss_from_open(0.15, current_profit, is_short=trade.is_short, leverage=trade.leverage)
elif current_profit > 0.20:
return stoploss_from_open(0.07, current_profit, is_short=trade.is_short)
return stoploss_from_open(0.07, current_profit, is_short=trade.is_short, leverage=trade.leverage)
# return maximum stoploss value, keeping current stoploss price unchanged
return 1

View File

@ -881,7 +881,7 @@ All columns of the informative dataframe will be available on the returning data
### *stoploss_from_open()*
Stoploss values returned from `custom_stoploss` must specify a percentage relative to `current_rate`, but sometimes you may want to specify a stoploss relative to the open price instead. `stoploss_from_open()` is a helper function to calculate a stoploss value that can be returned from `custom_stoploss` which will be equivalent to the desired percentage above the open price.
Stoploss values returned from `custom_stoploss` must specify a percentage relative to `current_rate`, but sometimes you may want to specify a stoploss relative to the entry point instead. `stoploss_from_open()` is a helper function to calculate a stoploss value that can be returned from `custom_stoploss` which will be equivalent to the desired trade profit above the entry point.
??? Example "Returning a stoploss relative to the open price from the custom stoploss function"
@ -889,6 +889,8 @@ Stoploss values returned from `custom_stoploss` must specify a percentage relati
If we want a stop price at 7% above the open price we can call `stoploss_from_open(0.07, current_profit, False)` which will return `0.1157024793`. 11.57% below $121 is $107, which is the same as 7% above $100.
This function will consider leverage - so at 10x leverage, the actual stoploss would be 0.7% above $100 (0.7% * 10x = 7%).
``` python
@ -907,7 +909,7 @@ Stoploss values returned from `custom_stoploss` must specify a percentage relati
# once the profit has risen above 10%, keep the stoploss at 7% above the open price
if current_profit > 0.10:
return stoploss_from_open(0.07, current_profit, is_short=trade.is_short)
return stoploss_from_open(0.07, current_profit, is_short=trade.is_short, leverage=trade.leverage)
return 1
@ -1039,10 +1041,9 @@ from datetime import timedelta, datetime, timezone
# Within populate indicators (or populate_buy):
if self.config['runmode'].value in ('live', 'dry_run'):
# fetch closed trades for the last 2 days
trades = Trade.get_trades([Trade.pair == metadata['pair'],
Trade.open_date > datetime.utcnow() - timedelta(days=2),
Trade.is_open.is_(False),
]).all()
trades = Trade.get_trades_proxy(
pair=metadata['pair'], is_open=False,
open_date=datetime.now(timezone.utc) - timedelta(days=2))
# Analyze the conditions you'd like to lock the pair .... will probably be different for every strategy
sumprofit = sum(trade.close_profit for trade in trades)
if sumprofit < 0:

View File

@ -955,3 +955,47 @@ Print trades with id 2 and 3 as json
``` bash
freqtrade show-trades --db-url sqlite:///tradesv3.sqlite --trade-ids 2 3 --print-json
```
### Strategy-Updater
Updates the listed strategies, or all strategies within the strategies folder, to be v3 compliant.
If the command runs without `--strategy-list`, all strategies inside the strategies folder will be converted.
Your original strategy will remain available in the `user_data/strategies_orig_updater/` directory.
!!! Warning "Conversion results"
The strategy updater works on a "best effort" basis. Please do your due diligence and verify the results of the conversion.
We also recommend running a Python formatter (e.g. `black`) to format the results in a sane manner.
```
usage: freqtrade strategy-updater [-h] [-v] [--logfile FILE] [-V] [-c PATH]
[-d PATH] [--userdir PATH]
[--strategy-list STRATEGY_LIST [STRATEGY_LIST ...]]
options:
-h, --help show this help message and exit
--strategy-list STRATEGY_LIST [STRATEGY_LIST ...]
Provide a space-separated list of strategies to
backtest. Please note that timeframe needs to be set
either in config or via command line. When using this
together with `--export trades`, the strategy-name is
injected into the filename (so `backtest-data.json`
becomes `backtest-data-SampleStrategy.json`
Common arguments:
-v, --verbose Verbose mode (-vv for more, -vvv to get all messages).
--logfile FILE, --log-file FILE
Log to the file specified. Special values are:
'syslog', 'journald'. See the documentation for more
details.
-V, --version show program's version number and exit
-c PATH, --config PATH
Specify configuration file (default:
`userdir/config.json` or `config.json` whichever
exists). Multiple --config options may be used. Can be
set to `-` to read config from stdin.
-d PATH, --datadir PATH, --data-dir PATH
Path to directory with historical backtesting data.
--userdir PATH, --user-data-dir PATH
Path to userdata directory.
```

View File

@ -1,5 +1,5 @@
""" Freqtrade bot """
__version__ = '2023.3.dev'
__version__ = '2023.4.dev'
if 'dev' in __version__:
from pathlib import Path

View File

@ -22,5 +22,6 @@ from freqtrade.commands.optimize_commands import (start_backtesting, start_backt
start_edge, start_hyperopt)
from freqtrade.commands.pairlist_commands import start_test_pairlist
from freqtrade.commands.plot_commands import start_plot_dataframe, start_plot_profit
from freqtrade.commands.strategy_utils_commands import start_strategy_update
from freqtrade.commands.trade_commands import start_trading
from freqtrade.commands.webserver_commands import start_webserver

View File

@ -40,8 +40,8 @@ def setup_analyze_configuration(args: Dict[str, Any], method: RunMode) -> Dict[s
if (not Path(signals_file).exists()):
raise OperationalException(
(f"Cannot find latest backtest signals file: {signals_file}."
"Run backtesting with `--export signals`.")
f"Cannot find latest backtest signals file: {signals_file}."
"Run backtesting with `--export signals`."
)
return config

View File

@ -111,10 +111,13 @@ ARGS_ANALYZE_ENTRIES_EXITS = ["exportfilename", "analysis_groups", "enter_reason
NO_CONF_REQURIED = ["convert-data", "convert-trade-data", "download-data", "list-timeframes",
"list-markets", "list-pairs", "list-strategies", "list-freqaimodels",
"list-data", "hyperopt-list", "hyperopt-show", "backtest-filter",
"plot-dataframe", "plot-profit", "show-trades", "trades-to-ohlcv"]
"plot-dataframe", "plot-profit", "show-trades", "trades-to-ohlcv",
"strategy-updater"]
NO_CONF_ALLOWED = ["create-userdir", "list-exchanges", "new-strategy"]
ARGS_STRATEGY_UTILS = ["strategy_list", "strategy_path", "recursive_strategy_search"]
class Arguments:
"""
@ -198,8 +201,8 @@ class Arguments:
start_list_freqAI_models, start_list_markets,
start_list_strategies, start_list_timeframes,
start_new_config, start_new_strategy, start_plot_dataframe,
start_plot_profit, start_show_trades, start_test_pairlist,
start_trading, start_webserver)
start_plot_profit, start_show_trades, start_strategy_update,
start_test_pairlist, start_trading, start_webserver)
subparsers = self.parser.add_subparsers(dest='command',
# Use custom message when no subhandler is added
@ -440,3 +443,11 @@ class Arguments:
parents=[_common_parser])
webserver_cmd.set_defaults(func=start_webserver)
self._build_args(optionlist=ARGS_WEBSERVER, parser=webserver_cmd)
# Add strategy_updater subcommand
strategy_updater_cmd = subparsers.add_parser('strategy-updater',
help='updates outdated strategy '
'files to the current version',
parents=[_common_parser])
strategy_updater_cmd.set_defaults(func=start_strategy_update)
self._build_args(optionlist=ARGS_STRATEGY_UTILS, parser=strategy_updater_cmd)

View File

@ -204,11 +204,14 @@ def start_list_data(args: Dict[str, Any]) -> None:
pair, timeframe, candle_type,
*dhc.ohlcv_data_min_max(pair, timeframe, candle_type)
) for pair, timeframe, candle_type in paircombs]
print(tabulate([
(pair, timeframe, candle_type,
start.strftime(DATETIME_PRINT_FORMAT),
end.strftime(DATETIME_PRINT_FORMAT))
for pair, timeframe, candle_type, start, end in paircombs1
for pair, timeframe, candle_type, start, end in sorted(
paircombs1,
key=lambda x: (x[0], timeframe_to_minutes(x[1]), x[2]))
],
headers=("Pair", "Timeframe", "Type", 'From', 'To'),
tablefmt='psql', stralign='right'))

View File

@ -1,7 +1,7 @@
import logging
from typing import Any, Dict
from sqlalchemy import func
from sqlalchemy import func, select
from freqtrade.configuration.config_setup import setup_utils_configuration
from freqtrade.enums import RunMode
@ -20,7 +20,7 @@ def start_convert_db(args: Dict[str, Any]) -> None:
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
init_db(config['db_url'])
session_target = Trade._session
session_target = Trade.session
init_db(config['db_url_from'])
logger.info("Starting db migration.")
@ -36,16 +36,16 @@ def start_convert_db(args: Dict[str, Any]) -> None:
session_target.commit()
for pairlock in PairLock.query:
for pairlock in PairLock.get_all_locks():
pairlock_count += 1
make_transient(pairlock)
session_target.add(pairlock)
session_target.commit()
# Update sequences
max_trade_id = session_target.query(func.max(Trade.id)).scalar()
max_order_id = session_target.query(func.max(Order.id)).scalar()
max_pairlock_id = session_target.query(func.max(PairLock.id)).scalar()
max_trade_id = session_target.scalar(select(func.max(Trade.id)))
max_order_id = session_target.scalar(select(func.max(Order.id)))
max_pairlock_id = session_target.scalar(select(func.max(PairLock.id)))
set_sequence_ids(session_target.get_bind(),
trade_id=max_trade_id,

View File

@ -0,0 +1,55 @@
import logging
import sys
import time
from pathlib import Path
from typing import Any, Dict
from freqtrade.configuration import setup_utils_configuration
from freqtrade.enums import RunMode
from freqtrade.resolvers import StrategyResolver
from freqtrade.strategy.strategyupdater import StrategyUpdater
logger = logging.getLogger(__name__)
def start_strategy_update(args: Dict[str, Any]) -> None:
"""
Start the strategy updating script
:param args: Cli args from Arguments()
:return: None
"""
if sys.version_info < (3, 9):  # pragma: no cover
sys.exit("Freqtrade strategy updater requires Python version >= 3.9")
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
strategy_objs = StrategyResolver.search_all_objects(
config, enum_failed=False, recursive=config.get('recursive_strategy_search', False))
filtered_strategy_objs = []
if args['strategy_list']:
filtered_strategy_objs = [
strategy_obj for strategy_obj in strategy_objs
if strategy_obj['name'] in args['strategy_list']
]
else:
# Use all available entries.
filtered_strategy_objs = strategy_objs
processed_locations = set()
for strategy_obj in filtered_strategy_objs:
if strategy_obj['location'] not in processed_locations:
processed_locations.add(strategy_obj['location'])
start_conversion(strategy_obj, config)
def start_conversion(strategy_obj, config):
print(f"Conversion of {Path(strategy_obj['location']).name} started.")
instance_strategy_updater = StrategyUpdater()
start = time.perf_counter()
instance_strategy_updater.start(config, strategy_obj)
elapsed = time.perf_counter() - start
print(f"Conversion of {Path(strategy_obj['location']).name} took {elapsed:.1f} seconds.")

View File

@ -27,10 +27,7 @@ def _extend_validator(validator_class):
if 'default' in subschema:
instance.setdefault(prop, subschema['default'])
for error in validate_properties(
validator, properties, instance, schema,
):
yield error
yield from validate_properties(validator, properties, instance, schema)
return validators.extend(
validator_class, {'properties': set_defaults}

View File

@ -36,9 +36,10 @@ AVAILABLE_PAIRLISTS = ['StaticPairList', 'VolumePairList', 'ProducerPairList', '
'AgeFilter', 'OffsetFilter', 'PerformanceFilter',
'PrecisionFilter', 'PriceFilter', 'RangeStabilityFilter',
'ShuffleFilter', 'SpreadFilter', 'VolatilityFilter']
AVAILABLE_PROTECTIONS = ['CooldownPeriod', 'LowProfitPairs', 'MaxDrawdown', 'StoplossGuard']
AVAILABLE_DATAHANDLERS_TRADES = ['json', 'jsongz', 'hdf5']
AVAILABLE_DATAHANDLERS = AVAILABLE_DATAHANDLERS_TRADES + ['feather', 'parquet']
AVAILABLE_PROTECTIONS = ['CooldownPeriod',
'LowProfitPairs', 'MaxDrawdown', 'StoplossGuard']
AVAILABLE_DATAHANDLERS_TRADES = ['json', 'jsongz', 'hdf5', 'feather']
AVAILABLE_DATAHANDLERS = AVAILABLE_DATAHANDLERS_TRADES + ['parquet']
BACKTEST_BREAKDOWNS = ['day', 'week', 'month']
BACKTEST_CACHE_AGE = ['none', 'day', 'week', 'month']
BACKTEST_CACHE_DEFAULT = 'day'
@ -588,6 +589,7 @@ CONF_SCHEMA = {
"rl_config": {
"type": "object",
"properties": {
"drop_ohlc_from_features": {"type": "boolean", "default": False},
"train_cycles": {"type": "integer"},
"max_trade_duration_candles": {"type": "integer"},
"add_state_info": {"type": "boolean", "default": False},
@ -596,7 +598,7 @@ CONF_SCHEMA = {
"model_type": {"type": "string", "default": "PPO"},
"policy_type": {"type": "string", "default": "MlpPolicy"},
"net_arch": {"type": "array", "default": [128, 128]},
"randomize_startinng_position": {"type": "boolean", "default": False},
"randomize_starting_position": {"type": "boolean", "default": False},
"model_reward_parameters": {
"type": "object",
"properties": {

View File

@ -373,7 +373,7 @@ def load_trades_from_db(db_url: str, strategy: Optional[str] = None) -> pd.DataF
filters = []
if strategy:
filters.append(Trade.strategy == strategy)
trades = trade_list_to_dataframe(Trade.get_trades(filters).all())
trades = trade_list_to_dataframe(list(Trade.get_trades(filters).all()))
return trades

View File

@ -21,6 +21,7 @@ from freqtrade.exchange import Exchange, timeframe_to_seconds
from freqtrade.exchange.types import OrderBook
from freqtrade.misc import append_candles_to_dataframe
from freqtrade.rpc import RPCManager
from freqtrade.rpc.rpc_types import RPCAnalyzedDFMsg
from freqtrade.util import PeriodicCache
@ -118,8 +119,7 @@ class DataProvider:
:param new_candle: This is a new candle
"""
if self.__rpc:
self.__rpc.send_msg(
{
msg: RPCAnalyzedDFMsg = {
'type': RPCMessageType.ANALYZED_DF,
'data': {
'key': pair_key,
@ -127,7 +127,7 @@ class DataProvider:
'la': datetime.now(timezone.utc)
}
}
)
self.__rpc.send_msg(msg)
if new_candle:
self.__rpc.send_msg({
'type': RPCMessageType.NEW_CANDLE,

View File

@ -4,7 +4,7 @@ from typing import Optional
from pandas import DataFrame, read_feather, to_datetime
from freqtrade.configuration import TimeRange
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, TradeList
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS, TradeList
from freqtrade.enums import CandleType
from .idatahandler import IDataHandler
@ -92,12 +92,11 @@ class FeatherDataHandler(IDataHandler):
:param data: List of Lists containing trade data,
column sequence as in DEFAULT_TRADES_COLUMNS
"""
# filename = self._pair_trades_filename(self._datadir, pair)
filename = self._pair_trades_filename(self._datadir, pair)
self.create_dir_if_needed(filename)
raise NotImplementedError()
# array = pa.array(data)
# array
# feather.write_feather(data, filename)
tradesdata = DataFrame(data, columns=DEFAULT_TRADES_COLUMNS)
tradesdata.to_feather(filename, compression_level=9, compression='lz4')
def trades_append(self, pair: str, data: TradeList):
"""
@ -116,14 +115,13 @@ class FeatherDataHandler(IDataHandler):
:param timerange: Timerange to load trades for - currently not implemented
:return: List of trades
"""
raise NotImplementedError()
# filename = self._pair_trades_filename(self._datadir, pair)
# tradesdata = misc.file_load_json(filename)
filename = self._pair_trades_filename(self._datadir, pair)
if not filename.exists():
return []
# if not tradesdata:
# return []
tradesdata = read_feather(filename)
# return tradesdata
return tradesdata.values.tolist()
@classmethod
def _get_file_extension(cls):

View File

@ -4,6 +4,7 @@ from enum import Enum
class RPCMessageType(str, Enum):
STATUS = 'status'
WARNING = 'warning'
EXCEPTION = 'exception'
STARTUP = 'startup'
ENTRY = 'entry'

View File

@ -8,15 +8,15 @@ from freqtrade.exchange.bitpanda import Bitpanda
from freqtrade.exchange.bittrex import Bittrex
from freqtrade.exchange.bybit import Bybit
from freqtrade.exchange.coinbasepro import Coinbasepro
from freqtrade.exchange.exchange_utils import (amount_to_contract_precision, amount_to_contracts,
amount_to_precision, available_exchanges,
ccxt_exchanges, contracts_to_amount,
date_minus_candles, is_exchange_known_ccxt,
market_is_active, price_to_precision,
timeframe_to_minutes, timeframe_to_msecs,
timeframe_to_next_date, timeframe_to_prev_date,
timeframe_to_seconds, validate_exchange,
validate_exchanges)
from freqtrade.exchange.exchange_utils import (ROUND_DOWN, ROUND_UP, amount_to_contract_precision,
amount_to_contracts, amount_to_precision,
available_exchanges, ccxt_exchanges,
contracts_to_amount, date_minus_candles,
is_exchange_known_ccxt, market_is_active,
price_to_precision, timeframe_to_minutes,
timeframe_to_msecs, timeframe_to_next_date,
timeframe_to_prev_date, timeframe_to_seconds,
validate_exchange, validate_exchanges)
from freqtrade.exchange.gate import Gate
from freqtrade.exchange.hitbtc import Hitbtc
from freqtrade.exchange.huobi import Huobi

View File

@ -23,7 +23,7 @@ class Binance(Exchange):
_ft_has: Dict = {
"stoploss_on_exchange": True,
"stoploss_order_types": {"limit": "stop_loss_limit"},
"order_time_in_force": ['GTC', 'FOK', 'IOC'],
"order_time_in_force": ["GTC", "FOK", "IOC", "PO"],
"ohlcv_candle_limit": 1000,
"trades_pagination": "id",
"trades_pagination_arg": "fromId",
@ -31,6 +31,7 @@ class Binance(Exchange):
}
_ft_has_futures: Dict = {
"stoploss_order_types": {"limit": "stop", "market": "stop_market"},
"order_time_in_force": ["GTC", "FOK", "IOC"],
"tickers_have_price": False,
"floor_leverage": True,
"stop_price_type_field": "workingType",

File diff suppressed because it is too large

View File

@ -114,7 +114,7 @@ class Bybit(Exchange):
data = [[x['timestamp'], x['fundingRate'], 0, 0, 0, 0] for x in data]
return data
def _lev_prep(self, pair: str, leverage: float, side: BuySell):
def _lev_prep(self, pair: str, leverage: float, side: BuySell, accept_fail: bool = False):
if self.trading_mode != TradingMode.SPOT:
params = {'leverage': leverage}
self.set_margin_mode(pair, self.margin_mode, accept_fail=True, params=params)

View File

@ -30,13 +30,14 @@ from freqtrade.exceptions import (DDosProtection, ExchangeError, InsufficientFun
RetryableOrderError, TemporaryError)
from freqtrade.exchange.common import (API_FETCH_ORDER_RETRY_COUNT, remove_credentials, retrier,
retrier_async)
from freqtrade.exchange.exchange_utils import (CcxtModuleType, amount_to_contract_precision,
amount_to_contracts, amount_to_precision,
contracts_to_amount, date_minus_candles,
is_exchange_known_ccxt, market_is_active,
price_to_precision, timeframe_to_minutes,
timeframe_to_msecs, timeframe_to_next_date,
timeframe_to_prev_date, timeframe_to_seconds)
from freqtrade.exchange.exchange_utils import (ROUND, ROUND_DOWN, ROUND_UP, CcxtModuleType,
amount_to_contract_precision, amount_to_contracts,
amount_to_precision, contracts_to_amount,
date_minus_candles, is_exchange_known_ccxt,
market_is_active, price_to_precision,
timeframe_to_minutes, timeframe_to_msecs,
timeframe_to_next_date, timeframe_to_prev_date,
timeframe_to_seconds)
from freqtrade.exchange.types import OHLCVResponse, OrderBook, Ticker, Tickers
from freqtrade.misc import (chunks, deep_merge_dicts, file_dump_json, file_load_json,
safe_value_fallback2)
@ -59,8 +60,8 @@ class Exchange:
# or by specifying them in the configuration.
_ft_has_default: Dict = {
"stoploss_on_exchange": False,
"stop_price_param": "stopPrice",
"order_time_in_force": ["GTC"],
"time_in_force_parameter": "timeInForce",
"ohlcv_params": {},
"ohlcv_candle_limit": 500,
"ohlcv_has_history": True, # Some exchanges (Kraken) don't provide history via ohlcv
@ -69,6 +70,7 @@ class Exchange:
# Check https://github.com/ccxt/ccxt/issues/10767 for removal of ohlcv_volume_currency
"ohlcv_volume_currency": "base", # "base" or "quote"
"tickers_have_quoteVolume": True,
"tickers_have_bid_ask": True, # bid / ask empty for fetch_tickers
"tickers_have_price": True,
"trades_pagination": "time", # Possible are "time" or "id"
"trades_pagination_arg": "since",
@ -80,6 +82,8 @@ class Exchange:
"fee_cost_in_contracts": False, # Fee cost needs contract conversion
"needs_trading_fees": False, # use fetch_trading_fees to cache fees
"order_props_in_contracts": ['amount', 'cost', 'filled', 'remaining'],
# Override createMarketBuyOrderRequiresPrice where ccxt has it wrong
"marketOrderRequiresPrice": False,
}
_ft_has: Dict = {}
_ft_has_futures: Dict = {}
@ -205,6 +209,8 @@ class Exchange:
and self._api_async.session):
logger.debug("Closing async ccxt session.")
self.loop.run_until_complete(self._api_async.close())
if self.loop and not self.loop.is_closed():
self.loop.close()
def validate_config(self, config):
# Check if timeframe is available
@ -730,12 +736,14 @@ class Exchange:
"""
return amount_to_precision(amount, self.get_precision_amount(pair), self.precisionMode)
def price_to_precision(self, pair: str, price: float) -> float:
def price_to_precision(self, pair: str, price: float, *, rounding_mode: int = ROUND) -> float:
"""
Returns the price rounded up to the precision the Exchange accepts.
Rounds up
Returns the price rounded to the precision the Exchange accepts.
The default price_rounding_mode in conf is ROUND.
For stoploss calculations, must use ROUND_UP for longs, and ROUND_DOWN for shorts.
"""
return price_to_precision(price, self.get_precision_price(pair), self.precisionMode)
return price_to_precision(price, self.get_precision_price(pair),
self.precisionMode, rounding_mode=rounding_mode)
def price_get_one_pip(self, pair: str, price: float) -> float:
"""
@ -758,12 +766,12 @@ class Exchange:
return self._get_stake_amount_limit(pair, price, stoploss, 'min', leverage)
def get_max_pair_stake_amount(self, pair: str, price: float, leverage: float = 1.0) -> float:
max_stake_amount = self._get_stake_amount_limit(pair, price, 0.0, 'max')
max_stake_amount = self._get_stake_amount_limit(pair, price, 0.0, 'max', leverage)
if max_stake_amount is None:
# * Should never be executed
raise OperationalException(f'{self.name}.get_max_pair_stake_amount should'
'never set max_stake_amount to None')
return max_stake_amount / leverage
return max_stake_amount
def _get_stake_amount_limit(
self,
@ -781,43 +789,41 @@ class Exchange:
except KeyError:
raise ValueError(f"Can't get market information for symbol {pair}")
if isMin:
# reserve some percent defined in config (5% default) + stoploss
margin_reserve: float = 1.0 + self._config.get('amount_reserve_percent',
DEFAULT_AMOUNT_RESERVE_PERCENT)
stoploss_reserve = (
margin_reserve / (1 - abs(stoploss)) if abs(stoploss) != 1 else 1.5
)
# it should not be more than 50%
stoploss_reserve = max(min(stoploss_reserve, 1.5), 1)
else:
margin_reserve = 1.0
stoploss_reserve = 1.0
stake_limits = []
limits = market['limits']
if (limits['cost'][limit] is not None):
stake_limits.append(
self._contracts_to_amount(
pair,
limits['cost'][limit]
)
self._contracts_to_amount(pair, limits['cost'][limit]) * stoploss_reserve
)
if (limits['amount'][limit] is not None):
stake_limits.append(
self._contracts_to_amount(
pair,
limits['amount'][limit] * price
)
self._contracts_to_amount(pair, limits['amount'][limit]) * price * margin_reserve
)
if not stake_limits:
return None if isMin else float('inf')
# reserve some percent defined in config (5% default) + stoploss
amount_reserve_percent = 1.0 + self._config.get('amount_reserve_percent',
DEFAULT_AMOUNT_RESERVE_PERCENT)
amount_reserve_percent = (
amount_reserve_percent / (1 - abs(stoploss)) if abs(stoploss) != 1 else 1.5
)
# it should not be more than 50%
amount_reserve_percent = max(min(amount_reserve_percent, 1.5), 1)
# The value returned should satisfy both limits: for amount (base currency) and
# for cost (quote, stake currency), so max() is used here.
# See also #2575 at github.
return self._get_stake_amount_considering_leverage(
max(stake_limits) * amount_reserve_percent,
max(stake_limits) if isMin else min(stake_limits),
leverage or 1.0
) if isMin else min(stake_limits)
)
def _get_stake_amount_considering_leverage(self, stake_amount: float, leverage: float) -> float:
"""
@ -1018,10 +1024,10 @@ class Exchange:
# Order handling
def _lev_prep(self, pair: str, leverage: float, side: BuySell):
def _lev_prep(self, pair: str, leverage: float, side: BuySell, accept_fail: bool = False):
if self.trading_mode != TradingMode.SPOT:
self.set_margin_mode(pair, self.margin_mode)
self._set_leverage(leverage, pair)
self.set_margin_mode(pair, self.margin_mode, accept_fail)
self._set_leverage(leverage, pair, accept_fail)
def _get_params(
self,
@ -1033,12 +1039,18 @@ class Exchange:
) -> Dict:
params = self._params.copy()
if time_in_force != 'GTC' and ordertype != 'market':
param = self._ft_has.get('time_in_force_parameter', '')
params.update({param: time_in_force.upper()})
params.update({'timeInForce': time_in_force.upper()})
if reduceOnly:
params.update({'reduceOnly': True})
return params
def _order_needs_price(self, ordertype: str) -> bool:
return (
ordertype != 'market'
or self._api.options.get("createMarketBuyOrderRequiresPrice", False)
or self._ft_has.get('marketOrderRequiresPrice', False)
)
def create_order(
self,
*,
@ -1061,8 +1073,7 @@ class Exchange:
try:
# Set the precision for amount and price(rate) as accepted by the exchange
amount = self.amount_to_precision(pair, self._amount_to_contracts(pair, amount))
needs_price = (ordertype != 'market'
or self._api.options.get("createMarketBuyOrderRequiresPrice", False))
needs_price = self._order_needs_price(ordertype)
rate_for_order = self.price_to_precision(pair, rate) if needs_price else None
if not reduceOnly:
@ -1086,7 +1097,7 @@ class Exchange:
f'Tried to {side} amount {amount} at rate {rate}.'
f'Message: {e}') from e
except ccxt.InvalidOrder as e:
raise ExchangeError(
raise InvalidOrderException(
f'Could not create {ordertype} {side} order on market {pair}. '
f'Tried to {side} amount {amount} at rate {rate}. '
f'Message: {e}') from e
@ -1105,11 +1116,11 @@ class Exchange:
"""
if not self._ft_has.get('stoploss_on_exchange'):
raise OperationalException(f"stoploss is not implemented for {self.name}.")
price_param = self._ft_has['stop_price_param']
return (
order.get('stopPrice', None) is None
or ((side == "sell" and stop_loss > float(order['stopPrice'])) or
(side == "buy" and stop_loss < float(order['stopPrice'])))
order.get(price_param, None) is None
or ((side == "sell" and stop_loss > float(order[price_param])) or
(side == "buy" and stop_loss < float(order[price_param])))
)
def _get_stop_order_type(self, user_order_type) -> Tuple[str, str]:
@ -1136,14 +1147,21 @@ class Exchange:
"sell" else (stop_price >= limit_rate))
# Ensure rate is less than stop price
if bad_stop_price:
raise OperationalException(
'In stoploss limit order, stop price should be more than limit price')
# This can for example happen if the stop / liquidation price is set to 0
# Which is possible if a market-order closes right away.
# The InvalidOrderException will bubble up to exit_positions, where it will be
# handled gracefully.
raise InvalidOrderException(
"In stoploss limit order, stop price should be more than limit price. "
f"Stop price: {stop_price}, Limit price: {limit_rate}, "
f"Limit Price pct: {limit_price_pct}"
)
return limit_rate
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:
params = self._params.copy()
# Verify if stopPrice works for your exchange!
params.update({'stopPrice': stop_price})
# Verify if stopPrice works for your exchange, else configure stop_price_param
params.update({self._ft_has['stop_price_param']: stop_price})
return params
@retrier(retries=0)
@ -1169,12 +1187,12 @@ class Exchange:
user_order_type = order_types.get('stoploss', 'market')
ordertype, user_order_type = self._get_stop_order_type(user_order_type)
stop_price_norm = self.price_to_precision(pair, stop_price)
round_mode = ROUND_DOWN if side == 'buy' else ROUND_UP
stop_price_norm = self.price_to_precision(pair, stop_price, rounding_mode=round_mode)
limit_rate = None
if user_order_type == 'limit':
limit_rate = self._get_stop_limit_rate(stop_price, order_types, side)
limit_rate = self.price_to_precision(pair, limit_rate)
limit_rate = self.price_to_precision(pair, limit_rate, rounding_mode=round_mode)
if self._config['dry_run']:
dry_order = self.create_dry_run_order(
@ -1200,7 +1218,7 @@ class Exchange:
amount = self.amount_to_precision(pair, self._amount_to_contracts(pair, amount))
self._lev_prep(pair, leverage, side)
self._lev_prep(pair, leverage, side, accept_fail=True)
order = self._api.create_order(symbol=pair, type=ordertype, side=side,
amount=amount, price=limit_rate, params=params)
self._log_exchange_response('create_stoploss_order', order)
@ -2525,7 +2543,6 @@ class Exchange:
self,
leverage: float,
pair: Optional[str] = None,
trading_mode: Optional[TradingMode] = None,
accept_fail: bool = False,
):
"""
@ -2543,7 +2560,7 @@ class Exchange:
self._log_exchange_response('set_leverage', res)
except ccxt.DDoSProtection as e:
raise DDosProtection(e) from e
except ccxt.BadRequest as e:
except (ccxt.BadRequest, ccxt.InsufficientFunds) as e:
if not accept_fail:
raise TemporaryError(
f'Could not set leverage due to {e.__class__.__name__}. Message: {e}') from e
@ -2754,10 +2771,10 @@ class Exchange:
raise OperationalException(
f"{self.name} does not support {self.margin_mode} {self.trading_mode}")
isolated_liq = None
liquidation_price = None
if self._config['dry_run'] or not self.exchange_has("fetchPositions"):
isolated_liq = self.dry_run_liquidation_price(
liquidation_price = self.dry_run_liquidation_price(
pair=pair,
open_rate=open_rate,
is_short=is_short,
@ -2772,16 +2789,16 @@ class Exchange:
positions = self.fetch_positions(pair)
if len(positions) > 0:
pos = positions[0]
isolated_liq = pos['liquidationPrice']
liquidation_price = pos['liquidationPrice']
if isolated_liq is not None:
buffer_amount = abs(open_rate - isolated_liq) * self.liquidation_buffer
isolated_liq = (
isolated_liq - buffer_amount
if liquidation_price is not None:
buffer_amount = abs(open_rate - liquidation_price) * self.liquidation_buffer
liquidation_price_buffer = (
liquidation_price - buffer_amount
if is_short else
isolated_liq + buffer_amount
liquidation_price + buffer_amount
)
return isolated_liq
return max(liquidation_price_buffer, 0.0)
else:
return None

View File

@ -2,11 +2,12 @@
Exchange support utils
"""
from datetime import datetime, timedelta, timezone
from math import ceil
from math import ceil, floor
from typing import Any, Dict, List, Optional, Tuple
import ccxt
from ccxt import ROUND_DOWN, ROUND_UP, TICK_SIZE, TRUNCATE, decimal_to_precision
from ccxt import (DECIMAL_PLACES, ROUND, ROUND_DOWN, ROUND_UP, SIGNIFICANT_DIGITS, TICK_SIZE,
TRUNCATE, decimal_to_precision)
from freqtrade.exchange.common import BAD_EXCHANGES, EXCHANGE_HAS_OPTIONAL, EXCHANGE_HAS_REQUIRED
from freqtrade.util import FtPrecise
@ -219,35 +220,51 @@ def amount_to_contract_precision(
return amount
def price_to_precision(price: float, price_precision: Optional[float],
precisionMode: Optional[int]) -> float:
def price_to_precision(
price: float,
price_precision: Optional[float],
precisionMode: Optional[int],
*,
rounding_mode: int = ROUND,
) -> float:
"""
Returns the price rounded up to the precision the Exchange accepts.
Returns the price rounded to the precision the Exchange accepts.
Partial Re-implementation of ccxt internal method decimal_to_precision(),
which does not support rounding up
which does not support rounding up.
For stoploss calculations, must use ROUND_UP for longs, and ROUND_DOWN for shorts.
TODO: If ccxt supports ROUND_UP for decimal_to_precision(), we could remove this and
align with amount_to_precision().
!!! Rounds up
:param price: price to convert
:param price_precision: price precision to use. Used from markets[pair]['precision']['price']
:param precisionMode: precision mode to use. Should be used from precisionMode
one of ccxt's DECIMAL_PLACES, SIGNIFICANT_DIGITS, or TICK_SIZE
:param rounding_mode: rounding mode to use. Defaults to ROUND
:return: price rounded up to the precision the Exchange accepts
"""
if price_precision is not None and precisionMode is not None:
# price = float(decimal_to_precision(price, rounding_mode=ROUND,
# precision=price_precision,
# counting_mode=self.precisionMode,
# ))
if precisionMode == TICK_SIZE:
if rounding_mode == ROUND:
ticks = price / price_precision
rounded_ticks = round(ticks)
return rounded_ticks * price_precision
precision = FtPrecise(price_precision)
price_str = FtPrecise(price)
missing = price_str % precision
if not missing == FtPrecise("0"):
price = round(float(str(price_str - missing + precision)), 14)
else:
symbol_prec = price_precision
big_price = price * pow(10, symbol_prec)
price = ceil(big_price) / pow(10, symbol_prec)
return round(float(str(price_str - missing + precision)), 14)
return price
elif precisionMode in (SIGNIFICANT_DIGITS, DECIMAL_PLACES):
ndigits = round(price_precision)
if rounding_mode == ROUND:
return round(price, ndigits)
ticks = price * (10**ndigits)
if rounding_mode == ROUND_UP:
return ceil(ticks) / (10**ndigits)
if rounding_mode == TRUNCATE:
return int(ticks) / (10**ndigits)
if rounding_mode == ROUND_DOWN:
return floor(ticks) / (10**ndigits)
raise ValueError(f"Unknown rounding_mode {rounding_mode}")
raise ValueError(f"Unknown precisionMode {precisionMode}")
return price
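The helper now honours an explicit rounding mode instead of always rounding up. A usage sketch with hypothetical values, exercising the DECIMAL_PLACES branch (the ccxt constants are the ones imported above):

    from ccxt import DECIMAL_PLACES, ROUND, ROUND_DOWN, ROUND_UP, TRUNCATE
    from freqtrade.exchange import price_to_precision

    price = 2.34559
    # Hypothetical market with two decimal places of price precision:
    price_to_precision(price, 2, DECIMAL_PLACES, rounding_mode=ROUND)       # 2.35 (nearest)
    price_to_precision(price, 2, DECIMAL_PLACES, rounding_mode=ROUND_UP)    # 2.35
    price_to_precision(price, 2, DECIMAL_PLACES, rounding_mode=ROUND_DOWN)  # 2.34
    price_to_precision(price, 2, DECIMAL_PLACES, rounding_mode=TRUNCATE)    # 2.34

Stoploss handling elsewhere in this change passes ROUND_UP for longs and ROUND_DOWN for shorts, so the rounded stop price never lands on the wrong side of the intended trigger.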

View File

@ -5,7 +5,6 @@ from typing import Any, Dict, List, Optional, Tuple
from freqtrade.constants import BuySell
from freqtrade.enums import MarginMode, PriceType, TradingMode
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import Exchange
from freqtrade.misc import safe_value_fallback2
@ -28,10 +27,13 @@ class Gate(Exchange):
"order_time_in_force": ['GTC', 'IOC'],
"stoploss_order_types": {"limit": "limit"},
"stoploss_on_exchange": True,
"marketOrderRequiresPrice": True,
}
_ft_has_futures: Dict = {
"needs_trading_fees": True,
"marketOrderRequiresPrice": False,
"tickers_have_bid_ask": False,
"fee_cost_in_contracts": False, # Set explicitly to false for clarity
"order_props_in_contracts": ['amount', 'filled', 'remaining'],
"stop_price_type_field": "price_type",
@ -49,14 +51,6 @@ class Gate(Exchange):
(TradingMode.FUTURES, MarginMode.ISOLATED)
]
def validate_ordertypes(self, order_types: Dict) -> None:
if self.trading_mode != TradingMode.FUTURES:
if any(v == 'market' for k, v in order_types.items()):
raise OperationalException(
f'Exchange {self.name} does not support market orders.')
super().validate_stop_ordertypes(order_types)
def _get_params(
self,
side: BuySell,
@ -74,8 +68,7 @@ class Gate(Exchange):
)
if ordertype == 'market' and self.trading_mode == TradingMode.FUTURES:
params['type'] = 'market'
param = self._ft_has.get('time_in_force_parameter', '')
params.update({param: 'IOC'})
params.update({'timeInForce': 'IOC'})
return params
def get_trades_for_order(self, order_id: str, pair: str, since: datetime,

View File

@ -12,6 +12,7 @@ from freqtrade.exceptions import (DDosProtection, InsufficientFundsError, Invali
OperationalException, TemporaryError)
from freqtrade.exchange import Exchange
from freqtrade.exchange.common import retrier
from freqtrade.exchange.exchange_utils import ROUND_DOWN, ROUND_UP
from freqtrade.exchange.types import Tickers
@ -109,6 +110,7 @@ class Kraken(Exchange):
if self.trading_mode == TradingMode.FUTURES:
params.update({'reduceOnly': True})
round_mode = ROUND_DOWN if side == 'buy' else ROUND_UP
if order_types.get('stoploss', 'market') == 'limit':
ordertype = "stop-loss-limit"
limit_price_pct = order_types.get('stoploss_on_exchange_limit_ratio', 0.99)
@ -116,11 +118,11 @@ class Kraken(Exchange):
limit_rate = stop_price * limit_price_pct
else:
limit_rate = stop_price * (2 - limit_price_pct)
params['price2'] = self.price_to_precision(pair, limit_rate)
params['price2'] = self.price_to_precision(pair, limit_rate, rounding_mode=round_mode)
else:
ordertype = "stop-loss"
stop_price = self.price_to_precision(pair, stop_price)
stop_price = self.price_to_precision(pair, stop_price, rounding_mode=round_mode)
if self._config['dry_run']:
dry_order = self.create_dry_run_order(
@ -158,7 +160,6 @@ class Kraken(Exchange):
self,
leverage: float,
pair: Optional[str] = None,
trading_mode: Optional[TradingMode] = None,
accept_fail: bool = False,
):
"""

View File

@ -1,14 +1,16 @@
import logging
from typing import Dict, List, Optional, Tuple
from typing import Any, Dict, List, Optional, Tuple
import ccxt
from freqtrade.constants import BuySell
from freqtrade.enums import CandleType, MarginMode, TradingMode
from freqtrade.enums.pricetype import PriceType
from freqtrade.exceptions import DDosProtection, OperationalException, TemporaryError
from freqtrade.exceptions import (DDosProtection, OperationalException, RetryableOrderError,
TemporaryError)
from freqtrade.exchange import Exchange, date_minus_candles
from freqtrade.exchange.common import retrier
from freqtrade.misc import safe_value_fallback2
logger = logging.getLogger(__name__)
@ -24,11 +26,14 @@ class Okx(Exchange):
"ohlcv_candle_limit": 100, # Warning, special case with data prior to X months
"mark_ohlcv_timeframe": "4h",
"funding_fee_timeframe": "8h",
"stoploss_order_types": {"limit": "limit"},
"stoploss_on_exchange": True,
"stop_price_param": "stopLossPrice",
}
_ft_has_futures: Dict = {
"tickers_have_quoteVolume": False,
"fee_cost_in_contracts": True,
"stop_price_type_field": "tpTriggerPxType",
"stop_price_type_field": "slTriggerPxType",
"stop_price_type_value_mapping": {
PriceType.LAST: "last",
PriceType.MARK: "index",
@ -121,10 +126,9 @@ class Okx(Exchange):
return params
@retrier
def _lev_prep(self, pair: str, leverage: float, side: BuySell):
def _lev_prep(self, pair: str, leverage: float, side: BuySell, accept_fail: bool = False):
if self.trading_mode != TradingMode.SPOT and self.margin_mode is not None:
try:
# TODO-lev: Test me properly (check mgnMode passed)
res = self._api.set_leverage(
leverage=leverage,
symbol=pair,
@ -157,3 +161,61 @@ class Okx(Exchange):
pair_tiers = self._leverage_tiers[pair]
return pair_tiers[-1]['maxNotional'] / leverage
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:
params = super()._get_stop_params(side, ordertype, stop_price)
if self.trading_mode == TradingMode.FUTURES and self.margin_mode:
params['tdMode'] = self.margin_mode.value
params['posSide'] = self._get_posSide(side, True)
return params
def fetch_stoploss_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
if self._config['dry_run']:
return self.fetch_dry_run_order(order_id)
try:
params1 = {'stop': True}
order_reg = self._api.fetch_order(order_id, pair, params=params1)
self._log_exchange_response('fetch_stoploss_order', order_reg)
return order_reg
except ccxt.OrderNotFound:
pass
params2 = {'stop': True, 'ordType': 'conditional'}
for method in (self._api.fetch_open_orders, self._api.fetch_closed_orders,
self._api.fetch_canceled_orders):
try:
orders = method(pair, params=params2)
orders_f = [order for order in orders if order['id'] == order_id]
if orders_f:
order = orders_f[0]
if (order['status'] == 'closed'
and (real_order_id := order.get('info', {}).get('ordId')) is not None):
# Once an order has triggered, we fetch the regular follow-up order.
order_reg = self.fetch_order(real_order_id, pair)
self._log_exchange_response('fetch_stoploss_order1', order_reg)
order_reg['id_stop'] = order_reg['id']
order_reg['id'] = order_id
order_reg['type'] = 'stoploss'
order_reg['status_stop'] = 'triggered'
return order_reg
order['type'] = 'stoploss'
return order
except ccxt.BaseError:
pass
raise RetryableOrderError(
f'StoplossOrder not found (pair: {pair} id: {order_id}).')
def get_order_id_conditional(self, order: Dict[str, Any]) -> str:
if order['type'] == 'stop':
return safe_value_fallback2(order, order, 'id_stop', 'id')
return order['id']
def cancel_stoploss_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
params1 = {'stop': True}
# 'ordType': 'conditional'
#
return self.cancel_order(
order_id=order_id,
pair=pair,
params=params1,
)
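With this fallback, a triggered OKX conditional stop resolves to the regular follow-up order, with a few extra keys so callers can still correlate it with the id the bot stored. An illustrative shape, assuming a live exchange object and a futures pair (all values hypothetical):

    order = exchange.fetch_stoploss_order('123456789', 'ETH/USDT:USDT')
    # For a triggered stop this returns roughly:
    # {
    #     'id': '123456789',         # conditional (stop) order id the bot stored
    #     'id_stop': '987654321',    # id of the regular follow-up order
    #     'type': 'stoploss',
    #     'status_stop': 'triggered',
    #     'status': 'closed',        # status of the follow-up order
    #     ...                        # remaining ccxt order fields
    # }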

View File

@ -47,7 +47,7 @@ class Base3ActionRLEnv(BaseEnvironment):
self._update_unrealized_total_profit()
step_reward = self.calculate_reward(action)
self.total_reward += step_reward
self.tensorboard_log(self.actions._member_names_[action])
self.tensorboard_log(self.actions._member_names_[action], category="actions")
trade_type = None
if self.is_tradesignal(action):
@ -66,7 +66,7 @@ class Base3ActionRLEnv(BaseEnvironment):
elif action == Actions.Sell.value and not self.can_short:
self._update_total_profit()
self._position = Positions.Neutral
trade_type = "neutral"
trade_type = "exit"
self._last_trade_tick = None
else:
print("case not defined")
@ -74,7 +74,7 @@ class Base3ActionRLEnv(BaseEnvironment):
if trade_type is not None:
self.trade_history.append(
{'price': self.current_price(), 'index': self._current_tick,
'type': trade_type})
'type': trade_type, 'profit': self.get_unrealized_profit()})
if (self._total_profit < self.max_drawdown or
self._total_unrealized_profit < self.max_drawdown):

View File

@ -48,20 +48,10 @@ class Base4ActionRLEnv(BaseEnvironment):
self._update_unrealized_total_profit()
step_reward = self.calculate_reward(action)
self.total_reward += step_reward
self.tensorboard_log(self.actions._member_names_[action])
self.tensorboard_log(self.actions._member_names_[action], category="actions")
trade_type = None
if self.is_tradesignal(action):
"""
Action: Neutral, position: Long -> Close Long
Action: Neutral, position: Short -> Close Short
Action: Long, position: Neutral -> Open Long
Action: Long, position: Short -> Close Short and Open Long
Action: Short, position: Neutral -> Open Short
Action: Short, position: Long -> Close Long and Open Short
"""
if action == Actions.Neutral.value:
self._position = Positions.Neutral
@ -69,16 +59,16 @@ class Base4ActionRLEnv(BaseEnvironment):
self._last_trade_tick = None
elif action == Actions.Long_enter.value:
self._position = Positions.Long
trade_type = "long"
trade_type = "enter_long"
self._last_trade_tick = self._current_tick
elif action == Actions.Short_enter.value:
self._position = Positions.Short
trade_type = "short"
trade_type = "enter_short"
self._last_trade_tick = self._current_tick
elif action == Actions.Exit.value:
self._update_total_profit()
self._position = Positions.Neutral
trade_type = "neutral"
trade_type = "exit"
self._last_trade_tick = None
else:
print("case not defined")
@ -86,7 +76,7 @@ class Base4ActionRLEnv(BaseEnvironment):
if trade_type is not None:
self.trade_history.append(
{'price': self.current_price(), 'index': self._current_tick,
'type': trade_type})
'type': trade_type, 'profit': self.get_unrealized_profit()})
if (self._total_profit < self.max_drawdown or
self._total_unrealized_profit < self.max_drawdown):

View File

@ -49,20 +49,10 @@ class Base5ActionRLEnv(BaseEnvironment):
self._update_unrealized_total_profit()
step_reward = self.calculate_reward(action)
self.total_reward += step_reward
self.tensorboard_log(self.actions._member_names_[action])
self.tensorboard_log(self.actions._member_names_[action], category="actions")
trade_type = None
if self.is_tradesignal(action):
"""
Action: Neutral, position: Long -> Close Long
Action: Neutral, position: Short -> Close Short
Action: Long, position: Neutral -> Open Long
Action: Long, position: Short -> Close Short and Open Long
Action: Short, position: Neutral -> Open Short
Action: Short, position: Long -> Close Long and Open Short
"""
if action == Actions.Neutral.value:
self._position = Positions.Neutral
@ -70,21 +60,21 @@ class Base5ActionRLEnv(BaseEnvironment):
self._last_trade_tick = None
elif action == Actions.Long_enter.value:
self._position = Positions.Long
trade_type = "long"
trade_type = "enter_long"
self._last_trade_tick = self._current_tick
elif action == Actions.Short_enter.value:
self._position = Positions.Short
trade_type = "short"
trade_type = "enter_short"
self._last_trade_tick = self._current_tick
elif action == Actions.Long_exit.value:
self._update_total_profit()
self._position = Positions.Neutral
trade_type = "neutral"
trade_type = "exit_long"
self._last_trade_tick = None
elif action == Actions.Short_exit.value:
self._update_total_profit()
self._position = Positions.Neutral
trade_type = "neutral"
trade_type = "exit_short"
self._last_trade_tick = None
else:
print("case not defined")
@ -92,7 +82,7 @@ class Base5ActionRLEnv(BaseEnvironment):
if trade_type is not None:
self.trade_history.append(
{'price': self.current_price(), 'index': self._current_tick,
'type': trade_type})
'type': trade_type, 'profit': self.get_unrealized_profit()})
if (self._total_profit < self.max_drawdown or
self._total_unrealized_profit < self.max_drawdown):

View File

@ -137,7 +137,8 @@ class BaseEnvironment(gym.Env):
self.np_random, seed = seeding.np_random(seed)
return [seed]
def tensorboard_log(self, metric: str, value: Union[int, float] = 1, inc: bool = True):
def tensorboard_log(self, metric: str, value: Optional[Union[int, float]] = None,
inc: Optional[bool] = None, category: str = "custom"):
"""
Function builds the tensorboard_metrics dictionary
to be parsed by the TensorboardCallback. This
@ -149,17 +150,24 @@ class BaseEnvironment(gym.Env):
def calculate_reward(self, action: int) -> float:
if not self._is_valid(action):
self.tensorboard_log("is_valid")
self.tensorboard_log("invalid")
return -2
:param metric: metric to be tracked and incremented
:param value: value to increment `metric` by
:param inc: sets whether the `value` is incremented or not
:param value: `metric` value
:param inc: (deprecated) sets whether the `value` is incremented or not
:param category: `metric` category
"""
if not inc or metric not in self.tensorboard_metrics:
self.tensorboard_metrics[metric] = value
increment = True if value is None else False
value = 1 if increment else value
if category not in self.tensorboard_metrics:
self.tensorboard_metrics[category] = {}
if not increment or metric not in self.tensorboard_metrics[category]:
self.tensorboard_metrics[category][metric] = value
else:
self.tensorboard_metrics[metric] += value
self.tensorboard_metrics[category][metric] += value
def reset_tensorboard_log(self):
self.tensorboard_metrics = {}
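A short usage sketch of the reworked logger, as called from inside an environment's step() or calculate_reward(): calls without a value act as per-category counters, calls with an explicit value store that value as-is (the action names mirror the call sites above, the pnl entry is hypothetical):

    self.tensorboard_log("invalid", category="actions")     # counter, +1 per call
    self.tensorboard_log("Long_enter", category="actions")  # counter, +1 per call
    self.tensorboard_log("pnl", value=0.012)                # stored under the default "custom" category

    # TensorboardCallback then iterates the nested structure:
    # self.tensorboard_metrics == {
    #     "actions": {"invalid": 1, "Long_enter": 1},
    #     "custom": {"pnl": 0.012},
    # }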

View File

@ -114,6 +114,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
# normalize all data based on train_dataset only
prices_train, prices_test = self.build_ohlc_price_dataframes(dk.data_dictionary, pair, dk)
data_dictionary = dk.normalize_data(data_dictionary)
# data cleaning/analysis
@ -148,12 +149,8 @@ class BaseReinforcementLearningModel(IFreqaiModel):
env_info = self.pack_env_dict(dk.pair)
self.train_env = self.MyRLEnv(df=train_df,
prices=prices_train,
**env_info)
self.eval_env = Monitor(self.MyRLEnv(df=test_df,
prices=prices_test,
**env_info))
self.train_env = self.MyRLEnv(df=train_df, prices=prices_train, **env_info)
self.eval_env = Monitor(self.MyRLEnv(df=test_df, prices=prices_test, **env_info))
self.eval_callback = EvalCallback(self.eval_env, deterministic=True,
render=False, eval_freq=len(train_df),
best_model_save_path=str(dk.data_path))
@ -238,6 +235,9 @@ class BaseReinforcementLearningModel(IFreqaiModel):
filtered_dataframe, _ = dk.filter_features(
unfiltered_df, dk.training_features_list, training_filter=False
)
filtered_dataframe = self.drop_ohlc_from_df(filtered_dataframe, dk)
filtered_dataframe = dk.normalize_data_from_metadata(filtered_dataframe)
dk.data_dictionary["prediction_features"] = filtered_dataframe
@ -285,7 +285,6 @@ class BaseReinforcementLearningModel(IFreqaiModel):
train_df = data_dictionary["train_features"]
test_df = data_dictionary["test_features"]
# %-raw_volume_gen_shift-2_ETH/USDT_1h
# price data for model training and evaluation
tf = self.config['timeframe']
rename_dict = {'%-raw_open': 'open', '%-raw_low': 'low',
@ -318,8 +317,24 @@ class BaseReinforcementLearningModel(IFreqaiModel):
prices_test.rename(columns=rename_dict, inplace=True)
prices_test.reset_index(drop=True)
train_df = self.drop_ohlc_from_df(train_df, dk)
test_df = self.drop_ohlc_from_df(test_df, dk)
return prices_train, prices_test
def drop_ohlc_from_df(self, df: DataFrame, dk: FreqaiDataKitchen):
"""
Given a dataframe, drop the ohlc data
"""
drop_list = ['%-raw_open', '%-raw_low', '%-raw_high', '%-raw_close']
if self.rl_config["drop_ohlc_from_features"]:
df.drop(drop_list, axis=1, inplace=True)
feature_list = dk.training_features_list
dk.training_features_list = [e for e in feature_list if e not in drop_list]
return df
def load_model_from_disk(self, dk: FreqaiDataKitchen) -> Any:
"""
Can be used by user if they are trying to limit_ram_usage *and*

View File

@ -13,7 +13,7 @@ class TensorboardCallback(BaseCallback):
episodic summary reports.
"""
def __init__(self, verbose=1, actions: Type[Enum] = BaseActions):
super(TensorboardCallback, self).__init__(verbose)
super().__init__(verbose)
self.model: Any = None
self.logger = None # type: Any
self.training_env: BaseEnvironment = None # type: ignore
@ -46,14 +46,12 @@ class TensorboardCallback(BaseCallback):
local_info = self.locals["infos"][0]
tensorboard_metrics = self.training_env.get_attr("tensorboard_metrics")[0]
for info in local_info:
if info not in ["episode", "terminal_observation"]:
self.logger.record(f"_info/{info}", local_info[info])
for metric in local_info:
if metric not in ["episode", "terminal_observation"]:
self.logger.record(f"info/{metric}", local_info[metric])
for info in tensorboard_metrics:
if info in [action.name for action in self.actions]:
self.logger.record(f"_actions/{info}", tensorboard_metrics[info])
else:
self.logger.record(f"_custom/{info}", tensorboard_metrics[info])
for category in tensorboard_metrics:
for metric in tensorboard_metrics[category]:
self.logger.record(f"{category}/{metric}", tensorboard_metrics[category][metric])
return True

View File

@ -251,7 +251,7 @@ class FreqaiDataKitchen:
(drop_index == 0) & (drop_index_labels == 0)
]
logger.info(
f"dropped {len(unfiltered_df) - len(filtered_df)} training points"
f"{self.pair}: dropped {len(unfiltered_df) - len(filtered_df)} training points"
f" due to NaNs in populated dataset {len(unfiltered_df)}."
)
if (1 - len(filtered_df) / len(unfiltered_df)) > 0.1 and self.live:
@ -675,7 +675,7 @@ class FreqaiDataKitchen:
]
logger.info(
f"SVM tossed {len(y_pred) - kept_points.sum()}"
f"{self.pair}: SVM tossed {len(y_pred) - kept_points.sum()}"
f" test points from {len(y_pred)} total points."
)
@ -949,7 +949,7 @@ class FreqaiDataKitchen:
if (len(do_predict) - do_predict.sum()) > 0:
logger.info(
f"DI tossed {len(do_predict) - do_predict.sum()} predictions for "
f"{self.pair}: DI tossed {len(do_predict) - do_predict.sum()} predictions for "
"being too far from training data."
)

View File

@ -105,6 +105,10 @@ class IFreqaiModel(ABC):
self.data_provider: Optional[DataProvider] = None
self.max_system_threads = max(int(psutil.cpu_count() * 2 - 2), 1)
self.can_short = True # overridden in start() with strategy.can_short
self.model: Any = None
if self.ft_params.get('principal_component_analysis', False) and self.continual_learning:
self.ft_params.update({'principal_component_analysis': False})
logger.warning('User tried to use PCA with continual learning. Deactivating PCA.')
record_params(config, self.full_path)
@ -154,8 +158,7 @@ class IFreqaiModel(ABC):
dk = self.start_backtesting(dataframe, metadata, self.dk, strategy)
dataframe = dk.remove_features_from_df(dk.return_dataframe)
else:
logger.info(
"Backtesting using historic predictions (live models)")
logger.info("Backtesting using historic predictions (live models)")
dk = self.start_backtesting_from_historic_predictions(
dataframe, metadata, self.dk)
dataframe = dk.return_dataframe
@ -339,13 +342,14 @@ class IFreqaiModel(ABC):
except Exception as msg:
logger.warning(
f"Training {pair} raised exception {msg.__class__.__name__}. "
f"Message: {msg}, skipping.")
f"Message: {msg}, skipping.", exc_info=True)
self.model = None
self.dd.pair_dict[pair]["trained_timestamp"] = int(
tr_train.stopts)
if self.plot_features:
if self.plot_features and self.model is not None:
plot_feature_importance(self.model, pair, dk, self.plot_features)
if self.save_backtest_models:
if self.save_backtest_models and self.model is not None:
logger.info('Saving backtest model to disk.')
self.dd.save_data(self.model, pair, dk)
else:

View File

@ -100,7 +100,7 @@ class ReinforcementLearner(BaseReinforcementLearningModel):
"""
# first, penalize if the action is not valid
if not self._is_valid(action):
self.tensorboard_log("is_valid")
self.tensorboard_log("invalid", category="actions")
return -2
pnl = self.get_unrealized_profit()

View File

@ -21,7 +21,8 @@ from freqtrade.enums import (ExitCheckTuple, ExitType, RPCMessageType, RunMode,
State, TradingMode)
from freqtrade.exceptions import (DependencyException, ExchangeError, InsufficientFundsError,
InvalidOrderException, PricingError)
from freqtrade.exchange import timeframe_to_minutes, timeframe_to_next_date, timeframe_to_seconds
from freqtrade.exchange import (ROUND_DOWN, ROUND_UP, timeframe_to_minutes, timeframe_to_next_date,
timeframe_to_seconds)
from freqtrade.misc import safe_value_fallback, safe_value_fallback2
from freqtrade.mixins import LoggingMixin
from freqtrade.persistence import Order, PairLocks, Trade, init_db
@ -30,6 +31,8 @@ from freqtrade.plugins.protectionmanager import ProtectionManager
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
from freqtrade.rpc import RPCManager
from freqtrade.rpc.external_message_consumer import ExternalMessageConsumer
from freqtrade.rpc.rpc_types import (RPCBuyMsg, RPCCancelMsg, RPCProtectionMsg, RPCSellCancelMsg,
RPCSellMsg)
from freqtrade.strategy.interface import IStrategy
from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper
from freqtrade.util import FtPrecise
@ -133,13 +136,13 @@ class FreqtradeBot(LoggingMixin):
# Initialize protections AFTER bot start - otherwise parameters are not loaded.
self.protections = ProtectionManager(self.config, self.strategy.protections)
def notify_status(self, msg: str) -> None:
def notify_status(self, msg: str, msg_type=RPCMessageType.STATUS) -> None:
"""
Public method for users of this class (worker, etc.) to send notifications
via RPC about changes in the bot status.
"""
self.rpc.send_msg({
'type': RPCMessageType.STATUS,
'type': msg_type,
'status': msg
})
@ -212,7 +215,8 @@ class FreqtradeBot(LoggingMixin):
self.dataprovider.refresh(self.pairlists.create_pair_list(self.active_pair_whitelist),
self.strategy.gather_informative_pairs())
strategy_safe_wrapper(self.strategy.bot_loop_start, supress_error=True)()
strategy_safe_wrapper(self.strategy.bot_loop_start, supress_error=True)(
current_time=datetime.now(timezone.utc))
self.strategy.analyze(self.active_pair_whitelist)
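bot_loop_start is now invoked with the current wall-clock (or backtest) time, so strategies overriding it should accept the extra argument. A minimal strategy-side fragment (MyStrategy is a placeholder name, the rest of the class is omitted):

    from datetime import datetime

    from freqtrade.strategy import IStrategy

    class MyStrategy(IStrategy):
        def bot_loop_start(self, current_time: datetime, **kwargs) -> None:
            # Called once per bot iteration (and per candle in backtesting).
            self.custom_info = getattr(self, 'custom_info', {})
            self.custom_info['last_loop'] = current_time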
@ -586,7 +590,7 @@ class FreqtradeBot(LoggingMixin):
min_entry_stake = self.exchange.get_min_pair_stake_amount(trade.pair,
current_entry_rate,
self.strategy.stoploss)
0.0)
min_exit_stake = self.exchange.get_min_pair_stake_amount(trade.pair,
current_exit_rate,
self.strategy.stoploss)
@ -594,7 +598,7 @@ class FreqtradeBot(LoggingMixin):
stake_available = self.wallets.get_available_stake_amount()
logger.debug(f"Calling adjust_trade_position for pair {trade.pair}")
stake_amount = strategy_safe_wrapper(self.strategy.adjust_trade_position,
default_retval=None)(
default_retval=None, supress_error=True)(
trade=trade,
current_time=datetime.now(timezone.utc), current_rate=current_entry_rate,
current_profit=current_entry_profit, min_stake=min_entry_stake,
@ -700,7 +704,8 @@ class FreqtradeBot(LoggingMixin):
pos_adjust = trade is not None
enter_limit_requested, stake_amount, leverage = self.get_valid_enter_price_and_stake(
pair, price, stake_amount, trade_side, enter_tag, trade, order_adjust, leverage_)
pair, price, stake_amount, trade_side, enter_tag, trade, order_adjust, leverage_,
pos_adjust)
if not stake_amount:
return False
@ -809,6 +814,9 @@ class FreqtradeBot(LoggingMixin):
precision_mode=self.exchange.precisionMode,
contract_size=self.exchange.get_contract_size(pair),
)
stoploss = self.strategy.stoploss if not self.edge else self.edge.get_stoploss(pair)
trade.adjust_stop_loss(trade.open_rate, stoploss, initial=True)
else:
# This is additional buy, we reset fee_open_currency so timeout checking can work
trade.is_open = True
@ -818,7 +826,7 @@ class FreqtradeBot(LoggingMixin):
trade.orders.append(order_obj)
trade.recalc_trade_from_orders()
Trade.query.session.add(trade)
Trade.session.add(trade)
Trade.commit()
# Updating wallets
@ -846,11 +854,13 @@ class FreqtradeBot(LoggingMixin):
logger.info(f"Canceling stoploss on exchange for {trade}")
co = self.exchange.cancel_stoploss_order_with_result(
trade.stoploss_order_id, trade.pair, trade.amount)
trade.update_order(co)
self.update_trade_state(trade, trade.stoploss_order_id, co, stoploss_order=True)
# Reset stoploss order id.
trade.stoploss_order_id = None
except InvalidOrderException:
logger.exception(f"Could not cancel stoploss order {trade.stoploss_order_id}")
logger.exception(f"Could not cancel stoploss order {trade.stoploss_order_id} "
f"for pair {trade.pair}")
return trade
def get_valid_enter_price_and_stake(
@ -860,7 +870,12 @@ class FreqtradeBot(LoggingMixin):
trade: Optional[Trade],
order_adjust: bool,
leverage_: Optional[float],
pos_adjust: bool,
) -> Tuple[float, float, float]:
"""
Validate and, if necessary, adjust (within limits) the limit price, amount and leverage
:return: Tuple with (price, amount, leverage)
"""
if price:
enter_limit_requested = price
@ -906,7 +921,9 @@ class FreqtradeBot(LoggingMixin):
# We do however also need min-stake to determine leverage, therefore this is ignored as
# edge-case for now.
min_stake_amount = self.exchange.get_min_pair_stake_amount(
pair, enter_limit_requested, self.strategy.stoploss, leverage)
pair, enter_limit_requested,
self.strategy.stoploss if not pos_adjust else 0.0,
leverage)
max_stake_amount = self.exchange.get_max_pair_stake_amount(
pair, enter_limit_requested, leverage)
@ -930,12 +947,11 @@ class FreqtradeBot(LoggingMixin):
return enter_limit_requested, stake_amount, leverage
def _notify_enter(self, trade: Trade, order: Order, order_type: Optional[str] = None,
def _notify_enter(self, trade: Trade, order: Order, order_type: str,
fill: bool = False, sub_trade: bool = False) -> None:
"""
Sends rpc notification when an entry order occurred.
"""
msg_type = RPCMessageType.ENTRY_FILL if fill else RPCMessageType.ENTRY
open_rate = order.safe_price
if open_rate is None:
@ -946,9 +962,9 @@ class FreqtradeBot(LoggingMixin):
current_rate = self.exchange.get_rate(
trade.pair, side='entry', is_short=trade.is_short, refresh=False)
msg = {
msg: RPCBuyMsg = {
'trade_id': trade.id,
'type': msg_type,
'type': RPCMessageType.ENTRY_FILL if fill else RPCMessageType.ENTRY,
'buy_tag': trade.enter_tag,
'enter_tag': trade.enter_tag,
'exchange': trade.exchange.capitalize(),
@ -960,6 +976,7 @@ class FreqtradeBot(LoggingMixin):
'order_type': order_type,
'stake_amount': trade.stake_amount,
'stake_currency': self.config['stake_currency'],
'base_currency': self.exchange.get_pair_base_currency(trade.pair),
'fiat_currency': self.config.get('fiat_display_currency', None),
'amount': order.safe_amount_after_fee if fill else (order.amount or trade.amount),
'open_date': trade.open_date or datetime.utcnow(),
@ -978,7 +995,7 @@ class FreqtradeBot(LoggingMixin):
current_rate = self.exchange.get_rate(
trade.pair, side='entry', is_short=trade.is_short, refresh=False)
msg = {
msg: RPCCancelMsg = {
'trade_id': trade.id,
'type': RPCMessageType.ENTRY_CANCEL,
'buy_tag': trade.enter_tag,
@ -990,7 +1007,9 @@ class FreqtradeBot(LoggingMixin):
'limit': trade.open_rate,
'order_type': order_type,
'stake_amount': trade.stake_amount,
'open_rate': trade.open_rate,
'stake_currency': self.config['stake_currency'],
'base_currency': self.exchange.get_pair_base_currency(trade.pair),
'fiat_currency': self.config.get('fiat_display_currency', None),
'amount': trade.amount,
'open_date': trade.open_date,
@ -1013,12 +1032,16 @@ class FreqtradeBot(LoggingMixin):
trades_closed = 0
for trade in trades:
try:
try:
if (self.strategy.order_types.get('stoploss_on_exchange') and
self.handle_stoploss_on_exchange(trade)):
trades_closed += 1
Trade.commit()
continue
except InvalidOrderException as exception:
logger.warning(
f'Unable to handle stoploss on exchange for {trade.pair}: {exception}')
# Check if we can sell our current pair
if trade.open_order_id is None and trade.is_open and self.handle_trade(trade):
trades_closed += 1
@ -1122,8 +1145,7 @@ class FreqtradeBot(LoggingMixin):
trade.stoploss_order_id = None
logger.error(f'Unable to place a stoploss order on exchange. {e}')
logger.warning('Exiting the trade forcefully')
self.execute_trade_exit(trade, stop_price, exit_check=ExitCheckTuple(
exit_type=ExitType.EMERGENCY_EXIT))
self.emergency_exit(trade, stop_price)
except ExchangeError:
trade.stoploss_order_id = None
@ -1151,7 +1173,8 @@ class FreqtradeBot(LoggingMixin):
logger.warning('Unable to fetch stoploss order: %s', exception)
if stoploss_order:
trade.update_order(stoploss_order)
self.update_trade_state(trade, trade.stoploss_order_id, stoploss_order,
stoploss_order=True)
# We check if stoploss order is fulfilled
if stoploss_order and stoploss_order['status'] in ('closed', 'triggered'):
@ -1215,7 +1238,9 @@ class FreqtradeBot(LoggingMixin):
:param order: Current on exchange stoploss order
:return: None
"""
stoploss_norm = self.exchange.price_to_precision(trade.pair, trade.stoploss_or_liquidation)
stoploss_norm = self.exchange.price_to_precision(
trade.pair, trade.stoploss_or_liquidation,
rounding_mode=ROUND_DOWN if trade.is_short else ROUND_UP)
if self.exchange.stoploss_adjust(stoploss_norm, order, side=trade.exit_side):
# we check if the update is necessary
@ -1225,13 +1250,8 @@ class FreqtradeBot(LoggingMixin):
# cancelling the current stoploss on exchange first
logger.info(f"Cancelling current stoploss on exchange for pair {trade.pair} "
f"(orderid:{order['id']}) in order to add another one ...")
try:
co = self.exchange.cancel_stoploss_order_with_result(order['id'], trade.pair,
trade.amount)
trade.update_order(co)
except InvalidOrderException:
logger.exception(f"Could not cancel stoploss order {order['id']} "
f"for pair {trade.pair}")
self.cancel_stoploss_on_exchange(trade)
# Create new stoploss order
if not self.create_stoploss_order(trade=trade, stop_price=stoploss_norm):
@ -1281,13 +1301,16 @@ class FreqtradeBot(LoggingMixin):
if canceled and max_timeouts > 0 and canceled_count >= max_timeouts:
logger.warning(f'Emergency exiting trade {trade}, as the exit order '
f'timed out {max_timeouts} times.')
self.emergency_exit(trade, order['price'])
def emergency_exit(self, trade: Trade, price: float) -> None:
try:
self.execute_trade_exit(
trade, order['price'],
trade, price,
exit_check=ExitCheckTuple(exit_type=ExitType.EMERGENCY_EXIT))
except DependencyException as exception:
logger.warning(
f'Unable to emergency sell trade {trade.pair}: {exception}')
f'Unable to emergency exit trade {trade.pair}: {exception}')
def replace_order(self, order: Dict, order_obj: Optional[Order], trade: Trade) -> None:
"""
@ -1460,35 +1483,34 @@ class FreqtradeBot(LoggingMixin):
return False
try:
co = self.exchange.cancel_order_with_result(order['id'], trade.pair,
trade.amount)
order = self.exchange.cancel_order_with_result(
order['id'], trade.pair, trade.amount)
except InvalidOrderException:
logger.exception(
f"Could not cancel {trade.exit_side} order {trade.open_order_id}")
return False
trade.close_rate = None
trade.close_rate_requested = None
trade.close_profit = None
trade.close_profit_abs = None
# Set exit_reason for fill message
exit_reason_prev = trade.exit_reason
trade.exit_reason = trade.exit_reason + f", {reason}" if trade.exit_reason else reason
self.update_trade_state(trade, trade.open_order_id, co)
# The order might have been filled above due to odd timing issues.
if co.get('status') in ('canceled', 'cancelled'):
if order.get('status') in ('canceled', 'cancelled'):
trade.exit_reason = None
trade.open_order_id = None
else:
trade.exit_reason = exit_reason_prev
logger.info(f'{trade.exit_side.capitalize()} order {reason} for {trade}.')
cancelled = True
else:
reason = constants.CANCEL_REASON['CANCELLED_ON_EXCHANGE']
logger.info(f'{trade.exit_side.capitalize()} order {reason} for {trade}.')
self.update_trade_state(trade, trade.open_order_id, order)
trade.exit_reason = None
trade.open_order_id = None
self.update_trade_state(trade, trade.open_order_id, order)
logger.info(f'{trade.exit_side.capitalize()} order {reason} for {trade}.')
trade.close_rate = None
trade.close_rate_requested = None
self._notify_exit_cancel(
trade,
order_type=self.strategy.order_types['exit'],
@ -1651,7 +1673,7 @@ class FreqtradeBot(LoggingMixin):
amount = trade.amount
gain = "profit" if profit_ratio > 0 else "loss"
msg = {
msg: RPCSellMsg = {
'type': (RPCMessageType.EXIT_FILL if fill
else RPCMessageType.EXIT),
'trade_id': trade.id,
@ -1677,6 +1699,7 @@ class FreqtradeBot(LoggingMixin):
'close_date': trade.close_date or datetime.utcnow(),
'stake_amount': trade.stake_amount,
'stake_currency': self.config['stake_currency'],
'base_currency': self.exchange.get_pair_base_currency(trade.pair),
'fiat_currency': self.config.get('fiat_display_currency'),
'sub_trade': sub_trade,
'cumulative_profit': trade.realized_profit,
@ -1707,7 +1730,7 @@ class FreqtradeBot(LoggingMixin):
profit_ratio = trade.calc_profit_ratio(profit_rate)
gain = "profit" if profit_ratio > 0 else "loss"
msg = {
msg: RPCSellCancelMsg = {
'type': RPCMessageType.EXIT_CANCEL,
'trade_id': trade.id,
'exchange': trade.exchange.capitalize(),
@ -1729,6 +1752,7 @@ class FreqtradeBot(LoggingMixin):
'open_date': trade.open_date,
'close_date': trade.close_date or datetime.now(timezone.utc),
'stake_currency': self.config['stake_currency'],
'base_currency': self.exchange.get_pair_base_currency(trade.pair),
'fiat_currency': self.config.get('fiat_display_currency', None),
'reason': reason,
'sub_trade': sub_trade,
@ -1760,11 +1784,11 @@ class FreqtradeBot(LoggingMixin):
return False
# Update trade with order values
if not stoploss_order:
logger.info(f'Found open order for {trade}')
try:
order = action_order or self.exchange.fetch_order_or_stoploss_order(order_id,
trade.pair,
stoploss_order)
order = action_order or self.exchange.fetch_order_or_stoploss_order(
order_id, trade.pair, stoploss_order)
except InvalidOrderException as exception:
logger.warning('Unable to fetch order %s: %s', order_id, exception)
return False
@ -1793,7 +1817,7 @@ class FreqtradeBot(LoggingMixin):
# TODO: should shorting/leverage be supported by Edge,
# then this will need to be fixed.
trade.adjust_stop_loss(trade.open_rate, self.strategy.stoploss, initial=True)
if order.get('side') == trade.entry_side or trade.amount > 0:
if order.get('side') == trade.entry_side or (trade.amount > 0 and trade.is_open):
# Must also run for partial exits
# TODO: Margin will need to use interest_rate as well.
# interest_rate = self.exchange.get_interest_rate()
@ -1829,21 +1853,27 @@ class FreqtradeBot(LoggingMixin):
self.handle_protections(trade.pair, trade.trade_direction)
elif send_msg and not trade.open_order_id and not stoploss_order:
# Enter fill
self._notify_enter(trade, order, fill=True, sub_trade=sub_trade)
self._notify_enter(trade, order, order.order_type, fill=True, sub_trade=sub_trade)
def handle_protections(self, pair: str, side: LongShort) -> None:
# Lock pair for one candle to prevent immediate rebuys
self.strategy.lock_pair(pair, datetime.now(timezone.utc), reason='Auto lock')
prot_trig = self.protections.stop_per_pair(pair, side=side)
if prot_trig:
msg = {'type': RPCMessageType.PROTECTION_TRIGGER, }
msg.update(prot_trig.to_json())
msg: RPCProtectionMsg = {
'type': RPCMessageType.PROTECTION_TRIGGER,
'base_currency': self.exchange.get_pair_base_currency(prot_trig.pair),
**prot_trig.to_json() # type: ignore
}
self.rpc.send_msg(msg)
prot_trig_glb = self.protections.global_stop(side=side)
if prot_trig_glb:
msg = {'type': RPCMessageType.PROTECTION_TRIGGER_GLOBAL, }
msg.update(prot_trig_glb.to_json())
msg = {
'type': RPCMessageType.PROTECTION_TRIGGER_GLOBAL,
'base_currency': self.exchange.get_pair_base_currency(prot_trig_glb.pair),
**prot_trig_glb.to_json() # type: ignore
}
self.rpc.send_msg(msg)
def apply_fee_conditional(self, trade: Trade, trade_base_currency: str,

View File

@ -6,8 +6,7 @@ import logging
import re
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, Iterator, List, Mapping, Optional, Union
from typing.io import IO
from typing import Any, Dict, Iterator, List, Mapping, Optional, TextIO, Union
from urllib.parse import urlparse
import orjson
@ -103,7 +102,7 @@ def file_dump_joblib(filename: Path, data: Any, log: bool = True) -> None:
logger.debug(f'done joblib dump to "{filename}"')
def json_load(datafile: IO) -> Any:
def json_load(datafile: Union[gzip.GzipFile, TextIO]) -> Any:
"""
load data with rapidjson
Use this to have a consistent experience,

View File

@ -203,9 +203,10 @@ class Backtesting:
# since a "perfect" stoploss-exit is assumed anyway
# And the regular "stoploss" function would not apply to that case
self.strategy.order_types['stoploss_on_exchange'] = False
# Update can_short flag
self._can_short = self.trading_mode != TradingMode.SPOT and strategy.can_short
self.strategy.ft_bot_start()
strategy_safe_wrapper(self.strategy.bot_loop_start, supress_error=True)()
def _load_protections(self, strategy: IStrategy):
if self.config.get('enable_protections', False):
@ -442,10 +443,6 @@ class Backtesting:
# Worst case: price ticks tiny bit above open and dives down.
stop_rate = row[OPEN_IDX] * (1 - side_1 * abs(
(trade.stop_loss_pct or 0.0) / leverage))
if is_short:
assert stop_rate > row[LOW_IDX]
else:
assert stop_rate < row[HIGH_IDX]
# Limit lower-end to candle low to avoid exits below the low.
# This still remains "worst case" - but "worst realistic case".
@ -526,7 +523,7 @@ class Backtesting:
max_stake = self.exchange.get_max_pair_stake_amount(trade.pair, current_rate)
stake_available = self.wallets.get_available_stake_amount()
stake_amount = strategy_safe_wrapper(self.strategy.adjust_trade_position,
default_retval=None)(
default_retval=None, supress_error=True)(
trade=trade, # type: ignore[arg-type]
current_time=current_date, current_rate=current_rate,
current_profit=current_profit, min_stake=min_stake,
@ -744,12 +741,12 @@ class Backtesting:
proposed_leverage=1.0,
max_leverage=max_leverage,
side=direction, entry_tag=entry_tag,
) if self._can_short else 1.0
) if self.trading_mode != TradingMode.SPOT else 1.0
# Cap leverage between 1.0 and max_leverage.
leverage = min(max(leverage, 1.0), max_leverage)
min_stake_amount = self.exchange.get_min_pair_stake_amount(
pair, propose_rate, -0.05, leverage=leverage) or 0
pair, propose_rate, -0.05 if not pos_adjust else 0.0, leverage=leverage) or 0
max_stake_amount = self.exchange.get_max_pair_stake_amount(
pair, propose_rate, leverage=leverage)
stake_available = self.wallets.get_available_stake_amount()
@ -1034,6 +1031,9 @@ class Backtesting:
requested_stake=(
order.safe_remaining * order.ft_price / trade.leverage),
direction='short' if trade.is_short else 'long')
# Delete trade if no successful entries happened (if placing the new order failed)
if trade.open_order_id is None and trade.nr_of_successful_entries == 0:
return True
self.replaced_entry_orders += 1
else:
# assumption: there can't be multiple open entry orders at any given time
@ -1159,6 +1159,8 @@ class Backtesting:
while current_time <= end_date:
open_trade_count_start = LocalTrade.bt_open_open_trade_count
self.check_abort()
strategy_safe_wrapper(self.strategy.bot_loop_start, supress_error=True)(
current_time=current_time)
for i, pair in enumerate(data):
row_index = indexes[pair]
row = self.validate_row(data, pair, row_index, current_time)

View File

@ -1,4 +1,3 @@
import io
import logging
from copy import deepcopy
from datetime import datetime, timezone
@ -24,6 +23,8 @@ logger = logging.getLogger(__name__)
NON_OPT_PARAM_APPENDIX = " # value loaded from strategy"
HYPER_PARAMS_FILE_FORMAT = rapidjson.NM_NATIVE | rapidjson.NM_NAN
def hyperopt_serializer(x):
if isinstance(x, np.integer):
@ -77,9 +78,18 @@ class HyperoptTools():
with filename.open('w') as f:
rapidjson.dump(final_params, f, indent=2,
default=hyperopt_serializer,
number_mode=rapidjson.NM_NATIVE | rapidjson.NM_NAN
number_mode=HYPER_PARAMS_FILE_FORMAT
)
@staticmethod
def load_params(filename: Path) -> Dict:
"""
Load parameters from file
"""
with filename.open('r') as f:
params = rapidjson.load(f, number_mode=HYPER_PARAMS_FILE_FORMAT)
return params
@staticmethod
def try_export_params(config: Config, strategy_name: str, params: Dict):
if params.get(FTHYPT_FILEVERSION, 1) >= 2 and not config.get('disableparamexport', False):
@ -190,7 +200,7 @@ class HyperoptTools():
for s in ['buy', 'sell', 'protection',
'roi', 'stoploss', 'trailing', 'max_open_trades']:
HyperoptTools._params_update_for_json(result_dict, params, non_optimized, s)
print(rapidjson.dumps(result_dict, default=str, number_mode=rapidjson.NM_NATIVE))
print(rapidjson.dumps(result_dict, default=str, number_mode=HYPER_PARAMS_FILE_FORMAT))
else:
HyperoptTools._params_pretty_print(params, 'buy', "Buy hyperspace params:",
@ -464,8 +474,8 @@ class HyperoptTools():
return
try:
io.open(csv_file, 'w+').close()
except IOError:
Path(csv_file).open('w+').close()
except OSError:
logger.error(f"Failed to create CSV file: {csv_file}")
return

View File

@ -2,7 +2,9 @@
This module contains the class to persist trades into SQLite
"""
import logging
from typing import Any, Dict
import threading
from contextvars import ContextVar
from typing import Any, Dict, Final, Optional
from sqlalchemy import create_engine, inspect
from sqlalchemy.exc import NoSuchModuleError
@ -19,6 +21,22 @@ from freqtrade.persistence.trade_model import Order, Trade
logger = logging.getLogger(__name__)
REQUEST_ID_CTX_KEY: Final[str] = 'request_id'
_request_id_ctx_var: ContextVar[Optional[str]] = ContextVar(REQUEST_ID_CTX_KEY, default=None)
def get_request_or_thread_id() -> Optional[str]:
"""
Helper method to get either the async request id (for FastAPI requests) or the thread id
"""
id = _request_id_ctx_var.get()
if id is None:
# when not in request context - use thread id
id = str(threading.current_thread().ident)
return id
_SQL_DOCS_URL = 'http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls'
@ -53,13 +71,11 @@ def init_db(db_url: str) -> None:
# https://docs.sqlalchemy.org/en/13/orm/contextual.html#thread-local-scope
# Scoped sessions proxy requests to the appropriate thread-local session.
# We should use the scoped_session object - not a separately initialized version
Trade._session = scoped_session(sessionmaker(bind=engine, autoflush=False))
Order._session = Trade._session
PairLock._session = Trade._session
Trade.query = Trade._session.query_property()
Order.query = Trade._session.query_property()
PairLock.query = Trade._session.query_property()
# Since we also use fastAPI, we need to make it aware of the request id, too
Trade.session = scoped_session(sessionmaker(
bind=engine, autoflush=False), scopefunc=get_request_or_thread_id)
Order.session = Trade.session
PairLock.session = Trade.session
previous_tables = inspect(engine).get_table_names()
ModelBase.metadata.create_all(engine)
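The scopefunc keys the session registry, so each FastAPI request (via the context variable) and each thread resolves to its own Session. A small standalone sketch of the context-variable side (the request id value is hypothetical and does not reflect freqtrade's actual API middleware):

    import threading
    from contextvars import ContextVar
    from typing import Optional

    _request_id_ctx_var: ContextVar[Optional[str]] = ContextVar('request_id', default=None)

    def get_request_or_thread_id() -> Optional[str]:
        request_id = _request_id_ctx_var.get()
        # Outside a request context, fall back to the thread id.
        return request_id if request_id is not None else str(threading.current_thread().ident)

    token = _request_id_ctx_var.set('request-1234')   # set once per incoming request
    try:
        print(get_request_or_thread_id())             # 'request-1234', the scoped_session key
    finally:
        _request_id_ctx_var.reset(token)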

View File

@ -1,9 +1,8 @@
from datetime import datetime, timezone
from typing import Any, ClassVar, Dict, Optional
from sqlalchemy import String, or_
from sqlalchemy.orm import Mapped, Query, mapped_column
from sqlalchemy.orm.scoping import _QueryDescriptorType
from sqlalchemy import ScalarResult, String, or_, select
from sqlalchemy.orm import Mapped, mapped_column
from freqtrade.constants import DATETIME_PRINT_FORMAT
from freqtrade.persistence.base import ModelBase, SessionType
@ -14,8 +13,7 @@ class PairLock(ModelBase):
Pair Locks database model.
"""
__tablename__ = 'pairlocks'
query: ClassVar[_QueryDescriptorType]
_session: ClassVar[SessionType]
session: ClassVar[SessionType]
id: Mapped[int] = mapped_column(primary_key=True)
@ -38,7 +36,8 @@ class PairLock(ModelBase):
f'lock_end_time={lock_end_time}, reason={self.reason}, active={self.active})')
@staticmethod
def query_pair_locks(pair: Optional[str], now: datetime, side: str = '*') -> Query:
def query_pair_locks(
pair: Optional[str], now: datetime, side: str = '*') -> ScalarResult['PairLock']:
"""
Get all currently active locks for this pair
:param pair: Pair to check for. Returns all current locks if pair is empty
@ -54,9 +53,11 @@ class PairLock(ModelBase):
else:
filters.append(PairLock.side == '*')
return PairLock.query.filter(
*filters
)
return PairLock.session.scalars(select(PairLock).filter(*filters))
@staticmethod
def get_all_locks() -> ScalarResult['PairLock']:
return PairLock.session.scalars(select(PairLock))
def to_json(self) -> Dict[str, Any]:
return {

View File

@ -1,6 +1,8 @@
import logging
from datetime import datetime, timezone
from typing import List, Optional
from typing import List, Optional, Sequence
from sqlalchemy import select
from freqtrade.exchange import timeframe_to_next_date
from freqtrade.persistence.models import PairLock
@ -51,15 +53,15 @@ class PairLocks():
active=True
)
if PairLocks.use_db:
PairLock.query.session.add(lock)
PairLock.query.session.commit()
PairLock.session.add(lock)
PairLock.session.commit()
else:
PairLocks.locks.append(lock)
return lock
@staticmethod
def get_pair_locks(
pair: Optional[str], now: Optional[datetime] = None, side: str = '*') -> List[PairLock]:
def get_pair_locks(pair: Optional[str], now: Optional[datetime] = None,
side: str = '*') -> Sequence[PairLock]:
"""
Get all currently active locks for this pair
:param pair: Pair to check for. Returns all current locks if pair is empty
@ -106,7 +108,7 @@ class PairLocks():
for lock in locks:
lock.active = False
if PairLocks.use_db:
PairLock.query.session.commit()
PairLock.session.commit()
@staticmethod
def unlock_reason(reason: str, now: Optional[datetime] = None) -> None:
@ -126,11 +128,11 @@ class PairLocks():
PairLock.active.is_(True),
PairLock.reason == reason
]
locks = PairLock.query.filter(*filters)
locks = PairLock.session.scalars(select(PairLock).filter(*filters)).all()
for lock in locks:
logger.info(f"Releasing lock for {lock.pair} with reason '{reason}'.")
lock.active = False
PairLock.query.session.commit()
PairLock.session.commit()
else:
# used in backtesting mode; don't show log messages for speed
locksb = PairLocks.get_pair_locks(None)
@ -165,11 +167,11 @@ class PairLocks():
)
@staticmethod
def get_all_locks() -> List[PairLock]:
def get_all_locks() -> Sequence[PairLock]:
"""
Return all locks, also locks with expired end date
"""
if PairLocks.use_db:
return PairLock.query.all()
return PairLock.get_all_locks().all()
else:
return PairLocks.locks
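The same migration pattern continues in the persistence models below: the legacy Model.query property is replaced by explicit select() statements executed through the scoped session. A condensed sketch of the idiom, mirroring the calls above (assumes init_db() has been called so the session is bound):

    from sqlalchemy import func, select

    from freqtrade.persistence import Trade
    from freqtrade.persistence.models import PairLock

    # 1.x style (removed): PairLock.query.filter(...).all()
    active_locks = PairLock.session.scalars(
        select(PairLock).filter(PairLock.active.is_(True))
    ).all()

    open_trade_count = Trade.session.execute(
        select(func.count(Trade.id)).filter(Trade.is_open.is_(True))
    ).scalar_one()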

View File

@ -5,17 +5,18 @@ import logging
from collections import defaultdict
from datetime import datetime, timedelta, timezone
from math import isclose
from typing import Any, ClassVar, Dict, List, Optional, cast
from typing import Any, ClassVar, Dict, List, Optional, Sequence, cast
from sqlalchemy import Enum, Float, ForeignKey, Integer, String, UniqueConstraint, desc, func
from sqlalchemy.orm import Mapped, Query, lazyload, mapped_column, relationship
from sqlalchemy.orm.scoping import _QueryDescriptorType
from sqlalchemy import (Enum, Float, ForeignKey, Integer, ScalarResult, Select, String,
UniqueConstraint, desc, func, select)
from sqlalchemy.orm import Mapped, lazyload, mapped_column, relationship
from freqtrade.constants import (DATETIME_PRINT_FORMAT, MATH_CLOSE_PREC, NON_OPEN_EXCHANGE_STATES,
BuySell, LongShort)
from freqtrade.enums import ExitType, TradingMode
from freqtrade.exceptions import DependencyException, OperationalException
from freqtrade.exchange import amount_to_contract_precision, price_to_precision
from freqtrade.exchange import (ROUND_DOWN, ROUND_UP, amount_to_contract_precision,
price_to_precision)
from freqtrade.leverage import interest
from freqtrade.persistence.base import ModelBase, SessionType
from freqtrade.util import FtPrecise
@ -36,8 +37,7 @@ class Order(ModelBase):
Mirrors CCXT Order structure
"""
__tablename__ = 'orders'
query: ClassVar[_QueryDescriptorType]
_session: ClassVar[SessionType]
session: ClassVar[SessionType]
# Uniqueness should be ensured over pair, order_id
# its likely that order_id is unique per Pair on some exchanges.
@ -263,12 +263,12 @@ class Order(ModelBase):
return o
@staticmethod
def get_open_orders() -> List['Order']:
def get_open_orders() -> Sequence['Order']:
"""
Retrieve open orders from the database
:return: List of open orders
"""
return Order.query.filter(Order.ft_is_open.is_(True)).all()
return Order.session.scalars(select(Order).filter(Order.ft_is_open.is_(True))).all()
@staticmethod
def order_by_id(order_id: str) -> Optional['Order']:
@ -276,7 +276,7 @@ class Order(ModelBase):
Retrieve order based on order_id
:return: Order or None
"""
return Order.query.filter(Order.order_id == order_id).first()
return Order.session.scalars(select(Order).filter(Order.order_id == order_id)).first()
class LocalTrade():
@ -561,6 +561,9 @@ class LocalTrade():
'trading_mode': self.trading_mode,
'funding_fees': self.funding_fees,
'open_order_id': self.open_order_id,
'amount_precision': self.amount_precision,
'price_precision': self.price_precision,
'precision_mode': self.precision_mode,
'orders': orders,
}
@ -595,7 +598,8 @@ class LocalTrade():
"""
Method used internally to set self.stop_loss.
"""
stop_loss_norm = price_to_precision(stop_loss, self.price_precision, self.precision_mode)
stop_loss_norm = price_to_precision(stop_loss, self.price_precision, self.precision_mode,
rounding_mode=ROUND_DOWN if self.is_short else ROUND_UP)
if not self.stop_loss:
self.initial_stop_loss = stop_loss_norm
self.stop_loss = stop_loss_norm
@ -626,7 +630,8 @@ class LocalTrade():
if self.initial_stop_loss_pct is None or refresh:
self.__set_stop_loss(new_loss, stoploss)
self.initial_stop_loss = price_to_precision(
new_loss, self.price_precision, self.precision_mode)
new_loss, self.price_precision, self.precision_mode,
rounding_mode=ROUND_DOWN if self.is_short else ROUND_UP)
self.initial_stop_loss_pct = -1 * abs(stoploss)
# evaluate if the stop loss needs to be updated
@ -690,21 +695,24 @@ class LocalTrade():
else:
logger.warning(
f'Got different open_order_id {self.open_order_id} != {order.order_id}')
elif order.ft_order_side == 'stoploss' and order.status not in ('open', ):
self.stoploss_order_id = None
self.close_rate_requested = self.stop_loss
self.exit_reason = ExitType.STOPLOSS_ON_EXCHANGE.value
if self.is_open:
logger.info(f'{order.order_type.upper()} is hit for {self}.')
else:
raise ValueError(f'Unknown order type: {order.order_type}')
if order.ft_order_side != self.entry_side:
amount_tr = amount_to_contract_precision(self.amount, self.amount_precision,
self.precision_mode, self.contract_size)
if isclose(order.safe_amount_after_fee, amount_tr, abs_tol=MATH_CLOSE_PREC):
self.close(order.safe_price)
else:
self.recalc_trade_from_orders()
elif order.ft_order_side == 'stoploss' and order.status not in ('canceled', 'open'):
self.stoploss_order_id = None
self.close_rate_requested = self.stop_loss
self.exit_reason = ExitType.STOPLOSS_ON_EXCHANGE.value
if self.is_open:
logger.info(f'{order.order_type.upper()} is hit for {self}.')
self.close(order.safe_price)
else:
raise ValueError(f'Unknown order type: {order.order_type}')
Trade.commit()
def close(self, rate: float, *, show_msg: bool = True) -> None:
@ -1088,6 +1096,11 @@ class LocalTrade():
In live mode, converts the filter to a database query and returns all rows
In Backtest mode, uses filters on Trade.trades to get the result.
:param pair: Filter by pair
:param is_open: Filter by open/closed status
:param open_date: Filter by open_date (filters via trade.open_date > input)
:param close_date: Filter by close_date (filters via trade.close_date > input)
Will implicitly only return closed trades.
:return: unsorted List[Trade]
"""
@ -1148,7 +1161,9 @@ class LocalTrade():
get open trade count
"""
if Trade.use_db:
return Trade.query.filter(Trade.is_open.is_(True)).count()
return Trade.session.execute(
select(func.count(Trade.id)).filter(Trade.is_open.is_(True))
).scalar_one()
else:
return LocalTrade.bt_open_open_trade_count
@ -1181,8 +1196,7 @@ class Trade(ModelBase, LocalTrade):
Note: Fields must be aligned with LocalTrade class
"""
__tablename__ = 'trades'
query: ClassVar[_QueryDescriptorType]
_session: ClassVar[SessionType]
session: ClassVar[SessionType]
use_db: bool = True
@ -1282,18 +1296,18 @@ class Trade(ModelBase, LocalTrade):
def delete(self) -> None:
for order in self.orders:
Order.query.session.delete(order)
Order.session.delete(order)
Trade.query.session.delete(self)
Trade.session.delete(self)
Trade.commit()
@staticmethod
def commit():
Trade.query.session.commit()
Trade.session.commit()
@staticmethod
def rollback():
Trade.query.session.rollback()
Trade.session.rollback()
@staticmethod
def get_trades_proxy(*, pair: Optional[str] = None, is_open: Optional[bool] = None,
@ -1327,7 +1341,7 @@ class Trade(ModelBase, LocalTrade):
)
@staticmethod
def get_trades(trade_filter=None, include_orders: bool = True) -> Query['Trade']:
def get_trades_query(trade_filter=None, include_orders: bool = True) -> Select:
"""
Helper function to query Trades using filters.
NOTE: Not supported in Backtesting.
@ -1342,22 +1356,35 @@ class Trade(ModelBase, LocalTrade):
if trade_filter is not None:
if not isinstance(trade_filter, list):
trade_filter = [trade_filter]
this_query = Trade.query.filter(*trade_filter)
this_query = select(Trade).filter(*trade_filter)
else:
this_query = Trade.query
this_query = select(Trade)
if not include_orders:
# Don't load order relations
# Consider using noload or raiseload instead of lazyload
this_query = this_query.options(lazyload(Trade.orders))
return this_query
@staticmethod
def get_trades(trade_filter=None, include_orders: bool = True) -> ScalarResult['Trade']:
"""
Helper function to query Trades using filters.
NOTE: Not supported in Backtesting.
:param trade_filter: Optional filter to apply to trades
Can be either a Filter object, or a List of filters
e.g. `(trade_filter=[Trade.id == trade_id, Trade.is_open.is_(True),])`
e.g. `(trade_filter=Trade.id == trade_id)`
:return: unsorted query object
"""
return Trade.session.scalars(Trade.get_trades_query(trade_filter, include_orders))
@staticmethod
def get_open_order_trades() -> List['Trade']:
"""
Returns all open trades
NOTE: Not supported in Backtesting.
"""
return Trade.get_trades(Trade.open_order_id.isnot(None)).all()
return cast(List[Trade], Trade.get_trades(Trade.open_order_id.isnot(None)).all())
@staticmethod
def get_open_trades_without_assigned_fees():
@ -1387,11 +1414,12 @@ class Trade(ModelBase, LocalTrade):
Retrieves total realized profit
"""
if Trade.use_db:
total_profit = Trade.query.with_entities(
func.sum(Trade.close_profit_abs)).filter(Trade.is_open.is_(False)).scalar()
total_profit: float = Trade.session.execute(
select(func.sum(Trade.close_profit_abs)).filter(Trade.is_open.is_(False))
).scalar_one()
else:
total_profit = sum(
t.close_profit_abs for t in LocalTrade.get_trades_proxy(is_open=False))
total_profit = sum(t.close_profit_abs # type: ignore
for t in LocalTrade.get_trades_proxy(is_open=False))
return total_profit or 0
@staticmethod
@ -1401,8 +1429,9 @@ class Trade(ModelBase, LocalTrade):
in stake currency
"""
if Trade.use_db:
total_open_stake_amount = Trade.query.with_entities(
func.sum(Trade.stake_amount)).filter(Trade.is_open.is_(True)).scalar()
total_open_stake_amount = Trade.session.scalar(
select(func.sum(Trade.stake_amount)).filter(Trade.is_open.is_(True))
)
else:
total_open_stake_amount = sum(
t.stake_amount for t in LocalTrade.get_trades_proxy(is_open=True))
@ -1418,15 +1447,18 @@ class Trade(ModelBase, LocalTrade):
if minutes:
start_date = datetime.now(timezone.utc) - timedelta(minutes=minutes)
filters.append(Trade.close_date >= start_date)
pair_rates = Trade.query.with_entities(
pair_rates = Trade.session.execute(
select(
Trade.pair,
func.sum(Trade.close_profit).label('profit_sum'),
func.sum(Trade.close_profit_abs).label('profit_sum_abs'),
func.count(Trade.pair).label('count')
).filter(*filters)\
.group_by(Trade.pair) \
.order_by(desc('profit_sum_abs')) \
.all()
).filter(*filters)
.group_by(Trade.pair)
.order_by(desc('profit_sum_abs'))
).all()
return [
{
'pair': pair,
@ -1451,15 +1483,16 @@ class Trade(ModelBase, LocalTrade):
if (pair is not None):
filters.append(Trade.pair == pair)
enter_tag_perf = Trade.query.with_entities(
enter_tag_perf = Trade.session.execute(
select(
Trade.enter_tag,
func.sum(Trade.close_profit).label('profit_sum'),
func.sum(Trade.close_profit_abs).label('profit_sum_abs'),
func.count(Trade.pair).label('count')
).filter(*filters)\
.group_by(Trade.enter_tag) \
.order_by(desc('profit_sum_abs')) \
.all()
).filter(*filters)
.group_by(Trade.enter_tag)
.order_by(desc('profit_sum_abs'))
).all()
return [
{
@ -1483,16 +1516,16 @@ class Trade(ModelBase, LocalTrade):
filters: List = [Trade.is_open.is_(False)]
if (pair is not None):
filters.append(Trade.pair == pair)
sell_tag_perf = Trade.query.with_entities(
sell_tag_perf = Trade.session.execute(
select(
Trade.exit_reason,
func.sum(Trade.close_profit).label('profit_sum'),
func.sum(Trade.close_profit_abs).label('profit_sum_abs'),
func.count(Trade.pair).label('count')
).filter(*filters)\
.group_by(Trade.exit_reason) \
.order_by(desc('profit_sum_abs')) \
.all()
).filter(*filters)
.group_by(Trade.exit_reason)
.order_by(desc('profit_sum_abs'))
).all()
return [
{
@ -1516,18 +1549,18 @@ class Trade(ModelBase, LocalTrade):
filters: List = [Trade.is_open.is_(False)]
if (pair is not None):
filters.append(Trade.pair == pair)
mix_tag_perf = Trade.query.with_entities(
mix_tag_perf = Trade.session.execute(
select(
Trade.id,
Trade.enter_tag,
Trade.exit_reason,
func.sum(Trade.close_profit).label('profit_sum'),
func.sum(Trade.close_profit_abs).label('profit_sum_abs'),
func.count(Trade.pair).label('count')
).filter(*filters)\
.group_by(Trade.id) \
.order_by(desc('profit_sum_abs')) \
.all()
).filter(*filters)
.group_by(Trade.id)
.order_by(desc('profit_sum_abs'))
).all()
return_list: List[Dict] = []
for id, enter_tag, exit_reason, profit, profit_abs, count in mix_tag_perf:
@ -1563,11 +1596,15 @@ class Trade(ModelBase, LocalTrade):
NOTE: Not supported in Backtesting.
:returns: Tuple containing (pair, profit_sum)
"""
best_pair = Trade.query.with_entities(
Trade.pair, func.sum(Trade.close_profit).label('profit_sum')
).filter(Trade.is_open.is_(False) & (Trade.close_date >= start_date)) \
.group_by(Trade.pair) \
.order_by(desc('profit_sum')).first()
best_pair = Trade.session.execute(
select(
Trade.pair,
func.sum(Trade.close_profit).label('profit_sum')
).filter(Trade.is_open.is_(False) & (Trade.close_date >= start_date))
.group_by(Trade.pair)
.order_by(desc('profit_sum'))
).first()
return best_pair
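The hunks above all apply the same migration: legacy `Trade.query.with_entities(...)` chains become SQLAlchemy 2.0 `select()` statements executed through `Trade.session`. A condensed sketch of that pattern (illustrative only, reusing the module's existing imports select / func / desc):

stmt = (
    select(Trade.pair, func.sum(Trade.close_profit_abs).label('profit_sum_abs'))
    .filter(Trade.is_open.is_(False))
    .group_by(Trade.pair)
    .order_by(desc('profit_sum_abs'))
)
rows = Trade.session.execute(stmt).all()                            # Row tuples
total_closed = Trade.session.scalar(select(func.count(Trade.id)))   # single scalar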
@staticmethod
@ -1577,12 +1614,13 @@ class Trade(ModelBase, LocalTrade):
NOTE: Not supported in Backtesting.
:returns: Tuple containing (pair, profit_sum)
"""
trading_volume = Order.query.with_entities(
trading_volume = Trade.session.execute(
select(
func.sum(Order.cost).label('volume')
).filter(
Order.order_filled_date >= start_date,
Order.status == 'closed'
).scalar()
)).scalar_one()
return trading_volume
@staticmethod
@ -1631,8 +1669,10 @@ class Trade(ModelBase, LocalTrade):
stop_loss=data["stop_loss_abs"],
stop_loss_pct=data["stop_loss_ratio"],
stoploss_order_id=data["stoploss_order_id"],
stoploss_last_update=(datetime.fromtimestamp(data["stoploss_last_update"] // 1000,
tz=timezone.utc) if data["stoploss_last_update"] else None),
stoploss_last_update=(
datetime.fromtimestamp(data["stoploss_last_update_timestamp"] // 1000,
tz=timezone.utc)
if data["stoploss_last_update_timestamp"] else None),
initial_stop_loss=data["initial_stop_loss_abs"],
initial_stop_loss_pct=data["initial_stop_loss_ratio"],
min_rate=data["min_rate"],

View File

@ -1,4 +1,5 @@
import logging
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, List, Optional
@ -635,7 +636,7 @@ def load_and_plot_trades(config: Config):
exchange = ExchangeResolver.load_exchange(config['exchange']['name'], config)
IStrategy.dp = DataProvider(config, exchange)
strategy.ft_bot_start()
strategy.bot_loop_start()
strategy.bot_loop_start(datetime.now(timezone.utc))
plot_elements = init_plotscript(config, list(exchange.markets), strategy.startup_candle_count)
timerange = plot_elements['timerange']
trades = plot_elements['trades']

View File

@ -6,6 +6,7 @@ from typing import Any, Dict, Optional
from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import ROUND_UP
from freqtrade.exchange.types import Ticker
from freqtrade.plugins.pairlist.IPairList import IPairList
@ -61,9 +62,10 @@ class PrecisionFilter(IPairList):
stop_price = ticker['last'] * self._stoploss
# Adjust stop-prices to precision
sp = self._exchange.price_to_precision(pair, stop_price)
sp = self._exchange.price_to_precision(pair, stop_price, rounding_mode=ROUND_UP)
stop_gap_price = self._exchange.price_to_precision(pair, stop_price * 0.99)
stop_gap_price = self._exchange.price_to_precision(pair, stop_price * 0.99,
rounding_mode=ROUND_UP)
logger.debug(f"{pair} - {sp} : {stop_gap_price}")
if sp <= stop_gap_price:

View File

@ -5,6 +5,7 @@ import logging
from typing import Any, Dict, Optional
from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
from freqtrade.exchange.types import Ticker
from freqtrade.plugins.pairlist.IPairList import IPairList
@ -22,6 +23,12 @@ class SpreadFilter(IPairList):
self._max_spread_ratio = pairlistconfig.get('max_spread_ratio', 0.005)
self._enabled = self._max_spread_ratio != 0
if not self._exchange.get_option('tickers_have_bid_ask'):
raise OperationalException(
f"{self.name} requires exchange to have bid/ask data for tickers, "
"which is not available for the selected exchange / trading mode."
)
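# Example pairlist entry enabling this filter (illustrative, values as documented):
#   "pairlists": [{"method": "SpreadFilter", "max_spread_ratio": 0.005}]
# The new check above makes the incompatibility explicit at startup when the
# selected exchange / trading mode does not provide bid/ask data in tickers.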
@property
def needstickers(self) -> bool:
"""

View File

@ -276,6 +276,10 @@ class TradeSchema(BaseModel):
funding_fees: Optional[float]
trading_mode: Optional[TradingMode]
amount_precision: Optional[float]
price_precision: Optional[float]
precision_mode: Optional[int]
class OpenTradeSchema(TradeSchema):
stoploss_current_dist: Optional[float]
@ -286,6 +290,7 @@ class OpenTradeSchema(TradeSchema):
current_rate: float
total_profit_abs: float
total_profit_fiat: Optional[float]
total_profit_ratio: Optional[float]
open_order: Optional[str]
@ -310,7 +315,7 @@ class LockModel(BaseModel):
lock_timestamp: int
pair: str
side: str
reason: str
reason: Optional[str]
class Locks(BaseModel):

View File

@ -42,7 +42,8 @@ logger = logging.getLogger(__name__)
# 2.22: Add FreqAI to backtesting
# 2.23: Allow plot config request in webserver mode
# 2.24: Add cancel_open_order endpoint
API_VERSION = 2.24
# 2.25: Add several profit values to /status endpoint
API_VERSION = 2.25
# Public API, requires no auth.
router_public = APIRouter()

View File

@ -1,9 +1,11 @@
from typing import Any, Dict, Iterator, Optional
from typing import Any, AsyncIterator, Dict, Optional
from uuid import uuid4
from fastapi import Depends
from freqtrade.enums import RunMode
from freqtrade.persistence import Trade
from freqtrade.persistence.models import _request_id_ctx_var
from freqtrade.rpc.rpc import RPC, RPCException
from .webserver import ApiServer
@ -15,12 +17,19 @@ def get_rpc_optional() -> Optional[RPC]:
return None
def get_rpc() -> Optional[Iterator[RPC]]:
async def get_rpc() -> Optional[AsyncIterator[RPC]]:
_rpc = get_rpc_optional()
if _rpc:
request_id = str(uuid4())
ctx_token = _request_id_ctx_var.set(request_id)
Trade.rollback()
try:
yield _rpc
Trade.rollback()
finally:
Trade.session.remove()
_request_id_ctx_var.reset(ctx_token)
else:
raise RPCException('Bot is not in the correct state')
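A hedged sketch of how this async dependency is consumed; the router and endpoint below are hypothetical and exist only to illustrate the per-request session handling (real endpoints live in the api_v1 router modules):

from fastapi import APIRouter, Depends

from freqtrade.rpc.rpc import RPC

router = APIRouter()  # hypothetical router, illustration only


@router.get('/example_ping')
def example_ping(rpc: RPC = Depends(get_rpc)):
    # get_rpc() sets a fresh request-id context, rolls back stale state and
    # removes the scoped session again in its finally block after the response.
    return {'status': 'pong', 'bot_available': rpc is not None}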

View File

@ -55,7 +55,7 @@ class UvicornServer(uvicorn.Server):
@contextlib.contextmanager
def run_in_thread(self):
self.thread = threading.Thread(target=self.run)
self.thread = threading.Thread(target=self.run, name='FTUvicorn')
self.thread.start()
while not self.started:
time.sleep(1e-3)

View File

@ -13,6 +13,7 @@ from freqtrade.exceptions import OperationalException
from freqtrade.rpc.api_server.uvicorn_threaded import UvicornServer
from freqtrade.rpc.api_server.ws.message_stream import MessageStream
from freqtrade.rpc.rpc import RPC, RPCException, RPCHandler
from freqtrade.rpc.rpc_types import RPCSendMsg
logger = logging.getLogger(__name__)
@ -108,7 +109,7 @@ class ApiServer(RPCHandler):
cls._has_rpc = False
cls._rpc = None
def send_msg(self, msg: Dict[str, Any]) -> None:
def send_msg(self, msg: RPCSendMsg) -> None:
"""
Publish the message to the message stream
"""

View File

@ -5,7 +5,7 @@ import logging
from abc import abstractmethod
from datetime import date, datetime, timedelta, timezone
from math import isnan
from typing import Any, Dict, Generator, List, Optional, Tuple, Union
from typing import Any, Dict, Generator, List, Optional, Sequence, Tuple, Union
import arrow
import psutil
@ -13,6 +13,7 @@ from dateutil.relativedelta import relativedelta
from dateutil.tz import tzlocal
from numpy import NAN, inf, int64, mean
from pandas import DataFrame, NaT
from sqlalchemy import func, select
from freqtrade import __version__
from freqtrade.configuration.timerange import TimeRange
@ -29,6 +30,7 @@ from freqtrade.persistence import Order, PairLocks, Trade
from freqtrade.persistence.models import PairLock
from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
from freqtrade.rpc.fiat_convert import CryptoToFiatConverter
from freqtrade.rpc.rpc_types import RPCSendMsg
from freqtrade.wallets import PositionWallet, Wallet
@ -78,7 +80,7 @@ class RPCHandler:
""" Cleanup pending module resources """
@abstractmethod
def send_msg(self, msg: Dict[str, str]) -> None:
def send_msg(self, msg: RPCSendMsg) -> None:
""" Sends a message to all registered rpc modules """
@ -122,7 +124,8 @@ class RPC:
if config['max_open_trades'] != float('inf') else -1),
'minimal_roi': config['minimal_roi'].copy() if 'minimal_roi' in config else {},
'stoploss': config.get('stoploss'),
'stoploss_on_exchange': config.get('stoploss_on_exchange', False),
'stoploss_on_exchange': config.get('order_types',
{}).get('stoploss_on_exchange', False),
'trailing_stop': config.get('trailing_stop'),
'trailing_stop_positive': config.get('trailing_stop_positive'),
'trailing_stop_positive_offset': config.get('trailing_stop_positive_offset'),
@ -158,7 +161,7 @@ class RPC:
"""
# Fetch open trades
if trade_ids:
trades: List[Trade] = Trade.get_trades(trade_filter=Trade.id.in_(trade_ids)).all()
trades: Sequence[Trade] = Trade.get_trades(trade_filter=Trade.id.in_(trade_ids)).all()
else:
trades = Trade.get_open_trades()
@ -192,6 +195,11 @@ class RPC:
current_profit = trade.close_profit or 0.0
current_profit_abs = trade.close_profit_abs or 0.0
total_profit_abs = trade.realized_profit + current_profit_abs
total_profit_ratio: Optional[float] = None
if trade.max_stake_amount:
total_profit_ratio = (
(total_profit_abs / trade.max_stake_amount) * trade.leverage
)
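# Worked example (illustrative, not part of this change): with max_stake_amount=100,
# total_profit_abs=6 and leverage=3, total_profit_ratio = (6 / 100) * 3 = 0.18,
# i.e. 18% measured against the margin actually committed to the position.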
# Calculate fiat profit
if not isnan(current_profit_abs) and self._fiat_converter:
@ -224,6 +232,7 @@ class RPC:
total_profit_abs=total_profit_abs,
total_profit_fiat=total_profit_fiat,
total_profit_ratio=total_profit_ratio,
stoploss_current_dist=stoploss_current_dist,
stoploss_current_dist_ratio=round(stoploss_current_dist_ratio, 8),
stoploss_current_dist_pct=round(stoploss_current_dist_ratio * 100, 2),
@ -333,11 +342,13 @@ class RPC:
for day in range(0, timescale):
profitday = start_date - time_offset(day)
# Only query for necessary columns for performance reasons.
trades = Trade.query.session.query(Trade.close_profit_abs).filter(
Trade.is_open.is_(False),
trades = Trade.session.execute(
select(Trade.close_profit_abs)
.filter(Trade.is_open.is_(False),
Trade.close_date >= profitday,
Trade.close_date < (profitday + time_offset(1))
).order_by(Trade.close_date).all()
Trade.close_date < (profitday + time_offset(1)))
.order_by(Trade.close_date)
).all()
curdayprofit = sum(
trade.close_profit_abs for trade in trades if trade.close_profit_abs is not None)
@ -375,19 +386,25 @@ class RPC:
""" Returns the X last trades """
order_by: Any = Trade.id if order_by_id else Trade.close_date.desc()
if limit:
trades = Trade.get_trades([Trade.is_open.is_(False)]).order_by(
order_by).limit(limit).offset(offset)
trades = Trade.session.scalars(
Trade.get_trades_query([Trade.is_open.is_(False)])
.order_by(order_by)
.limit(limit)
.offset(offset))
else:
trades = Trade.get_trades([Trade.is_open.is_(False)]).order_by(
Trade.close_date.desc())
trades = Trade.session.scalars(
Trade.get_trades_query([Trade.is_open.is_(False)])
.order_by(Trade.close_date.desc()))
output = [trade.to_json() for trade in trades]
total_trades = Trade.session.scalar(
select(func.count(Trade.id)).filter(Trade.is_open.is_(False)))
return {
"trades": output,
"trades_count": len(output),
"offset": offset,
"total_trades": Trade.get_trades([Trade.is_open.is_(False)]).count(),
"total_trades": total_trades,
}
def _rpc_stats(self) -> Dict[str, Any]:
@ -429,8 +446,8 @@ class RPC:
""" Returns cumulative profit statistics """
trade_filter = ((Trade.is_open.is_(False) & (Trade.close_date >= start_date)) |
Trade.is_open.is_(True))
trades: List[Trade] = Trade.get_trades(
trade_filter, include_orders=False).order_by(Trade.id).all()
trades: Sequence[Trade] = Trade.session.scalars(Trade.get_trades_query(
trade_filter, include_orders=False).order_by(Trade.id)).all()
profit_all_coin = []
profit_all_ratio = []
@ -939,12 +956,12 @@ class RPC:
def _rpc_delete_lock(self, lockid: Optional[int] = None,
pair: Optional[str] = None) -> Dict[str, Any]:
""" Delete specific lock(s) """
locks = []
locks: Sequence[PairLock] = []
if pair:
locks = PairLocks.get_pair_locks(pair)
if lockid:
locks = PairLock.query.filter(PairLock.id == lockid).all()
locks = PairLock.session.scalars(select(PairLock).filter(PairLock.id == lockid)).all()
for lock in locks:
lock.active = False

View File

@ -3,11 +3,12 @@ This module contains class to manage RPC communications (Telegram, API, ...)
"""
import logging
from collections import deque
from typing import Any, Dict, List
from typing import List
from freqtrade.constants import Config
from freqtrade.enums import NO_ECHO_MESSAGES, RPCMessageType
from freqtrade.rpc import RPC, RPCHandler
from freqtrade.rpc.rpc_types import RPCSendMsg
logger = logging.getLogger(__name__)
@ -58,7 +59,7 @@ class RPCManager:
mod.cleanup()
del mod
def send_msg(self, msg: Dict[str, Any]) -> None:
def send_msg(self, msg: RPCSendMsg) -> None:
"""
Send given message to all registered rpc modules.
A message consists of one or more key-value pairs of strings.
@ -69,10 +70,6 @@ class RPCManager:
"""
if msg.get('type') not in NO_ECHO_MESSAGES:
logger.info('Sending rpc message: %s', msg)
if 'pair' in msg:
msg.update({
'base_currency': self._rpc._freqtrade.exchange.get_pair_base_currency(msg['pair'])
})
for mod in self.registered_modules:
logger.debug('Forwarding message to rpc.%s', mod.name)
try:

128
freqtrade/rpc/rpc_types.py Normal file
View File

@ -0,0 +1,128 @@
from datetime import datetime
from typing import Any, List, Literal, Optional, TypedDict, Union
from freqtrade.constants import PairWithTimeframe
from freqtrade.enums import RPCMessageType
class RPCSendMsgBase(TypedDict):
pass
# Each subclass narrows this via `type: Literal[...]` below.
class RPCStatusMsg(RPCSendMsgBase):
"""Used for Status, Startup and Warning messages"""
type: Literal[RPCMessageType.STATUS, RPCMessageType.STARTUP, RPCMessageType.WARNING]
status: str
class RPCStrategyMsg(RPCSendMsgBase):
"""Used for Status, Startup and Warning messages"""
type: Literal[RPCMessageType.STRATEGY_MSG]
msg: str
class RPCProtectionMsg(RPCSendMsgBase):
type: Literal[RPCMessageType.PROTECTION_TRIGGER, RPCMessageType.PROTECTION_TRIGGER_GLOBAL]
id: int
pair: str
base_currency: Optional[str]
lock_time: str
lock_timestamp: int
lock_end_time: str
lock_end_timestamp: int
reason: str
side: str
active: bool
class RPCWhitelistMsg(RPCSendMsgBase):
type: Literal[RPCMessageType.WHITELIST]
data: List[str]
class __RPCBuyMsgBase(RPCSendMsgBase):
trade_id: int
buy_tag: Optional[str]
enter_tag: Optional[str]
exchange: str
pair: str
base_currency: str
leverage: Optional[float]
direction: str
limit: float
open_rate: float
order_type: str
stake_amount: float
stake_currency: str
fiat_currency: Optional[str]
amount: float
open_date: datetime
current_rate: Optional[float]
sub_trade: bool
class RPCBuyMsg(__RPCBuyMsgBase):
type: Literal[RPCMessageType.ENTRY, RPCMessageType.ENTRY_FILL]
class RPCCancelMsg(__RPCBuyMsgBase):
type: Literal[RPCMessageType.ENTRY_CANCEL]
reason: str
class RPCSellMsg(__RPCBuyMsgBase):
type: Literal[RPCMessageType.EXIT, RPCMessageType.EXIT_FILL]
cumulative_profit: float
gain: str # Literal["profit", "loss"]
close_rate: float
profit_amount: float
profit_ratio: float
sell_reason: Optional[str]
exit_reason: Optional[str]
close_date: datetime
# current_rate: Optional[float]
order_rate: Optional[float]
class RPCSellCancelMsg(__RPCBuyMsgBase):
type: Literal[RPCMessageType.EXIT_CANCEL]
reason: str
gain: str # Literal["profit", "loss"]
profit_amount: float
profit_ratio: float
sell_reason: Optional[str]
exit_reason: Optional[str]
close_date: datetime
class _AnalyzedDFData(TypedDict):
key: PairWithTimeframe
df: Any
la: datetime
class RPCAnalyzedDFMsg(RPCSendMsgBase):
"""New Analyzed dataframe message"""
type: Literal[RPCMessageType.ANALYZED_DF]
data: _AnalyzedDFData
class RPCNewCandleMsg(RPCSendMsgBase):
"""New candle ping message, issued once per new candle/pair"""
type: Literal[RPCMessageType.NEW_CANDLE]
data: PairWithTimeframe
RPCSendMsg = Union[
RPCStatusMsg,
RPCStrategyMsg,
RPCProtectionMsg,
RPCWhitelistMsg,
RPCBuyMsg,
RPCCancelMsg,
RPCSellMsg,
RPCSellCancelMsg,
RPCAnalyzedDFMsg,
RPCNewCandleMsg
]
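To illustrate how the new typed messages are meant to be used (sketch only; the helper and its argument are made up), a producer builds one of the TypedDicts and hands it to any RPCHandler's send_msg, which now accepts the RPCSendMsg union:

def notify_startup(rpc_manager) -> None:  # hypothetical helper
    msg: RPCStatusMsg = {
        'type': RPCMessageType.STATUS,
        'status': 'Bot started',
    }
    rpc_manager.send_msg(msg)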

View File

@ -30,6 +30,7 @@ from freqtrade.exceptions import OperationalException
from freqtrade.misc import chunks, plural, round_coin_value
from freqtrade.persistence import Trade
from freqtrade.rpc import RPC, RPCException, RPCHandler
from freqtrade.rpc.rpc_types import RPCSendMsg
logger = logging.getLogger(__name__)
@ -83,6 +84,8 @@ def authorized_only(command_handler: Callable[..., None]) -> Callable[..., Any]:
self._send_msg(str(e))
except BaseException:
logger.exception('Exception occurred within Telegram module')
finally:
Trade.session.remove()
return wrapper
@ -414,6 +417,9 @@ class Telegram(RPCHandler):
elif msg_type == RPCMessageType.WARNING:
message = f"\N{WARNING SIGN} *Warning:* `{msg['status']}`"
elif msg_type == RPCMessageType.EXCEPTION:
# Errors will contain exceptions, which are wrapped in triple backticks.
message = f"\N{WARNING SIGN} *ERROR:* \n {msg['status']}"
elif msg_type == RPCMessageType.STARTUP:
message = f"{msg['status']}"
@ -424,14 +430,14 @@ class Telegram(RPCHandler):
return None
return message
def send_msg(self, msg: Dict[str, Any]) -> None:
def send_msg(self, msg: RPCSendMsg) -> None:
""" Send a message to telegram channel """
default_noti = 'on'
msg_type = msg['type']
noti = ''
if msg_type == RPCMessageType.EXIT:
if msg['type'] == RPCMessageType.EXIT:
sell_noti = self._config['telegram'] \
.get('notification_settings', {}).get(str(msg_type), {})
# For backward compatibility sell still can be string
@ -448,7 +454,7 @@ class Telegram(RPCHandler):
# Notification disabled
return
message = self.compose_message(deepcopy(msg), msg_type)
message = self.compose_message(deepcopy(msg), msg_type) # type: ignore
if message:
self._send_msg(message, disable_notification=(noti == 'silent'))
@ -510,14 +516,14 @@ class Telegram(RPCHandler):
if prev_avg_price:
minus_on_entry = (cur_entry_average - prev_avg_price) / prev_avg_price
lines.append(f"*{wording} #{order_nr}:* at {minus_on_entry:.2%} avg profit")
lines.append(f"*{wording} #{order_nr}:* at {minus_on_entry:.2%} avg Profit")
if is_open:
lines.append("({})".format(cur_entry_datetime
.humanize(granularity=["day", "hour", "minute"])))
lines.append(f"*Amount:* {cur_entry_amount} "
f"({round_coin_value(order['cost'], quote_currency)})")
lines.append(f"*Average {wording} Price:* {cur_entry_average} "
f"({price_to_1st_entry:.2%} from 1st entry rate)")
f"({price_to_1st_entry:.2%} from 1st entry Rate)")
lines.append(f"*Order filled:* {order['order_filled_date']}")
# TODO: is this really useful?
@ -569,6 +575,8 @@ class Telegram(RPCHandler):
and not o['ft_order_side'] == 'stoploss'])
r['exit_reason'] = r.get('exit_reason', "")
r['stake_amount_r'] = round_coin_value(r['stake_amount'], r['quote_currency'])
r['max_stake_amount_r'] = round_coin_value(
r['max_stake_amount'] or r['stake_amount'], r['quote_currency'])
r['profit_abs_r'] = round_coin_value(r['profit_abs'], r['quote_currency'])
r['realized_profit_r'] = round_coin_value(r['realized_profit'], r['quote_currency'])
r['total_profit_abs_r'] = round_coin_value(
@ -580,31 +588,37 @@ class Telegram(RPCHandler):
f"*Direction:* {'`Short`' if r.get('is_short') else '`Long`'}"
+ " ` ({leverage}x)`" if r.get('leverage') else "",
"*Amount:* `{amount} ({stake_amount_r})`",
"*Total invested:* `{max_stake_amount_r}`" if position_adjust else "",
"*Enter Tag:* `{enter_tag}`" if r['enter_tag'] else "",
"*Exit Reason:* `{exit_reason}`" if r['exit_reason'] else "",
]
if position_adjust:
max_buy_str = (f"/{max_entries + 1}" if (max_entries > 0) else "")
lines.append("*Number of Entries:* `{num_entries}" + max_buy_str + "`")
lines.append("*Number of Exits:* `{num_exits}`")
lines.extend([
"*Number of Entries:* `{num_entries}" + max_buy_str + "`",
"*Number of Exits:* `{num_exits}`"
])
lines.extend([
"*Open Rate:* `{open_rate:.8f}`",
"*Close Rate:* `{close_rate:.8f}`" if r['close_rate'] else "",
"*Open Date:* `{open_date}`",
"*Close Date:* `{close_date}`" if r['close_date'] else "",
"*Current Rate:* `{current_rate:.8f}`" if r['is_open'] else "",
" \n*Current Rate:* `{current_rate:.8f}`" if r['is_open'] else "",
("*Unrealized Profit:* " if r['is_open'] else "*Close Profit: *")
+ "`{profit_ratio:.2%}` `({profit_abs_r})`",
])
if r['is_open']:
if r.get('realized_profit'):
lines.append(
"*Realized Profit:* `{realized_profit_r} {realized_profit_ratio:.2%}`")
lines.append("*Total Profit:* `{total_profit_abs_r}` ")
lines.extend([
"*Realized Profit:* `{realized_profit_ratio:.2%} ({realized_profit_r})`",
"*Total Profit:* `{total_profit_ratio:.2%} ({total_profit_abs_r})`"
])
# Append empty line to improve readability
lines.append(" ")
if (r['stop_loss_abs'] != r['initial_stop_loss_abs']
and r['initial_stop_loss_ratio'] is not None):
# Adding initial stoploss only if it is different from stoploss
@ -1329,7 +1343,7 @@ class Telegram(RPCHandler):
message = tabulate({k: [v] for k, v in counts.items()},
headers=['current', 'max', 'total stake'],
tablefmt='simple')
message = "<pre>{}</pre>".format(message)
message = f"<pre>{message}</pre>"
logger.debug(message)
self._send_msg(message, parse_mode=ParseMode.HTML,
reload_able=True, callback_path="update_count",
@ -1631,7 +1645,7 @@ class Telegram(RPCHandler):
])
else:
reply_markup = InlineKeyboardMarkup([[]])
msg += "\nUpdated: {}".format(datetime.now().ctime())
msg += f"\nUpdated: {datetime.now().ctime()}"
if not query.message:
return
chat_id = query.message.chat_id

View File

@ -10,6 +10,7 @@ from requests import RequestException, post
from freqtrade.constants import Config
from freqtrade.enums import RPCMessageType
from freqtrade.rpc import RPC, RPCHandler
from freqtrade.rpc.rpc_types import RPCSendMsg
logger = logging.getLogger(__name__)
@ -41,7 +42,7 @@ class Webhook(RPCHandler):
"""
pass
def _get_value_dict(self, msg: Dict[str, Any]) -> Optional[Dict[str, Any]]:
def _get_value_dict(self, msg: RPCSendMsg) -> Optional[Dict[str, Any]]:
whconfig = self._config['webhook']
# Deprecated 2022.10 - only keep generic method.
if msg['type'] in [RPCMessageType.ENTRY]:
@ -58,6 +59,7 @@ class Webhook(RPCHandler):
valuedict = whconfig.get('webhookexitcancel')
elif msg['type'] in (RPCMessageType.STATUS,
RPCMessageType.STARTUP,
RPCMessageType.EXCEPTION,
RPCMessageType.WARNING):
valuedict = whconfig.get('webhookstatus')
elif msg['type'].value in whconfig:
@ -74,7 +76,7 @@ class Webhook(RPCHandler):
return None
return valuedict
def send_msg(self, msg: Dict[str, Any]) -> None:
def send_msg(self, msg: RPCSendMsg) -> None:
""" Send a message to telegram channel """
try:
@ -112,7 +114,7 @@ class Webhook(RPCHandler):
response = post(self._url, data=payload['data'],
headers={'Content-Type': 'text/plain'})
else:
raise NotImplementedError('Unknown format: {}'.format(self._format))
raise NotImplementedError(f'Unknown format: {self._format}')
# Throw a RequestException if the post was not successful
response.raise_for_status()

View File

@ -8,7 +8,7 @@ from typing import Any, Dict, Iterator, List, Optional, Tuple, Type, Union
from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
from freqtrade.misc import deep_merge_dicts, json_load
from freqtrade.misc import deep_merge_dicts
from freqtrade.optimize.hyperopt_tools import HyperoptTools
from freqtrade.strategy.parameters import BaseParameter
@ -124,8 +124,7 @@ class HyperStrategyMixin:
if filename.is_file():
logger.info(f"Loading parameters from file {filename}")
try:
with filename.open('r') as f:
params = json_load(f)
params = HyperoptTools.load_params(filename)
if params.get('strategy_name') != self.__class__.__name__:
raise OperationalException('Invalid parameter file provided.')
return params

View File

@ -251,11 +251,12 @@ class IStrategy(ABC, HyperStrategyMixin):
"""
pass
def bot_loop_start(self, **kwargs) -> None:
def bot_loop_start(self, current_time: datetime, **kwargs) -> None:
"""
Called at the start of the bot iteration (one loop).
Might be used to perform pair-independent tasks
(e.g. gather some remote resource for comparison)
:param current_time: datetime object, containing the current datetime
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
"""
pass
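A minimal sketch of a strategy adopting the new signature (the strategy class and attribute below are made up; only the callback signature comes from this change):

from datetime import datetime

from freqtrade.strategy import IStrategy


class MyAwesomeStrategy(IStrategy):  # hypothetical, partial strategy
    timeframe = '5m'

    def bot_loop_start(self, current_time: datetime, **kwargs) -> None:
        # current_time is injected by the bot (and by the backtester),
        # so the strategy no longer needs to call datetime.now() itself.
        self._last_loop_time = current_time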

View File

@ -86,37 +86,41 @@ def merge_informative_pair(dataframe: pd.DataFrame, informative: pd.DataFrame,
def stoploss_from_open(
open_relative_stop: float,
current_profit: float,
is_short: bool = False
is_short: bool = False,
leverage: float = 1.0
) -> float:
"""
Given the current profit, and a desired stop loss value relative to the open price,
Given the current profit, and a desired stop loss value relative to the trade entry price,
return a stop loss value that is relative to the current price, and which can be
returned from `custom_stoploss`.
The requested stop can be positive for a stop above the open price, or negative for
a stop below the open price. The return value is always >= 0.
`open_relative_stop` will be considered as adjusted for leverage if leverage is provided.
Returns 0 if the resulting stop price would be above/below (longs/shorts) the current price
:param open_relative_stop: Desired stop loss percentage relative to open price
:param open_relative_stop: Desired stop loss percentage, relative to the open price,
adjusted for leverage
:param current_profit: The current profit percentage
:param is_short: When true, perform the calculation for short instead of long
:param leverage: Leverage to use for the calculation
:return: Stop loss value relative to current price
"""
# formula is undefined for current_profit -1 (longs) or 1 (shorts), return maximum value
if (current_profit == -1 and not is_short) or (is_short and current_profit == 1):
_current_profit = current_profit / leverage
if (_current_profit == -1 and not is_short) or (is_short and _current_profit == 1):
return 1
if is_short is True:
stoploss = -1 + ((1 - open_relative_stop) / (1 - current_profit))
stoploss = -1 + ((1 - open_relative_stop / leverage) / (1 - _current_profit))
else:
stoploss = 1 - ((1 + open_relative_stop) / (1 + current_profit))
stoploss = 1 - ((1 + open_relative_stop / leverage) / (1 + _current_profit))
# negative stoploss values indicate the requested stop price is higher/lower
# (long/short) than the current price
return max(stoploss, 0.0)
return max(stoploss * leverage, 0.0)
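A quick worked example of the leverage-aware formula above (illustrative; values chosen for readability):

stoploss_from_open(0.07, 0.25)                # 1x long: 1 - (1.07 / 1.25) = 0.144
stoploss_from_open(0.10, 0.50, leverage=5.0)  # 5x long, leveraged inputs: ~0.3636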
def stoploss_from_absolute(stop_rate: float, current_rate: float, is_short: bool = False) -> float:

View File

@ -0,0 +1,255 @@
import shutil
from pathlib import Path
import ast_comments
from freqtrade.constants import Config
class StrategyUpdater:
name_mapping = {
'ticker_interval': 'timeframe',
'buy': 'enter_long',
'sell': 'exit_long',
'buy_tag': 'enter_tag',
'sell_reason': 'exit_reason',
'sell_signal': 'exit_signal',
'custom_sell': 'custom_exit',
'force_sell': 'force_exit',
'emergency_sell': 'emergency_exit',
# Strategy/config settings:
'use_sell_signal': 'use_exit_signal',
'sell_profit_only': 'exit_profit_only',
'sell_profit_offset': 'exit_profit_offset',
'ignore_roi_if_buy_signal': 'ignore_roi_if_entry_signal',
'forcebuy_enable': 'force_entry_enable',
}
function_mapping = {
'populate_buy_trend': 'populate_entry_trend',
'populate_sell_trend': 'populate_exit_trend',
'custom_sell': 'custom_exit',
'check_buy_timeout': 'check_entry_timeout',
'check_sell_timeout': 'check_exit_timeout',
# '': '',
}
# order_time_in_force, order_types, unfilledtimeout
otif_ot_unfilledtimeout = {
'buy': 'entry',
'sell': 'exit',
}
# create a dictionary that maps the old column names to the new ones
rename_dict = {'buy': 'enter_long', 'sell': 'exit_long', 'buy_tag': 'enter_tag'}
def start(self, config: Config, strategy_obj: dict) -> None:
"""
Run strategy updater
It updates a strategy to v3 with the help of the ast-module
:return: None
"""
source_file = strategy_obj['location']
strategies_backup_folder = Path.joinpath(config['user_data_dir'], "strategies_orig_updater")
target_file = Path.joinpath(strategies_backup_folder, strategy_obj['location_rel'])
# read the file
with Path(source_file).open('r') as f:
old_code = f.read()
if not strategies_backup_folder.is_dir():
Path(strategies_backup_folder).mkdir(parents=True, exist_ok=True)
# backup original
# => currently no date after the filename,
# could get overridden pretty fast if this is fired twice!
# The folder is always the same and the file name too (currently).
shutil.copy(source_file, target_file)
# update the code
new_code = self.update_code(old_code)
# write the modified code to the destination folder
with Path(source_file).open('w') as f:
f.write(new_code)
# define the function to update the code
def update_code(self, code):
# parse the code into an AST
tree = ast_comments.parse(code)
# use the AST to update the code
updated_code = self.modify_ast(tree)
# return the modified code without executing it
return updated_code
# function that uses the ast module to update the code
def modify_ast(self, tree): # noqa
# use the visitor to update the names and functions in the AST
NameUpdater().visit(tree)
# first fix the comments, so it understands "\n" properly inside multi line comments.
ast_comments.fix_missing_locations(tree)
ast_comments.increment_lineno(tree, n=1)
# generate the new code from the updated AST
# without indent {} parameters would just be written straight one after the other.
# ast_comments is used since it is currently the only solution that carries comments
# through the parse / unparse round-trip, so the rewritten strategy keeps them.
# return ast_comments.unparse(tree)
return ast_comments.unparse(tree)
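For illustration (not part of the module), a sketch of the kind of rewrite update_code performs; the exact output formatting depends on the ast unparser:

old = "dataframe.loc[dataframe['rsi'] < 30, 'buy'] = 1"
new = StrategyUpdater().update_code(old)
# new is expected to use the v3 column name, roughly:
# dataframe.loc[dataframe['rsi'] < 30, 'enter_long'] = 1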
# Here we go through each respective node, slice, elt, key ... to replace outdated entries.
class NameUpdater(ast_comments.NodeTransformer):
def generic_visit(self, node):
# space is not yet transferred from buy/sell to entry/exit and therefore has to be skipped.
if isinstance(node, ast_comments.keyword):
if node.arg == "space":
return node
# from here on this is the original function.
for field, old_value in ast_comments.iter_fields(node):
if isinstance(old_value, list):
new_values = []
for value in old_value:
if isinstance(value, ast_comments.AST):
value = self.visit(value)
if value is None:
continue
elif not isinstance(value, ast_comments.AST):
new_values.extend(value)
continue
new_values.append(value)
old_value[:] = new_values
elif isinstance(old_value, ast_comments.AST):
new_node = self.visit(old_value)
if new_node is None:
delattr(node, field)
else:
setattr(node, field, new_node)
return node
def visit_Expr(self, node):
if hasattr(node.value, "left") and hasattr(node.value.left, "id"):
node.value.left.id = self.check_dict(StrategyUpdater.name_mapping, node.value.left.id)
self.visit(node.value)
return node
# Renames an element if contained inside a dictionary.
@staticmethod
def check_dict(current_dict: dict, element: str):
if element in current_dict:
element = current_dict[element]
return element
def visit_arguments(self, node):
if isinstance(node.args, list):
for arg in node.args:
arg.arg = self.check_dict(StrategyUpdater.name_mapping, arg.arg)
return node
def visit_Name(self, node):
# if the name is in the mapping, update it
node.id = self.check_dict(StrategyUpdater.name_mapping, node.id)
return node
def visit_Import(self, node):
# do not update the names in import statements
return node
def visit_ImportFrom(self, node):
# if hasattr(node, "module"):
# if node.module == "freqtrade.strategy.hyper":
# node.module = "freqtrade.strategy"
return node
def visit_If(self, node: ast_comments.If):
for child in ast_comments.iter_child_nodes(node):
self.visit(child)
return node
def visit_FunctionDef(self, node):
node.name = self.check_dict(StrategyUpdater.function_mapping, node.name)
self.generic_visit(node)
return node
def visit_Attribute(self, node):
if (
isinstance(node.value, ast_comments.Name)
and node.value.id == 'trade'
and node.attr == 'nr_of_successful_buys'
):
node.attr = 'nr_of_successful_entries'
return node
def visit_ClassDef(self, node):
# check if the class is derived from IStrategy
if any(isinstance(base, ast_comments.Name) and
base.id == 'IStrategy' for base in node.bases):
# check if the INTERFACE_VERSION variable exists
has_interface_version = any(
isinstance(child, ast_comments.Assign) and
isinstance(child.targets[0], ast_comments.Name) and
child.targets[0].id == 'INTERFACE_VERSION'
for child in node.body
)
# if the INTERFACE_VERSION variable does not exist, add it as the first child
if not has_interface_version:
node.body.insert(0, ast_comments.parse('INTERFACE_VERSION = 3').body[0])
# otherwise, update its value to 3
else:
for child in node.body:
if (
isinstance(child, ast_comments.Assign)
and isinstance(child.targets[0], ast_comments.Name)
and child.targets[0].id == 'INTERFACE_VERSION'
):
child.value = ast_comments.parse('3').body[0].value
self.generic_visit(node)
return node
def visit_Subscript(self, node):
if isinstance(node.slice, ast_comments.Constant):
if node.slice.value in StrategyUpdater.rename_dict:
# Replace the slice attributes with the values from rename_dict
node.slice.value = StrategyUpdater.rename_dict[node.slice.value]
if hasattr(node.slice, "elts"):
self.visit_elts(node.slice.elts)
if hasattr(node.slice, "value"):
if hasattr(node.slice.value, "elts"):
self.visit_elts(node.slice.value.elts)
return node
# elts can have elts (technically recursively)
def visit_elts(self, elts):
if isinstance(elts, list):
for elt in elts:
self.visit_elt(elt)
else:
self.visit_elt(elts)
return elts
# A sub-function is needed again since the structure itself is highly flexible ...
def visit_elt(self, elt):
if isinstance(elt, ast_comments.Constant) and elt.value in StrategyUpdater.rename_dict:
elt.value = StrategyUpdater.rename_dict[elt.value]
if hasattr(elt, "elts"):
self.visit_elts(elt.elts)
if hasattr(elt, "args"):
if isinstance(elt.args, ast_comments.arguments):
self.visit_elts(elt.args)
else:
for arg in elt.args:
self.visit_elts(arg)
return elt
def visit_Constant(self, node):
node.value = self.check_dict(StrategyUpdater.otif_ot_unfilledtimeout, node.value)
node.value = self.check_dict(StrategyUpdater.name_mapping, node.value)
return node

View File

@ -1,5 +1,5 @@
def bot_loop_start(self, **kwargs) -> None:
def bot_loop_start(self, current_time: datetime, **kwargs) -> None:
"""
Called at the start of the bot iteration (one loop).
Might be used to perform pair-independent tasks
@ -8,6 +8,7 @@ def bot_loop_start(self, **kwargs) -> None:
For full documentation please go to https://www.freqtrade.io/en/latest/strategy-advanced/
When not implemented by a strategy, this simply does nothing.
:param current_time: datetime object, containing the current datetime
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
"""
pass

View File

@ -1,6 +1,7 @@
import logging
from packaging import version
from sqlalchemy import select
from freqtrade.constants import Config
from freqtrade.enums.tradingmode import TradingMode
@ -44,7 +45,7 @@ def _migrate_binance_futures_db(config: Config):
# Should symbol be migrated too?
# order.symbol = new_pair
Trade.commit()
pls = PairLock.query.filter(PairLock.pair.notlike('%:%'))
pls = PairLock.session.scalars(select(PairLock).filter(PairLock.pair.notlike('%:%'))).all()
for pl in pls:
pl.pair = f"{pl.pair}:{config['stake_currency']}"
# print(pls)

View File

@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
#
# QTPyLib: Quantitative Trading Python Library
# https://github.com/ranaroussi/qtpylib
#
@ -18,7 +16,6 @@
# limitations under the License.
#
import sys
import warnings
from datetime import datetime, timedelta
@ -27,11 +24,6 @@ import pandas as pd
from pandas.core.base import PandasObject
# =============================================
# check min, python version
if sys.version_info < (3, 4):
raise SystemError("QTPyLib requires Python version >= 3.4")
# =============================================
warnings.simplefilter(action="ignore", category=RuntimeWarning)

View File

@ -12,7 +12,7 @@ import sdnotify
from freqtrade import __version__
from freqtrade.configuration import Configuration
from freqtrade.constants import PROCESS_THROTTLE_SECS, RETRY_TIMEOUT, Config
from freqtrade.enums import State
from freqtrade.enums import RPCMessageType, State
from freqtrade.exceptions import OperationalException, TemporaryError
from freqtrade.exchange import timeframe_to_next_date
from freqtrade.freqtradebot import FreqtradeBot
@ -185,7 +185,10 @@ class Worker:
tb = traceback.format_exc()
hint = 'Issue `/start` if you think it is safe to restart.'
self.freqtrade.notify_status(f'OperationalException:\n```\n{tb}```{hint}')
self.freqtrade.notify_status(
f'*OperationalException:*\n```\n{tb}```\n {hint}',
msg_type=RPCMessageType.EXCEPTION
)
logger.exception('OperationalException. Stopping trader ...')
self.freqtrade.state = State.STOPPED

View File

@ -1,3 +1,7 @@
[build-system]
requires = ["setuptools >= 46.4.0", "wheel"]
build-backend = "setuptools.build_meta"
[tool.black]
line-length = 100
exclude = '''
@ -48,10 +52,6 @@ ignore_errors = true
module = "telegram.*"
implicit_optional = true
[build-system]
requires = ["setuptools >= 46.4.0", "wheel"]
build-backend = "setuptools.build_meta"
[tool.pyright]
include = ["freqtrade"]
exclude = [
@ -68,10 +68,11 @@ target-version = "py38"
extend-select = [
"C90", # mccabe
# "N", # pep8-naming
# "UP", # pyupgrade
"UP", # pyupgrade
"TID", # flake8-tidy-imports
# "EXE", # flake8-executable
"YTT", # flake8-2020
# "S", # flake8-bandit
# "DTZ", # flake8-datetimez
# "RSE", # flake8-raise
# "TCH", # flake8-type-checking
@ -80,3 +81,6 @@ extend-select = [
[tool.ruff.mccabe]
max-complexity = 12
[tool.ruff.per-file-ignores]
"tests/*" = ["S"]

View File

@ -7,11 +7,11 @@
-r docs/requirements-docs.txt
coveralls==3.3.1
ruff==0.0.253
mypy==1.0.1
pre-commit==3.1.1
pytest==7.2.1
pytest-asyncio==0.20.3
ruff==0.0.260
mypy==1.1.1
pre-commit==3.2.1
pytest==7.2.2
pytest-asyncio==0.21.0
pytest-cov==4.0.0
pytest-mock==3.10.0
pytest-random-order==1.1.0
@ -22,11 +22,11 @@ time-machine==2.9.0
httpx==0.23.3
# Convert jupyter notebooks to markdown documents
nbconvert==7.2.9
nbconvert==7.2.10
# mypy types
types-cachetools==5.3.0.4
types-cachetools==5.3.0.5
types-filelock==3.2.7
types-requests==2.28.11.15
types-tabulate==0.9.0.1
types-python-dateutil==2.8.19.9
types-requests==2.28.11.17
types-tabulate==0.9.0.2
types-python-dateutil==2.8.19.11

View File

@ -5,7 +5,7 @@
# Required for freqai
scikit-learn==1.1.3
joblib==1.2.0
catboost==1.1.1; platform_machine != 'aarch64' and python_version < '3.11'
catboost==1.1.1; platform_machine != 'aarch64' and 'arm' not in platform_machine and python_version < '3.11'
lightgbm==3.3.5
xgboost==1.7.4
tensorboard==2.12.0
xgboost==1.7.5
tensorboard==2.12.1

View File

@ -5,5 +5,5 @@
scipy==1.10.1
scikit-learn==1.1.3
scikit-optimize==0.9.0
filelock==3.9.0
filelock==3.10.6
progressbar2==4.2.0

View File

@ -1,4 +1,4 @@
# Include all requirements to run the bot.
-r requirements.txt
plotly==5.13.1
plotly==5.14.0

View File

@ -2,15 +2,15 @@ numpy==1.24.2
pandas==1.5.3
pandas-ta==0.3.14b
ccxt==2.8.54
cryptography==39.0.1
ccxt==3.0.50
cryptography==40.0.1
aiohttp==3.8.4
SQLAlchemy==2.0.4
SQLAlchemy==2.0.8
python-telegram-bot==13.15
arrow==1.2.3
cachetools==4.2.2
requests==2.28.2
urllib3==1.26.14
urllib3==1.26.15
jsonschema==4.17.3
TA-Lib==0.4.25
technical==1.4.0
@ -26,17 +26,17 @@ pyarrow==11.0.0; platform_machine != 'armv7l'
py_find_1st==1.1.5
# Load ticker files 30% faster
python-rapidjson==1.9
python-rapidjson==1.10
# Properly format api responses
orjson==3.8.6
orjson==3.8.9
# Notify systemd
sdnotify==0.3.2
# API Server
fastapi==0.92.0
pydantic==1.10.5
uvicorn==0.20.0
fastapi==0.95.0
pydantic==1.10.7
uvicorn==0.21.1
pyjwt==2.6.0
aiofiles==23.1.0
psutil==5.9.4
@ -45,7 +45,7 @@ psutil==5.9.4
colorama==0.4.6
# Building config files interactively
questionary==1.10.0
prompt-toolkit==3.0.37
prompt-toolkit==3.0.38
# Extensions to datetime library
python-dateutil==2.8.2
@ -53,5 +53,7 @@ python-dateutil==2.8.2
schedule==1.1.0
#WS Messages
websockets==10.4
websockets==11.0
janus==1.0.0
ast-comments==1.0.1

View File

@ -340,11 +340,13 @@ class FtRestClient():
:param limit: Limit result to the last n candles.
:return: json object
"""
return self._get("pair_candles", params={
params = {
"pair": pair,
"timeframe": timeframe,
"limit": limit,
})
}
if limit:
params['limit'] = limit
return self._get("pair_candles", params=params)
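Usage sketch (illustrative; URL and credentials are placeholders):

client = FtRestClient("http://127.0.0.1:8080", "freqtrader", "password")
candles = client.pair_candles("BTC/USDT", "5m")              # no limit parameter sent
last_100 = client.pair_candles("BTC/USDT", "5m", limit=100)  # limit included in params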
def pair_history(self, pair, timeframe, strategy, timerange=None):
"""Return historic, analyzed dataframe

View File

@ -59,7 +59,7 @@ setup(
install_requires=[
# from requirements.txt
'ccxt>=2.6.26',
'SQLAlchemy',
'SQLAlchemy>=2.0.6',
'python-telegram-bot>=13.4',
'arrow>=0.17.0',
'cachetools',

View File

@ -14,7 +14,8 @@ from freqtrade.commands import (start_backtesting_show, start_convert_data, star
start_hyperopt_show, start_install_ui, start_list_data,
start_list_exchanges, start_list_markets, start_list_strategies,
start_list_timeframes, start_new_strategy, start_show_trades,
start_test_pairlist, start_trading, start_webserver)
start_strategy_update, start_test_pairlist, start_trading,
start_webserver)
from freqtrade.commands.db_commands import start_convert_db
from freqtrade.commands.deploy_commands import (clean_ui_subdir, download_and_install_ui,
get_ui_download_url, read_ui_version)
@ -1546,3 +1547,37 @@ def test_start_convert_db(mocker, fee, tmpdir, caplog):
start_convert_db(pargs)
assert db_target_file.is_file()
def test_start_strategy_updater(mocker, tmpdir):
sc_mock = mocker.patch('freqtrade.commands.strategy_utils_commands.start_conversion')
teststrats = Path(__file__).parent.parent / 'strategy/strats'
args = [
"strategy-updater",
"--userdir",
str(tmpdir),
"--strategy-path",
str(teststrats),
]
pargs = get_args(args)
pargs['config'] = None
start_strategy_update(pargs)
# Number of strategies in the test directory
assert sc_mock.call_count == 11
sc_mock.reset_mock()
args = [
"strategy-updater",
"--userdir",
str(tmpdir),
"--strategy-path",
str(teststrats),
"--strategy-list",
"StrategyTestV3",
"StrategyTestV2"
]
pargs = get_args(args)
pargs['config'] = None
start_strategy_update(pargs)
# Called once per strategy passed via --strategy-list
assert sc_mock.call_count == 2

View File

@ -299,7 +299,7 @@ def create_mock_trades(fee, is_short: Optional[bool] = False, use_db: bool = Tru
"""
def add_trade(trade):
if use_db:
Trade.query.session.add(trade)
Trade.session.add(trade)
else:
LocalTrade.add_bt_trade(trade)
is_short1 = is_short if is_short is not None else True
@ -332,11 +332,11 @@ def create_mock_trades_with_leverage(fee, use_db: bool = True):
Create some fake trades ...
"""
if use_db:
Trade.query.session.rollback()
Trade.session.rollback()
def add_trade(trade):
if use_db:
Trade.query.session.add(trade)
Trade.session.add(trade)
else:
LocalTrade.add_bt_trade(trade)
@ -366,7 +366,7 @@ def create_mock_trades_with_leverage(fee, use_db: bool = True):
add_trade(trade)
if use_db:
Trade.query.session.flush()
Trade.session.flush()
def create_mock_trades_usdt(fee, is_short: Optional[bool] = False, use_db: bool = True):
@ -375,7 +375,7 @@ def create_mock_trades_usdt(fee, is_short: Optional[bool] = False, use_db: bool
"""
def add_trade(trade):
if use_db:
Trade.query.session.add(trade)
Trade.session.add(trade)
else:
LocalTrade.add_bt_trade(trade)

View File

@ -98,7 +98,7 @@ def test_load_backtest_data_new_format(testdatadir):
assert bt_data.equals(bt_data3)
with pytest.raises(ValueError, match=r"File .* does not exist\."):
load_backtest_data(str("filename") + "nofile")
load_backtest_data("filename" + "nofile")
with pytest.raises(ValueError, match=r"Unknown dataformat."):
load_backtest_data(testdatadir / "backtest_results" / LAST_BT_RESULT_FN)

View File

@ -252,7 +252,7 @@ def test_datahandler__check_empty_df(testdatadir, caplog):
assert log_has_re(expected_text, caplog)
@pytest.mark.parametrize('datahandler', ['feather', 'parquet'])
@pytest.mark.parametrize('datahandler', ['parquet'])
def test_datahandler_trades_not_supported(datahandler, testdatadir, ):
dh = get_datahandler(testdatadir, datahandler)
with pytest.raises(NotImplementedError):
@ -496,6 +496,58 @@ def test_hdf5datahandler_ohlcv_purge(mocker, testdatadir):
assert unlinkmock.call_count == 2
def test_featherdatahandler_trades_load(testdatadir):
dh = get_datahandler(testdatadir, 'feather')
trades = dh.trades_load('XRP/ETH')
assert isinstance(trades, list)
assert trades[0][0] == 1570752011620
assert trades[-1][-1] == 0.1986231
trades1 = dh.trades_load('UNITTEST/NONEXIST')
assert trades1 == []
def test_featherdatahandler_trades_store(testdatadir, tmpdir):
tmpdir1 = Path(tmpdir)
dh = get_datahandler(testdatadir, 'feather')
trades = dh.trades_load('XRP/ETH')
dh1 = get_datahandler(tmpdir1, 'feather')
dh1.trades_store('XRP/NEW', trades)
file = tmpdir1 / 'XRP_NEW-trades.feather'
assert file.is_file()
# Load trades back
trades_new = dh1.trades_load('XRP/NEW')
assert len(trades_new) == len(trades)
assert trades[0][0] == trades_new[0][0]
assert trades[0][1] == trades_new[0][1]
# assert trades[0][2] == trades_new[0][2] # This is nan - so comparison does not make sense
assert trades[0][3] == trades_new[0][3]
assert trades[0][4] == trades_new[0][4]
assert trades[0][5] == trades_new[0][5]
assert trades[0][6] == trades_new[0][6]
assert trades[-1][0] == trades_new[-1][0]
assert trades[-1][1] == trades_new[-1][1]
# assert trades[-1][2] == trades_new[-1][2] # This is nan - so comparison does not make sense
assert trades[-1][3] == trades_new[-1][3]
assert trades[-1][4] == trades_new[-1][4]
assert trades[-1][5] == trades_new[-1][5]
assert trades[-1][6] == trades_new[-1][6]
def test_featherdatahandler_trades_purge(mocker, testdatadir):
mocker.patch.object(Path, "exists", MagicMock(return_value=False))
unlinkmock = mocker.patch.object(Path, "unlink", MagicMock())
dh = get_datahandler(testdatadir, 'feather')
assert not dh.trades_purge('UNITTEST/NONEXIST')
assert unlinkmock.call_count == 0
mocker.patch.object(Path, "exists", MagicMock(return_value=True))
assert dh.trades_purge('UNITTEST/NONEXIST')
assert unlinkmock.call_count == 1
def test_gethandlerclass():
cl = get_datahandlerclass('json')
assert cl == JsonDataHandler

View File

@ -409,7 +409,7 @@ def test_init_with_refresh(default_conf, mocker) -> None:
def test_file_dump_json_tofile(testdatadir) -> None:
file = testdatadir / 'test_{id}.json'.format(id=str(uuid.uuid4()))
file = testdatadir / f'test_{uuid.uuid4()}.json'
data = {'bar': 'foo'}
# check the file we will create does not exist

View File

@ -11,6 +11,19 @@ from tests.conftest import EXMS, get_mock_coro, get_patched_exchange, log_has_re
from tests.exchange.test_exchange import ccxt_exceptionhandlers
@pytest.mark.parametrize('side,type,time_in_force,expected', [
('buy', 'limit', 'gtc', {'timeInForce': 'GTC'}),
('buy', 'limit', 'IOC', {'timeInForce': 'IOC'}),
('buy', 'market', 'IOC', {}),
('buy', 'limit', 'PO', {'timeInForce': 'PO'}),
('sell', 'limit', 'PO', {'timeInForce': 'PO'}),
('sell', 'market', 'PO', {}),
])
def test__get_params_binance(default_conf, mocker, side, type, time_in_force, expected):
exchange = get_patched_exchange(mocker, default_conf, id='binance')
assert exchange._get_params(side, type, 1, False, time_in_force) == expected
@pytest.mark.parametrize('trademode', [TradingMode.FUTURES, TradingMode.SPOT])
@pytest.mark.parametrize('limitratio,expected,side', [
(None, 220 * 0.99, "sell"),
@ -35,11 +48,11 @@ def test_create_stoploss_order_binance(default_conf, mocker, limitratio, expecte
default_conf['margin_mode'] = MarginMode.ISOLATED
default_conf['trading_mode'] = trademode
mocker.patch(f'{EXMS}.amount_to_precision', lambda s, x, y: y)
mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y: y)
mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y, **kwargs: y)
exchange = get_patched_exchange(mocker, default_conf, api_mock, 'binance')
with pytest.raises(OperationalException):
with pytest.raises(InvalidOrderException):
order = exchange.create_stoploss(
pair='ETH/BTC',
amount=1,
@ -114,11 +127,11 @@ def test_create_stoploss_order_dry_run_binance(default_conf, mocker):
order_type = 'stop_loss_limit'
default_conf['dry_run'] = True
mocker.patch(f'{EXMS}.amount_to_precision', lambda s, x, y: y)
mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y: y)
mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y, **kwargs: y)
exchange = get_patched_exchange(mocker, default_conf, api_mock, 'binance')
with pytest.raises(OperationalException):
with pytest.raises(InvalidOrderException):
order = exchange.create_stoploss(
pair='ETH/BTC',
amount=1,
@ -542,7 +555,6 @@ def test__set_leverage_binance(mocker, default_conf):
"set_leverage",
pair="XRP/USDT",
leverage=5.0,
trading_mode=TradingMode.FUTURES
)

View File

@ -37,7 +37,7 @@ EXCHANGES = {
'stake_currency': 'USDT',
'use_ci_proxy': True,
'hasQuoteVolume': True,
'timeframe': '5m',
'timeframe': '1h',
'futures': True,
'futures_pair': 'BTC/USDT:USDT',
'hasQuoteVolumeFutures': True,
@ -66,7 +66,7 @@ EXCHANGES = {
'pair': 'BTC/USDT',
'stake_currency': 'USDT',
'hasQuoteVolume': True,
'timeframe': '5m',
'timeframe': '1h',
'futures': False,
'sample_order': [{
"symbol": "SOLUSDT",
@ -91,7 +91,7 @@ EXCHANGES = {
'pair': 'BTC/USDT',
'stake_currency': 'USDT',
'hasQuoteVolume': True,
'timeframe': '5m',
'timeframe': '1h',
'leverage_tiers_public': False,
'leverage_in_spot_market': True,
},
@ -99,7 +99,7 @@ EXCHANGES = {
'pair': 'XRP/USDT',
'stake_currency': 'USDT',
'hasQuoteVolume': True,
'timeframe': '5m',
'timeframe': '1h',
'leverage_tiers_public': False,
'leverage_in_spot_market': True,
'sample_order': [
@ -141,7 +141,7 @@ EXCHANGES = {
'pair': 'BTC/USDT',
'stake_currency': 'USDT',
'hasQuoteVolume': True,
'timeframe': '5m',
'timeframe': '1h',
'futures': True,
'futures_pair': 'BTC/USDT:USDT',
'hasQuoteVolumeFutures': True,
@ -215,7 +215,7 @@ EXCHANGES = {
'pair': 'BTC/USDT',
'stake_currency': 'USDT',
'hasQuoteVolume': True,
'timeframe': '5m',
'timeframe': '1h',
'futures': True,
'futures_pair': 'BTC/USDT:USDT',
'hasQuoteVolumeFutures': False,
@ -226,7 +226,7 @@ EXCHANGES = {
'pair': 'BTC/USDT',
'stake_currency': 'USDT',
'hasQuoteVolume': True,
'timeframe': '5m',
'timeframe': '1h',
'futures_pair': 'BTC/USDT:USDT',
'futures': True,
'leverage_tiers_public': True,
@ -253,14 +253,14 @@ EXCHANGES = {
'pair': 'ETH/BTC',
'stake_currency': 'BTC',
'hasQuoteVolume': True,
'timeframe': '5m',
'timeframe': '1h',
'futures': False,
},
'bitvavo': {
'pair': 'BTC/EUR',
'stake_currency': 'EUR',
'hasQuoteVolume': True,
'timeframe': '5m',
'timeframe': '1h',
'leverage_tiers_public': False,
'leverage_in_spot_market': False,
},

View File

@ -8,12 +8,13 @@ from unittest.mock import MagicMock, Mock, PropertyMock, patch
import arrow
import ccxt
import pytest
from ccxt import DECIMAL_PLACES, ROUND, ROUND_UP, TICK_SIZE, TRUNCATE
from pandas import DataFrame
from freqtrade.enums import CandleType, MarginMode, TradingMode
from freqtrade.exceptions import (DDosProtection, DependencyException, ExchangeError,
InvalidOrderException, OperationalException, PricingError,
TemporaryError)
InsufficientFundsError, InvalidOrderException,
OperationalException, PricingError, TemporaryError)
from freqtrade.exchange import (Binance, Bittrex, Exchange, Kraken, amount_to_precision,
date_minus_candles, market_is_active, price_to_precision,
timeframe_to_minutes, timeframe_to_msecs, timeframe_to_next_date,
@ -113,18 +114,21 @@ async def async_ccxt_exception(mocker, default_conf, api_mock, fun, mock_ccxt_fu
exchange = get_patched_exchange(mocker, default_conf, api_mock)
await getattr(exchange, fun)(**kwargs)
assert api_mock.__dict__[mock_ccxt_fun].call_count == retries
exchange.close()
with pytest.raises(TemporaryError):
api_mock.__dict__[mock_ccxt_fun] = MagicMock(side_effect=ccxt.NetworkError("DeadBeef"))
exchange = get_patched_exchange(mocker, default_conf, api_mock)
await getattr(exchange, fun)(**kwargs)
assert api_mock.__dict__[mock_ccxt_fun].call_count == retries
exchange.close()
with pytest.raises(OperationalException):
api_mock.__dict__[mock_ccxt_fun] = MagicMock(side_effect=ccxt.BaseError("DeadBeef"))
exchange = get_patched_exchange(mocker, default_conf, api_mock)
await getattr(exchange, fun)(**kwargs)
assert api_mock.__dict__[mock_ccxt_fun].call_count == 1
exchange.close()
def test_init(default_conf, mocker, caplog):
@ -312,35 +316,54 @@ def test_amount_to_precision(amount, precision_mode, precision, expected,):
assert amount_to_precision(amount, precision, precision_mode) == expected
@pytest.mark.parametrize("price,precision_mode,precision,expected", [
(2.34559, 2, 4, 2.3456),
(2.34559, 2, 5, 2.34559),
(2.34559, 2, 3, 2.346),
(2.9999, 2, 3, 3.000),
(2.9909, 2, 3, 2.991),
# Tests for Tick_size
(2.34559, 4, 0.0001, 2.3456),
(2.34559, 4, 0.00001, 2.34559),
(2.34559, 4, 0.001, 2.346),
(2.9999, 4, 0.001, 3.000),
(2.9909, 4, 0.001, 2.991),
(2.9909, 4, 0.005, 2.995),
(2.9973, 4, 0.005, 3.0),
(2.9977, 4, 0.005, 3.0),
(234.43, 4, 0.5, 234.5),
(234.53, 4, 0.5, 235.0),
(0.891534, 4, 0.0001, 0.8916),
(64968.89, 4, 0.01, 64968.89),
(0.000000003483, 4, 1e-12, 0.000000003483),
@pytest.mark.parametrize("price,precision_mode,precision,expected,rounding_mode", [
# Tests for DECIMAL_PLACES, ROUND_UP
(2.34559, 2, 4, 2.3456, ROUND_UP),
(2.34559, 2, 5, 2.34559, ROUND_UP),
(2.34559, 2, 3, 2.346, ROUND_UP),
(2.9999, 2, 3, 3.000, ROUND_UP),
(2.9909, 2, 3, 2.991, ROUND_UP),
# Tests for DECIMAL_PLACES, ROUND
(2.345600000000001, DECIMAL_PLACES, 4, 2.3456, ROUND),
(2.345551, DECIMAL_PLACES, 4, 2.3456, ROUND),
(2.49, DECIMAL_PLACES, 0, 2., ROUND),
(2.51, DECIMAL_PLACES, 0, 3., ROUND),
(5.1, DECIMAL_PLACES, -1, 10., ROUND),
(4.9, DECIMAL_PLACES, -1, 0., ROUND),
# Tests for TICK_SIZE, ROUND_UP
(2.34559, TICK_SIZE, 0.0001, 2.3456, ROUND_UP),
(2.34559, TICK_SIZE, 0.00001, 2.34559, ROUND_UP),
(2.34559, TICK_SIZE, 0.001, 2.346, ROUND_UP),
(2.9999, TICK_SIZE, 0.001, 3.000, ROUND_UP),
(2.9909, TICK_SIZE, 0.001, 2.991, ROUND_UP),
(2.9909, TICK_SIZE, 0.005, 2.995, ROUND_UP),
(2.9973, TICK_SIZE, 0.005, 3.0, ROUND_UP),
(2.9977, TICK_SIZE, 0.005, 3.0, ROUND_UP),
(234.43, TICK_SIZE, 0.5, 234.5, ROUND_UP),
(234.53, TICK_SIZE, 0.5, 235.0, ROUND_UP),
(0.891534, TICK_SIZE, 0.0001, 0.8916, ROUND_UP),
(64968.89, TICK_SIZE, 0.01, 64968.89, ROUND_UP),
(0.000000003483, TICK_SIZE, 1e-12, 0.000000003483, ROUND_UP),
# Tests for TICK_SIZE, ROUND
(2.49, TICK_SIZE, 1., 2., ROUND),
(2.51, TICK_SIZE, 1., 3., ROUND),
(2.000000051, TICK_SIZE, 0.0000001, 2.0000001, ROUND),
(2.000000049, TICK_SIZE, 0.0000001, 2., ROUND),
(2.9909, TICK_SIZE, 0.005, 2.990, ROUND),
(2.9973, TICK_SIZE, 0.005, 2.995, ROUND),
(2.9977, TICK_SIZE, 0.005, 3.0, ROUND),
(234.24, TICK_SIZE, 0.5, 234., ROUND),
(234.26, TICK_SIZE, 0.5, 234.5, ROUND),
# Tests for TRUNCATE
(2.34559, 2, 4, 2.3455, TRUNCATE),
(2.34559, 2, 5, 2.34559, TRUNCATE),
(2.34559, 2, 3, 2.345, TRUNCATE),
(2.9999, 2, 3, 2.999, TRUNCATE),
(2.9909, 2, 3, 2.990, TRUNCATE),
])
def test_price_to_precision(price, precision_mode, precision, expected):
# digits counting mode
# DECIMAL_PLACES = 2
# SIGNIFICANT_DIGITS = 3
# TICK_SIZE = 4
assert price_to_precision(price, precision, precision_mode) == expected
def test_price_to_precision(price, precision_mode, precision, expected, rounding_mode):
assert price_to_precision(
price, precision, precision_mode, rounding_mode=rounding_mode) == expected
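Two direct calls mirroring rows from the table above (illustrative only):

assert price_to_precision(2.34559, 0.001, TICK_SIZE, rounding_mode=ROUND_UP) == 2.346
assert price_to_precision(2.34559, 4, DECIMAL_PLACES, rounding_mode=TRUNCATE) == 2.3455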
@pytest.mark.parametrize("price,precision_mode,precision,expected", [
@ -414,7 +437,7 @@ def test__get_stake_amount_limit(mocker, default_conf) -> None:
}
mocker.patch(f'{EXMS}.markets', PropertyMock(return_value=markets))
result = exchange.get_min_pair_stake_amount('ETH/BTC', 2, stoploss)
expected_result = 2 * 2 * (1 + 0.05) / (1 - abs(stoploss))
expected_result = 2 * 2 * (1 + 0.05)
assert pytest.approx(result) == expected_result
# With Leverage
result = exchange.get_min_pair_stake_amount('ETH/BTC', 2, stoploss, 5.0)
@ -423,14 +446,14 @@ def test__get_stake_amount_limit(mocker, default_conf) -> None:
result = exchange.get_max_pair_stake_amount('ETH/BTC', 2)
assert result == 20000
# min amount and cost are set (cost is minimal)
# min amount and cost are set (cost is minimal and therefore ignored)
markets["ETH/BTC"]["limits"] = {
'cost': {'min': 2, 'max': None},
'amount': {'min': 2, 'max': None},
}
mocker.patch(f'{EXMS}.markets', PropertyMock(return_value=markets))
result = exchange.get_min_pair_stake_amount('ETH/BTC', 2, stoploss)
expected_result = max(2, 2 * 2) * (1 + 0.05) / (1 - abs(stoploss))
expected_result = max(2, 2 * 2) * (1 + 0.05)
assert pytest.approx(result) == expected_result
# With Leverage
result = exchange.get_min_pair_stake_amount('ETH/BTC', 2, stoploss, 10)
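The updated expected_result values above reduce to one piece of arithmetic: take the larger of the exchange's cost and amount minimums, pad it by the 5% reserve, and divide by leverage; the stoploss distance is no longer part of the expectation. A sketch of that calculation with the numbers from this test (the helper itself is illustrative, not the freqtrade implementation):

def min_stake(min_cost, min_amount, price, leverage=1.0, reserve=0.05):
    # max of the exchange minimums, padded by the reserve, reduced by leverage
    return max(min_cost, min_amount * price) * (1 + reserve) / leverage

assert min_stake(2, 2, 2) == 4.2                    # max(2, 2 * 2) * 1.05
assert abs(min_stake(2, 2, 2, leverage=10) - 0.42) < 1e-9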
@ -473,6 +496,9 @@ def test__get_stake_amount_limit(mocker, default_conf) -> None:
result = exchange.get_max_pair_stake_amount('ETH/BTC', 2)
assert result == 1000
result = exchange.get_max_pair_stake_amount('ETH/BTC', 2, 12.0)
assert result == 1000 / 12
markets["ETH/BTC"]["contractSize"] = '0.01'
default_conf['trading_mode'] = 'futures'
default_conf['margin_mode'] = 'isolated'
@ -1039,9 +1065,9 @@ def test_validate_ordertypes(default_conf, mocker):
('bybit', 'last', True),
('bybit', 'mark', True),
('bybit', 'index', True),
# ('okx', 'last', True),
# ('okx', 'mark', True),
# ('okx', 'index', True),
('okx', 'last', True),
('okx', 'mark', True),
('okx', 'index', True),
('gate', 'last', True),
('gate', 'mark', True),
('gate', 'index', True),
@ -1436,6 +1462,9 @@ def test_buy_prod(default_conf, mocker, exchange_name):
assert api_mock.create_order.call_args[0][1] == order_type
assert api_mock.create_order.call_args[0][2] == 'buy'
assert api_mock.create_order.call_args[0][3] == 1
if exchange._order_needs_price(order_type):
assert api_mock.create_order.call_args[0][4] == 200
else:
assert api_mock.create_order.call_args[0][4] is None
api_mock.create_order.reset_mock()
@ -1541,6 +1570,9 @@ def test_buy_considers_time_in_force(default_conf, mocker, exchange_name):
assert api_mock.create_order.call_args[0][1] == order_type
assert api_mock.create_order.call_args[0][2] == 'buy'
assert api_mock.create_order.call_args[0][3] == 1
if exchange._order_needs_price(order_type):
assert api_mock.create_order.call_args[0][4] == 200
else:
assert api_mock.create_order.call_args[0][4] is None
# Market orders should not send timeInForce!!
assert "timeInForce" not in api_mock.create_order.call_args[0][5]
@ -1585,6 +1617,9 @@ def test_sell_prod(default_conf, mocker, exchange_name):
assert api_mock.create_order.call_args[0][1] == order_type
assert api_mock.create_order.call_args[0][2] == 'sell'
assert api_mock.create_order.call_args[0][3] == 1
if exchange._order_needs_price(order_type):
assert api_mock.create_order.call_args[0][4] == 200
else:
assert api_mock.create_order.call_args[0][4] is None
api_mock.create_order.reset_mock()
@ -1599,13 +1634,13 @@ def test_sell_prod(default_conf, mocker, exchange_name):
assert api_mock.create_order.call_args[0][4] == 200
# test exception handling
with pytest.raises(DependencyException):
with pytest.raises(InsufficientFundsError):
api_mock.create_order = MagicMock(side_effect=ccxt.InsufficientFunds("0 balance"))
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
exchange.create_order(pair='ETH/BTC', ordertype=order_type, side="sell", amount=1, rate=200,
leverage=1.0)
with pytest.raises(DependencyException):
with pytest.raises(InvalidOrderException):
api_mock.create_order = MagicMock(side_effect=ccxt.InvalidOrder("Order not found"))
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
exchange.create_order(pair='ETH/BTC', ordertype='limit', side="sell", amount=1, rate=200,
@ -1679,6 +1714,9 @@ def test_sell_considers_time_in_force(default_conf, mocker, exchange_name):
assert api_mock.create_order.call_args[0][1] == order_type
assert api_mock.create_order.call_args[0][2] == 'sell'
assert api_mock.create_order.call_args[0][3] == 1
if exchange._order_needs_price(order_type):
assert api_mock.create_order.call_args[0][4] == 200
else:
assert api_mock.create_order.call_args[0][4] is None
# Market orders should not send timeInForce!!
assert "timeInForce" not in api_mock.create_order.call_args[0][5]
@ -2248,7 +2286,6 @@ def test_refresh_latest_ohlcv_cache(mocker, default_conf, candle_type, time_mach
assert res[pair2].at[0, 'open']
@pytest.mark.asyncio
@pytest.mark.parametrize("exchange_name", EXCHANGES)
async def test__async_get_candle_history(default_conf, mocker, caplog, exchange_name):
ohlcv = [
@ -2277,7 +2314,7 @@ async def test__async_get_candle_history(default_conf, mocker, caplog, exchange_
assert res[3] == ohlcv
assert exchange._api_async.fetch_ohlcv.call_count == 1
assert not log_has(f"Using cached candle (OHLCV) data for {pair} ...", caplog)
exchange.close()
# exchange = Exchange(default_conf)
await async_ccxt_exception(mocker, default_conf, MagicMock(),
"_async_get_candle_history", "fetch_ohlcv",
@ -2292,15 +2329,17 @@ async def test__async_get_candle_history(default_conf, mocker, caplog, exchange_
await exchange._async_get_candle_history(pair, "5m", CandleType.SPOT,
(arrow.utcnow().int_timestamp - 2000) * 1000)
exchange.close()
with pytest.raises(OperationalException, match=r'Exchange.* does not support fetching '
r'historical candle \(OHLCV\) data\..*'):
api_mock.fetch_ohlcv = MagicMock(side_effect=ccxt.NotSupported("Not supported"))
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
await exchange._async_get_candle_history(pair, "5m", CandleType.SPOT,
(arrow.utcnow().int_timestamp - 2000) * 1000)
exchange.close()
@pytest.mark.asyncio
async def test__async_kucoin_get_candle_history(default_conf, mocker, caplog):
from freqtrade.exchange.common import _reset_logging_mixin
_reset_logging_mixin()
@ -2341,9 +2380,9 @@ async def test__async_kucoin_get_candle_history(default_conf, mocker, caplog):
# Expect the "returned exception" message 12 times (4 retries * 3 (loop))
assert num_log_has_re(msg, caplog) == 12
assert num_log_has_re(msg2, caplog) == 9
exchange.close()
@pytest.mark.asyncio
async def test__async_get_candle_history_empty(default_conf, mocker, caplog):
""" Test empty exchange result """
ohlcv = []
@ -2363,6 +2402,7 @@ async def test__async_get_candle_history_empty(default_conf, mocker, caplog):
assert res[2] == CandleType.SPOT
assert res[3] == ohlcv
assert exchange._api_async.fetch_ohlcv.call_count == 1
exchange.close()
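A recurring addition across these async tests is an explicit exchange.close() once the patched exchange has been used, presumably so the underlying async session is shut down and the event loop is not left holding unclosed resources. A minimal sketch of the pattern (the class here is a stand-in, not ccxt or freqtrade):

import asyncio

class FakeAsyncExchange:
    def __init__(self):
        self.closed = False

    async def fetch_ohlcv(self, pair, timeframe):
        return []

    def close(self):
        # The real tests close the async ccxt session here; the stand-in only flags it.
        self.closed = True

async def run_case():
    ex = FakeAsyncExchange()
    await ex.fetch_ohlcv("ETH/BTC", "5m")
    ex.close()
    return ex.closed

assert asyncio.run(run_case()) is True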
def test_refresh_latest_ohlcv_inv_result(default_conf, mocker, caplog):
@ -2757,7 +2797,6 @@ async def test___async_get_candle_history_sort(default_conf, mocker, exchange_na
assert res_ohlcv[9][5] == 2.31452783
@pytest.mark.asyncio
@pytest.mark.parametrize("exchange_name", EXCHANGES)
async def test__async_fetch_trades(default_conf, mocker, caplog, exchange_name,
fetch_trades_result):
@ -2785,8 +2824,8 @@ async def test__async_fetch_trades(default_conf, mocker, caplog, exchange_name,
assert exchange._api_async.fetch_trades.call_args[1]['limit'] == 1000
assert exchange._api_async.fetch_trades.call_args[1]['params'] == {'from': '123'}
assert log_has_re(f"Fetching trades for pair {pair}, params: .*", caplog)
exchange.close()
exchange = Exchange(default_conf)
await async_ccxt_exception(mocker, default_conf, MagicMock(),
"_async_fetch_trades", "fetch_trades",
pair='ABCD/BTC', since=None)
@ -2796,15 +2835,16 @@ async def test__async_fetch_trades(default_conf, mocker, caplog, exchange_name,
api_mock.fetch_trades = MagicMock(side_effect=ccxt.BaseError("Unknown error"))
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
await exchange._async_fetch_trades(pair, since=(arrow.utcnow().int_timestamp - 2000) * 1000)
exchange.close()
with pytest.raises(OperationalException, match=r'Exchange.* does not support fetching '
r'historical trade data\..*'):
api_mock.fetch_trades = MagicMock(side_effect=ccxt.NotSupported("Not supported"))
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
await exchange._async_fetch_trades(pair, since=(arrow.utcnow().int_timestamp - 2000) * 1000)
exchange.close()
@pytest.mark.asyncio
@pytest.mark.parametrize("exchange_name", EXCHANGES)
async def test__async_fetch_trades_contract_size(default_conf, mocker, caplog, exchange_name,
fetch_trades_result):
@ -2839,6 +2879,7 @@ async def test__async_fetch_trades_contract_size(default_conf, mocker, caplog, e
pair = 'ETH/USDT:USDT'
res = await exchange._async_fetch_trades(pair, since=None, params=None)
assert res[0][5] == 300
exchange.close()
@pytest.mark.asyncio
@ -3387,7 +3428,7 @@ def test_merge_ft_has_dict(default_conf, mocker):
ex = Binance(default_conf)
assert ex._ft_has != Exchange._ft_has_default
assert ex.get_option('stoploss_on_exchange')
assert ex.get_option('order_time_in_force') == ['GTC', 'FOK', 'IOC']
assert ex.get_option('order_time_in_force') == ['GTC', 'FOK', 'IOC', 'PO']
assert ex.get_option('trades_pagination') == 'id'
assert ex.get_option('trades_pagination_arg') == 'fromId'
@ -3868,29 +3909,6 @@ def test_get_stake_amount_considering_leverage(
stake_amount, leverage) == min_stake_with_lev
@pytest.mark.parametrize("exchange_name,trading_mode", [
("binance", TradingMode.FUTURES),
])
def test__set_leverage(mocker, default_conf, exchange_name, trading_mode):
api_mock = MagicMock()
api_mock.set_leverage = MagicMock()
type(api_mock).has = PropertyMock(return_value={'setLeverage': True})
default_conf['dry_run'] = False
ccxt_exceptionhandlers(
mocker,
default_conf,
api_mock,
exchange_name,
"_set_leverage",
"set_leverage",
pair="XRP/USDT",
leverage=5.0,
trading_mode=trading_mode
)
@pytest.mark.parametrize("margin_mode", [
(MarginMode.CROSS),
(MarginMode.ISOLATED)
@ -4830,7 +4848,6 @@ def test_load_leverage_tiers(mocker, default_conf, leverage_tiers, exchange_name
)
@pytest.mark.asyncio
@pytest.mark.parametrize('exchange_name', EXCHANGES)
async def test_get_market_leverage_tiers(mocker, default_conf, exchange_name):
default_conf['exchange']['name'] = exchange_name
@ -5287,7 +5304,7 @@ def test_stoploss_contract_size(mocker, default_conf, contract_size, order_amoun
})
default_conf['dry_run'] = False
mocker.patch(f'{EXMS}.amount_to_precision', lambda s, x, y: y)
mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y: y)
mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y, **kwargs: y)
exchange = get_patched_exchange(mocker, default_conf, api_mock)
exchange.get_contract_size = MagicMock(return_value=contract_size)
@ -5307,3 +5324,10 @@ def test_stoploss_contract_size(mocker, default_conf, contract_size, order_amoun
assert order['cost'] == 100
assert order['filled'] == 100
assert order['remaining'] == 100
def test_price_to_precision_with_default_conf(default_conf, mocker):
conf = copy.deepcopy(default_conf)
patched_ex = get_patched_exchange(mocker, conf)
prec_price = patched_ex.price_to_precision("XRP/USDT", 1.0000000101)
assert prec_price == 1.00000001

View File

@ -4,42 +4,9 @@ from unittest.mock import MagicMock
import pytest
from freqtrade.enums import MarginMode, TradingMode
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import Gate
from freqtrade.resolvers.exchange_resolver import ExchangeResolver
from tests.conftest import EXMS, get_patched_exchange
def test_validate_order_types_gate(default_conf, mocker):
default_conf['exchange']['name'] = 'gate'
mocker.patch(f'{EXMS}._init_ccxt')
mocker.patch(f'{EXMS}._load_markets', return_value={})
mocker.patch(f'{EXMS}.validate_pairs')
mocker.patch(f'{EXMS}.validate_timeframes')
mocker.patch(f'{EXMS}.validate_stakecurrency')
mocker.patch(f'{EXMS}.validate_pricing')
mocker.patch(f'{EXMS}.name', 'Gate')
exch = ExchangeResolver.load_exchange('gate', default_conf, True)
assert isinstance(exch, Gate)
default_conf['order_types'] = {
'entry': 'market',
'exit': 'limit',
'stoploss': 'market',
'stoploss_on_exchange': False
}
with pytest.raises(OperationalException,
match=r'Exchange .* does not support market orders.'):
ExchangeResolver.load_exchange('gate', default_conf, True)
# market-orders supported on futures markets.
default_conf['trading_mode'] = 'futures'
default_conf['margin_mode'] = 'isolated'
ex = ExchangeResolver.load_exchange('gate', default_conf, True)
assert ex
@pytest.mark.usefixtures("init_persistence")
def test_fetch_stoploss_order_gate(default_conf, mocker):
exchange = get_patched_exchange(mocker, default_conf, id='gate')

View File

@ -4,7 +4,7 @@ from unittest.mock import MagicMock
import ccxt
import pytest
from freqtrade.exceptions import DependencyException, InvalidOrderException, OperationalException
from freqtrade.exceptions import DependencyException, InvalidOrderException
from tests.conftest import EXMS, get_patched_exchange
from tests.exchange.test_exchange import ccxt_exceptionhandlers
@ -27,11 +27,11 @@ def test_create_stoploss_order_huobi(default_conf, mocker, limitratio, expected,
})
default_conf['dry_run'] = False
mocker.patch(f'{EXMS}.amount_to_precision', lambda s, x, y: y)
mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y: y)
mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y, **kwargs: y)
exchange = get_patched_exchange(mocker, default_conf, api_mock, 'huobi')
with pytest.raises(OperationalException):
with pytest.raises(InvalidOrderException):
order = exchange.create_stoploss(pair='ETH/BTC', amount=1, stop_price=190,
order_types={'stoploss_on_exchange_limit_ratio': 1.05},
side=side,
@ -80,11 +80,11 @@ def test_create_stoploss_order_dry_run_huobi(default_conf, mocker):
order_type = 'stop-limit'
default_conf['dry_run'] = True
mocker.patch(f'{EXMS}.amount_to_precision', lambda s, x, y: y)
mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y: y)
mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y, **kwargs: y)
exchange = get_patched_exchange(mocker, default_conf, api_mock, 'huobi')
with pytest.raises(OperationalException):
with pytest.raises(InvalidOrderException):
order = exchange.create_stoploss(pair='ETH/BTC', amount=1, stop_price=190,
order_types={'stoploss_on_exchange_limit_ratio': 1.05},
side='sell', leverage=1.0)

View File

@ -29,7 +29,7 @@ def test_buy_kraken_trading_agreement(default_conf, mocker):
default_conf['dry_run'] = False
mocker.patch(f'{EXMS}.amount_to_precision', lambda s, x, y: y)
mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y: y)
mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y, **kwargs: y)
exchange = get_patched_exchange(mocker, default_conf, api_mock, id="kraken")
order = exchange.create_order(
@ -192,7 +192,7 @@ def test_create_stoploss_order_kraken(default_conf, mocker, ordertype, side, adj
default_conf['dry_run'] = False
mocker.patch(f'{EXMS}.amount_to_precision', lambda s, x, y: y)
mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y: y)
mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y, **kwargs: y)
exchange = get_patched_exchange(mocker, default_conf, api_mock, 'kraken')
@ -263,7 +263,7 @@ def test_create_stoploss_order_dry_run_kraken(default_conf, mocker, side):
api_mock = MagicMock()
default_conf['dry_run'] = True
mocker.patch(f'{EXMS}.amount_to_precision', lambda s, x, y: y)
mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y: y)
mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y, **kwargs: y)
exchange = get_patched_exchange(mocker, default_conf, api_mock, 'kraken')

View File

@ -4,7 +4,7 @@ from unittest.mock import MagicMock
import ccxt
import pytest
from freqtrade.exceptions import DependencyException, InvalidOrderException, OperationalException
from freqtrade.exceptions import DependencyException, InvalidOrderException
from tests.conftest import EXMS, get_patched_exchange
from tests.exchange.test_exchange import ccxt_exceptionhandlers
@ -27,11 +27,11 @@ def test_create_stoploss_order_kucoin(default_conf, mocker, limitratio, expected
})
default_conf['dry_run'] = False
mocker.patch(f'{EXMS}.amount_to_precision', lambda s, x, y: y)
mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y: y)
mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y, **kwargs: y)
exchange = get_patched_exchange(mocker, default_conf, api_mock, 'kucoin')
if order_type == 'limit':
with pytest.raises(OperationalException):
with pytest.raises(InvalidOrderException):
order = exchange.create_stoploss(pair='ETH/BTC', amount=1, stop_price=190,
order_types={
'stoploss': order_type,
@ -88,11 +88,11 @@ def test_stoploss_order_dry_run_kucoin(default_conf, mocker):
order_type = 'market'
default_conf['dry_run'] = True
mocker.patch(f'{EXMS}.amount_to_precision', lambda s, x, y: y)
mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y: y)
mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y, **kwargs: y)
exchange = get_patched_exchange(mocker, default_conf, api_mock, 'kucoin')
with pytest.raises(OperationalException):
with pytest.raises(InvalidOrderException):
order = exchange.create_stoploss(pair='ETH/BTC', amount=1, stop_price=190,
order_types={'stoploss': 'limit',
'stoploss_on_exchange_limit_ratio': 1.05},

View File

@ -2,11 +2,13 @@ from datetime import datetime, timedelta, timezone
from pathlib import Path
from unittest.mock import MagicMock, PropertyMock
import ccxt
import pytest
from freqtrade.enums import CandleType, MarginMode, TradingMode
from freqtrade.exceptions import RetryableOrderError
from freqtrade.exchange.exchange import timeframe_to_minutes
from tests.conftest import get_mock_coro, get_patched_exchange, log_has
from tests.conftest import EXMS, get_mock_coro, get_patched_exchange, log_has
from tests.exchange.test_exchange import ccxt_exceptionhandlers
@ -476,3 +478,116 @@ def test_load_leverage_tiers_okx(default_conf, mocker, markets, tmpdir, caplog,
exchange.load_leverage_tiers()
assert log_has(logmsg, caplog)
def test__set_leverage_okx(mocker, default_conf):
api_mock = MagicMock()
api_mock.set_leverage = MagicMock()
type(api_mock).has = PropertyMock(return_value={'setLeverage': True})
default_conf['dry_run'] = False
default_conf['trading_mode'] = TradingMode.FUTURES
default_conf['margin_mode'] = MarginMode.ISOLATED
exchange = get_patched_exchange(mocker, default_conf, api_mock, id="okx")
exchange._lev_prep('BTC/USDT:USDT', 3.2, 'buy')
assert api_mock.set_leverage.call_count == 1
# Leverage is passed through to the API call unchanged here.
assert api_mock.set_leverage.call_args_list[0][1]['leverage'] == 3.2
assert api_mock.set_leverage.call_args_list[0][1]['symbol'] == 'BTC/USDT:USDT'
assert api_mock.set_leverage.call_args_list[0][1]['params'] == {
'mgnMode': 'isolated',
'posSide': 'net'}
ccxt_exceptionhandlers(
mocker,
default_conf,
api_mock,
"okx",
"_lev_prep",
"set_leverage",
pair="XRP/USDT:USDT",
leverage=5.0,
side='buy'
)
@pytest.mark.usefixtures("init_persistence")
def test_fetch_stoploss_order_okx(default_conf, mocker):
default_conf['dry_run'] = False
api_mock = MagicMock()
api_mock.fetch_order = MagicMock()
exchange = get_patched_exchange(mocker, default_conf, api_mock, id='okx')
exchange.fetch_stoploss_order('1234', 'ETH/BTC')
assert api_mock.fetch_order.call_count == 1
assert api_mock.fetch_order.call_args_list[0][0][0] == '1234'
assert api_mock.fetch_order.call_args_list[0][0][1] == 'ETH/BTC'
assert api_mock.fetch_order.call_args_list[0][1]['params'] == {'stop': True}
api_mock.fetch_order = MagicMock(side_effect=ccxt.OrderNotFound)
api_mock.fetch_open_orders = MagicMock(return_value=[])
api_mock.fetch_closed_orders = MagicMock(return_value=[])
api_mock.fetch_canceled_orders = MagicMock(return_value=[])
with pytest.raises(RetryableOrderError):
exchange.fetch_stoploss_order('1234', 'ETH/BTC')
assert api_mock.fetch_order.call_count == 1
assert api_mock.fetch_open_orders.call_count == 1
assert api_mock.fetch_closed_orders.call_count == 1
assert api_mock.fetch_canceled_orders.call_count == 1
api_mock.fetch_order.reset_mock()
api_mock.fetch_open_orders.reset_mock()
api_mock.fetch_closed_orders.reset_mock()
api_mock.fetch_canceled_orders.reset_mock()
api_mock.fetch_closed_orders = MagicMock(return_value=[
{
'id': '1234',
'status': 'closed',
'info': {'ordId': '123455'}
}
])
mocker.patch(f"{EXMS}.fetch_order", MagicMock(return_value={'id': '123455'}))
resp = exchange.fetch_stoploss_order('1234', 'ETH/BTC')
assert api_mock.fetch_order.call_count == 1
assert api_mock.fetch_open_orders.call_count == 1
assert api_mock.fetch_closed_orders.call_count == 1
assert api_mock.fetch_canceled_orders.call_count == 0
assert resp['id'] == '1234'
assert resp['id_stop'] == '123455'
assert resp['type'] == 'stoploss'
default_conf['dry_run'] = True
exchange = get_patched_exchange(mocker, default_conf, api_mock, id='okx')
dro_mock = mocker.patch(f"{EXMS}.fetch_dry_run_order", MagicMock(return_value={'id': '123455'}))
api_mock.fetch_order.reset_mock()
api_mock.fetch_open_orders.reset_mock()
api_mock.fetch_closed_orders.reset_mock()
api_mock.fetch_canceled_orders.reset_mock()
resp = exchange.fetch_stoploss_order('1234', 'ETH/BTC')
assert api_mock.fetch_order.call_count == 0
assert api_mock.fetch_open_orders.call_count == 0
assert api_mock.fetch_closed_orders.call_count == 0
assert api_mock.fetch_canceled_orders.call_count == 0
assert dro_mock.call_count == 1
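The mock choreography above exercises a fallback lookup: the stop order id is fetched directly first, and on OrderNotFound the open, closed and canceled order lists are scanned before giving up. A generic sketch of that flow (names and the LookupError stand-in are illustrative, not the Okx implementation):

def find_order(order_id, fetch_order, fallback_fetchers):
    # fetch_order: callable raising LookupError for an unknown id.
    # fallback_fetchers: callables returning lists of order dicts, tried in order.
    try:
        return fetch_order(order_id)
    except LookupError:
        for fetch in fallback_fetchers:
            for order in fetch():
                if order.get("id") == order_id:
                    return order
    raise LookupError(f"Order {order_id} not found")

def _unknown(order_id):
    raise LookupError(order_id)

found = find_order("1234", _unknown,
                   [lambda: [], lambda: [{"id": "1234", "status": "closed"}]])
assert found["status"] == "closed"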
@pytest.mark.parametrize('sl1,sl2,sl3,side', [
(1501, 1499, 1501, "sell"),
(1499, 1501, 1499, "buy")
])
def test_stoploss_adjust_okx(mocker, default_conf, sl1, sl2, sl3, side):
exchange = get_patched_exchange(mocker, default_conf, id='okx')
order = {
'type': 'stoploss',
'price': 1500,
'stopLossPrice': 1500,
}
assert exchange.stoploss_adjust(sl1, order, side=side)
assert not exchange.stoploss_adjust(sl2, order, side=side)
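For reference, the rule these parametrized cases encode is: replace the stop on the exchange only when the newly computed stop has moved in the protective direction relative to the open order. A sketch of that comparison (illustrative helper, not the Okx/Exchange method):

def needs_adjust(new_stop: float, current_stop: float, side: str) -> bool:
    # side is the side of the stop order: "sell" protects a long, "buy" a short
    return (side == "sell" and new_stop > current_stop) or \
           (side == "buy" and new_stop < current_stop)

assert needs_adjust(1501, 1500, "sell") and not needs_adjust(1499, 1500, "sell")
assert needs_adjust(1499, 1500, "buy") and not needs_adjust(1501, 1500, "buy")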

View File

@ -79,7 +79,9 @@ def make_rl_config(conf):
"rr": 1,
"profit_aim": 0.02,
"win_reward_factor": 2
}}
},
"drop_ohlc_from_features": False
}
return conf
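For orientation, the new flag sits directly under rl_config, next to the reward parameters shown above. The fragment below is an assumed layout: only drop_ohlc_from_features itself comes from this diff, the surrounding keys are illustrative.

freqai_rl_conf = {
    "freqai": {
        "rl_config": {
            "model_reward_parameters": {"rr": 1, "profit_aim": 0.02, "win_reward_factor": 2},
            # Assumed meaning: keep raw OHLC for the environment but drop the
            # columns from the model's feature set when set to True.
            "drop_ohlc_from_features": False,
        }
    }
}
assert "drop_ohlc_from_features" in freqai_rl_conf["freqai"]["rl_config"]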

View File

@ -71,13 +71,6 @@ def test_extract_data_and_train_model_Standard(mocker, freqai_conf, model, pca,
freqai_conf['freqai']['feature_parameters'].update({"shuffle_after_split": shuffle})
freqai_conf['freqai']['feature_parameters'].update({"buffer_train_data_candles": buffer})
if 'ReinforcementLearner' in model:
model_save_ext = 'zip'
freqai_conf = make_rl_config(freqai_conf)
# test the RL guardrails
freqai_conf['freqai']['feature_parameters'].update({"use_SVM_to_remove_outliers": True})
freqai_conf['freqai']['data_split_parameters'].update({'shuffle': True})
if 'ReinforcementLearner' in model:
model_save_ext = 'zip'
freqai_conf = make_rl_config(freqai_conf)
@ -87,6 +80,7 @@ def test_extract_data_and_train_model_Standard(mocker, freqai_conf, model, pca,
if 'test_3ac' in model or 'test_4ac' in model:
freqai_conf["freqaimodel_path"] = str(Path(__file__).parents[1] / "freqai" / "test_models")
freqai_conf["freqai"]["rl_config"]["drop_ohlc_from_features"] = True
if 'PyTorchMLPRegressor' in model:
model_save_ext = 'zip'

Some files were not shown because too many files have changed in this diff