Merge branch 'freqtrade:develop' into develop

commit 7cbc0ce80a

.github/workflows/ci.yml
@@ -16,7 +16,8 @@ on:
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
+permissions:
+  repository-projects: read
 jobs:
   build_linux:
@@ -321,7 +322,6 @@ jobs:
   build_linux_online:
     # Run pytest with "live" checks
     runs-on: ubuntu-22.04
-    # permissions:
     steps:
     - uses: actions/checkout@v3
@@ -425,7 +425,7 @@ jobs:
         python setup.py sdist bdist_wheel

     - name: Publish to PyPI (Test)
-      uses: pypa/gh-action-pypi-publish@v1.7.1
+      uses: pypa/gh-action-pypi-publish@v1.8.3
       if: (github.event_name == 'release')
       with:
         user: __token__
@@ -433,7 +433,7 @@ jobs:
         repository_url: https://test.pypi.org/legacy/

     - name: Publish to PyPI
-      uses: pypa/gh-action-pypi-publish@v1.7.1
+      uses: pypa/gh-action-pypi-publish@v1.8.3
       if: (github.event_name == 'release')
       with:
         user: __token__
@@ -466,12 +466,13 @@ jobs:

     - name: Build and test and push docker images
       env:
-        IMAGE_NAME: freqtradeorg/freqtrade
         BRANCH_NAME: ${{ steps.extract_branch.outputs.branch }}
       run: |
         build_helpers/publish_docker_multi.sh

   deploy_arm:
+    permissions:
+      packages: write
     needs: [ deploy ]
     # Only run on 64bit machines
     runs-on: [self-hosted, linux, ARM64]
@@ -494,8 +495,9 @@ jobs:

     - name: Build and test and push docker images
       env:
-        IMAGE_NAME: freqtradeorg/freqtrade
         BRANCH_NAME: ${{ steps.extract_branch.outputs.branch }}
+        GHCR_USERNAME: ${{ github.actor }}
+        GHCR_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       run: |
         build_helpers/publish_docker_arm64.sh
@@ -15,10 +15,10 @@ repos:
         additional_dependencies:
           - types-cachetools==5.3.0.4
           - types-filelock==3.2.7
-          - types-requests==2.28.11.15
+          - types-requests==2.28.11.16
          - types-tabulate==0.9.0.1
          - types-python-dateutil==2.8.19.10
-          - SQLAlchemy==2.0.5.post1
+          - SQLAlchemy==2.0.7
         # stages: [push]

   - repo: https://github.com/pycqa/isort
@@ -8,8 +8,8 @@ if [ -n "$2" ] || [ ! -f "${INSTALL_LOC}/lib/libta_lib.a" ]; then
   tar zxvf ta-lib-0.4.0-src.tar.gz
   cd ta-lib \
   && sed -i.bak "s|0.00000001|0.000000000000000001 |g" src/ta_func/ta_utility.h \
-  && curl 'http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD' -o config.guess \
+  && curl 'https://raw.githubusercontent.com/gcc-mirror/gcc/master/config.guess' -o config.guess \
-  && curl 'http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD' -o config.sub \
+  && curl 'https://raw.githubusercontent.com/gcc-mirror/gcc/master/config.sub' -o config.sub \
   && ./configure --prefix=${INSTALL_LOC}/ \
   && make
   if [ $? -ne 0 ]; then
@@ -3,6 +3,10 @@
 # Use BuildKit, otherwise building on ARM fails
 export DOCKER_BUILDKIT=1

+IMAGE_NAME=freqtradeorg/freqtrade
+CACHE_IMAGE=freqtradeorg/freqtrade_cache
+GHCR_IMAGE_NAME=ghcr.io/freqtrade/freqtrade
+
 # Replace / with _ to create a valid tag
 TAG=$(echo "${BRANCH_NAME}" | sed -e "s/\//_/g")
 TAG_PLOT=${TAG}_plot
@@ -14,7 +18,6 @@ TAG_ARM=${TAG}_arm
 TAG_PLOT_ARM=${TAG_PLOT}_arm
 TAG_FREQAI_ARM=${TAG_FREQAI}_arm
 TAG_FREQAI_RL_ARM=${TAG_FREQAI_RL}_arm
-CACHE_IMAGE=freqtradeorg/freqtrade_cache

 echo "Running for ${TAG}"
@@ -38,13 +41,13 @@ if [ $? -ne 0 ]; then
     echo "failed building multiarch images"
     return 1
 fi
-# Tag image for upload and next build step
-docker tag freqtrade:$TAG_ARM ${CACHE_IMAGE}:$TAG_ARM

 docker build --cache-from freqtrade:${TAG_ARM} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_PLOT_ARM} -f docker/Dockerfile.plot .
 docker build --cache-from freqtrade:${TAG_ARM} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_FREQAI_ARM} -f docker/Dockerfile.freqai .
 docker build --cache-from freqtrade:${TAG_ARM} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_FREQAI_RL_ARM} -f docker/Dockerfile.freqai_rl .

+# Tag image for upload and next build step
+docker tag freqtrade:$TAG_ARM ${CACHE_IMAGE}:$TAG_ARM
 docker tag freqtrade:$TAG_PLOT_ARM ${CACHE_IMAGE}:$TAG_PLOT_ARM
 docker tag freqtrade:$TAG_FREQAI_ARM ${CACHE_IMAGE}:$TAG_FREQAI_ARM
 docker tag freqtrade:$TAG_FREQAI_RL_ARM ${CACHE_IMAGE}:$TAG_FREQAI_RL_ARM
@@ -59,7 +62,6 @@ fi

 docker images

-# docker push ${IMAGE_NAME}
 docker push ${CACHE_IMAGE}:$TAG_PLOT_ARM
 docker push ${CACHE_IMAGE}:$TAG_FREQAI_ARM
 docker push ${CACHE_IMAGE}:$TAG_FREQAI_RL_ARM
@@ -82,14 +84,30 @@ docker manifest push -p ${IMAGE_NAME}:${TAG_FREQAI}
 docker manifest create ${IMAGE_NAME}:${TAG_FREQAI_RL} ${CACHE_IMAGE}:${TAG_FREQAI_RL} ${CACHE_IMAGE}:${TAG_FREQAI_RL_ARM}
 docker manifest push -p ${IMAGE_NAME}:${TAG_FREQAI_RL}

+# copy images to ghcr.io
+
+alias crane="docker run --rm -i -v $(pwd)/.crane:/home/nonroot/.docker/ gcr.io/go-containerregistry/crane"
+mkdir .crane
+chmod a+rwx .crane
+
+echo "${GHCR_TOKEN}" | crane auth login ghcr.io -u "${GHCR_USERNAME}" --password-stdin
+
+crane copy ${IMAGE_NAME}:${TAG_FREQAI_RL} ${GHCR_IMAGE_NAME}:${TAG_FREQAI_RL}
+crane copy ${IMAGE_NAME}:${TAG_FREQAI} ${GHCR_IMAGE_NAME}:${TAG_FREQAI}
+crane copy ${IMAGE_NAME}:${TAG_PLOT} ${GHCR_IMAGE_NAME}:${TAG_PLOT}
+crane copy ${IMAGE_NAME}:${TAG} ${GHCR_IMAGE_NAME}:${TAG}
+
 # Tag as latest for develop builds
 if [ "${TAG}" = "develop" ]; then
     echo 'Tagging image as latest'
     docker manifest create ${IMAGE_NAME}:latest ${CACHE_IMAGE}:${TAG_ARM} ${IMAGE_NAME}:${TAG_PI} ${CACHE_IMAGE}:${TAG}
     docker manifest push -p ${IMAGE_NAME}:latest
+
+    crane copy ${IMAGE_NAME}:latest ${GHCR_IMAGE_NAME}:latest
 fi

 docker images
+rm -rf .crane

 # Cleanup old images from arm64 node.
 docker image prune -a --force --filter "until=24h"
@@ -2,6 +2,8 @@

 # The below assumes a correctly setup docker buildx environment

+IMAGE_NAME=freqtradeorg/freqtrade
+CACHE_IMAGE=freqtradeorg/freqtrade_cache
 # Replace / with _ to create a valid tag
 TAG=$(echo "${BRANCH_NAME}" | sed -e "s/\//_/g")
 TAG_PLOT=${TAG}_plot
@@ -11,7 +13,6 @@ TAG_PI="${TAG}_pi"

 PI_PLATFORM="linux/arm/v7"
 echo "Running for ${TAG}"
-CACHE_IMAGE=freqtradeorg/freqtrade_cache
 CACHE_TAG=${CACHE_IMAGE}:${TAG_PI}_cache

 # Add commit and commit_message to docker container
@@ -60,10 +60,10 @@ This loop will be repeated again and again until the bot is stopped.

 * Load historic data for configured pairlist.
 * Calls `bot_start()` once.
-* Calls `bot_loop_start()` once.
 * Calculate indicators (calls `populate_indicators()` once per pair).
 * Calculate entry / exit signals (calls `populate_entry_trend()` and `populate_exit_trend()` once per pair).
 * Loops per candle simulating entry and exit points.
+  * Calls `bot_loop_start()` strategy callback.
   * Check for Order timeouts, either via the `unfilledtimeout` configuration, or via `check_entry_timeout()` / `check_exit_timeout()` strategy callbacks.
   * Calls `adjust_entry_price()` strategy callback for open entry orders.
   * Check for trade entry signals (`enter_long` / `enter_short` columns).
@@ -46,7 +46,7 @@ Mandatory parameters are marked as **Required** and have to be set in one of the
 | `outlier_protection_percentage` | Enable to prevent outlier detection methods from discarding too much data. If more than `outlier_protection_percentage` % of points are detected as outliers by the SVM or DBSCAN, FreqAI will log a warning message and ignore outlier detection, i.e., the original dataset will be kept intact. If the outlier protection is triggered, no predictions will be made based on the training dataset. <br> **Datatype:** Float. <br> Default: `30`.
 | `reverse_train_test_order` | Split the feature dataset (see below) and use the latest data split for training and test on historical split of the data. This allows the model to be trained up to the most recent data point, while avoiding overfitting. However, you should be careful to understand the unorthodox nature of this parameter before employing it. <br> **Datatype:** Boolean. <br> Default: `False` (no reversal).
 | `shuffle_after_split` | Split the data into train and test sets, and then shuffle both sets individually. <br> **Datatype:** Boolean. <br> Default: `False`.
-| `buffer_train_data_candles` | Cut `buffer_train_data_candles` off the beginning and end of the training data *after* the indicators were populated. The main example use is when predicting maxima and minima, the argrelextrema function cannot know the maxima/minima at the edges of the timerange. To improve model accuracy, it is best to compute argrelextrema on the full timerange and then use this function to cut off the edges (buffer) by the kernel. In another case, if the targets are set to a shifted price movement, this buffer is unnecessary because the shifted candles at the end of the timerange will be NaN and FreqAI will automatically cut those off of the training dataset.<br> **Datatype:** Boolean. <br> Default: `False`.
+| `buffer_train_data_candles` | Cut `buffer_train_data_candles` off the beginning and end of the training data *after* the indicators were populated. The main example use is when predicting maxima and minima, the argrelextrema function cannot know the maxima/minima at the edges of the timerange. To improve model accuracy, it is best to compute argrelextrema on the full timerange and then use this function to cut off the edges (buffer) by the kernel. In another case, if the targets are set to a shifted price movement, this buffer is unnecessary because the shifted candles at the end of the timerange will be NaN and FreqAI will automatically cut those off of the training dataset.<br> **Datatype:** Integer. <br> Default: `0`.

 ### Data split parameters
@@ -55,7 +55,7 @@ where `ReinforcementLearner` will use the templated `ReinforcementLearner` from
         dataframe["&-action"] = 0
     ```

-Most of the function remains the same as for typical Regressors, however, the function above shows how the strategy must pass the raw price data to the agent so that it has access to raw OHLCV in the training environment:
+Most of the function remains the same as for typical Regressors, however, the function below shows how the strategy must pass the raw price data to the agent so that it has access to raw OHLCV in the training environment:

 ```python
     def feature_engineering_standard(self, dataframe, **kwargs):
@@ -248,13 +248,13 @@ FreqAI also provides a built in episodic summary logger called `self.tensorboard
         """
         def calculate_reward(self, action: int) -> float:
             if not self._is_valid(action):
-                self.tensorboard_log("is_valid")
+                self.tensorboard_log("invalid")
                 return -2

     ```

 !!! Note
-    The `self.tensorboard_log()` function is designed for tracking incremented objects only i.e. events, actions inside the training environment. If the event of interest is a float, the float can be passed as the second argument e.g. `self.tensorboard_log("float_metric1", 0.23)` would add 0.23 to `float_metric`. In this case you can also disable incrementing using `inc=False` parameter.
+    The `self.tensorboard_log()` function is designed for tracking incremented objects only i.e. events, actions inside the training environment. If the event of interest is a float, the float can be passed as the second argument e.g. `self.tensorboard_log("float_metric1", 0.23)`. In this case the metric values are not incremented.

 ### Choosing a base environment
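The note above describes the two ways `self.tensorboard_log()` is used inside a training environment. Below is a minimal sketch (not part of this commit) of a custom `calculate_reward()` exercising both forms; the `Base5ActionRLEnv` base class and its `get_unrealized_profit()` helper are assumed from the FreqAI reinforcement-learning base environment, and only the `tensorboard_log()` calls mirror the change above.

```python
from freqtrade.freqai.RL.Base5ActionRLEnv import Base5ActionRLEnv


class MyRLEnv(Base5ActionRLEnv):
    """Illustrative environment override - a sketch, not the project's reference reward."""

    def calculate_reward(self, action: int) -> float:
        if not self._is_valid(action):
            # event form: increments the "invalid" counter for this episode
            self.tensorboard_log("invalid")
            return -2

        pnl = self.get_unrealized_profit()
        # float form: the value is recorded as-is and is not incremented
        self.tensorboard_log("unrealized_pnl", pnl)
        return float(pnl)
```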
@@ -128,6 +128,9 @@ The FreqAI specific parameter `label_period_candles` defines the offset (number

 You can choose to adopt a continual learning scheme by setting `"continual_learning": true` in the config. By enabling `continual_learning`, after training an initial model from scratch, subsequent trainings will start from the final model state of the preceding training. This gives the new model a "memory" of the previous state. By default, this is set to `False` which means that all new models are trained from scratch, without input from previous models.

+???+ danger "Continual learning enforces a constant parameter space"
+    Since `continual_learning` means that the model parameter space *cannot* change between trainings, `principal_component_analysis` is automatically disabled when `continual_learning` is enabled. Hint: PCA changes the parameter space and the number of features, learn more about PCA [here](freqai-feature-engineering.md#data-dimensionality-reduction-with-principal-component-analysis).
+
 ## Hyperopt

 You can hyperopt using the same command as for [typical Freqtrade hyperopt](hyperopt.md):
@@ -149,7 +149,7 @@ The below example assumes a timeframe of 1 hour:
 * Locks each pair after selling for an additional 5 candles (`CooldownPeriod`), giving other pairs a chance to get filled.
 * Stops trading for 4 hours (`4 * 1h candles`) if the last 2 days (`48 * 1h candles`) had 20 trades, which caused a max-drawdown of more than 20%. (`MaxDrawdown`).
 * Stops trading if more than 4 stoploss occur for all pairs within a 1 day (`24 * 1h candles`) limit (`StoplossGuard`).
-* Locks all pairs that had 4 Trades within the last 6 hours (`6 * 1h candles`) with a combined profit ratio of below 0.02 (<2%) (`LowProfitPairs`).
+* Locks all pairs that had 2 Trades within the last 6 hours (`6 * 1h candles`) with a combined profit ratio of below 0.02 (<2%) (`LowProfitPairs`).
 * Locks all pairs for 2 candles that had a profit of below 0.01 (<1%) within the last 24h (`24 * 1h candles`), a minimum of 4 trades.

 ``` python
@@ -1,6 +1,6 @@
 markdown==3.3.7
 mkdocs==1.4.2
-mkdocs-material==9.1.2
+mkdocs-material==9.1.4
 mdx_truly_sane_lists==1.3
 pymdown-extensions==9.10
 jinja2==3.1.2
@@ -51,7 +51,8 @@ During hyperopt, this runs only once at startup.

 ## Bot loop start

-A simple callback which is called once at the start of every bot throttling iteration (roughly every 5 seconds, unless configured differently).
+A simple callback which is called once at the start of every bot throttling iteration in dry/live mode (roughly every 5
+seconds, unless configured differently) or once per candle in backtest/hyperopt mode.
 This can be used to perform calculations which are pair independent (apply to all pairs), loading of external data, etc.

 ``` python
@@ -61,11 +62,12 @@ class AwesomeStrategy(IStrategy):

     # ... populate_* methods

-    def bot_loop_start(self, **kwargs) -> None:
+    def bot_loop_start(self, current_time: datetime, **kwargs) -> None:
         """
         Called at the start of the bot iteration (one loop).
         Might be used to perform pair-independent tasks
         (e.g. gather some remote resource for comparison)
+        :param current_time: datetime object, containing the current datetime
         :param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
         """
         if self.config['runmode'].value in ('live', 'dry_run'):
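For context, a minimal sketch (not part of this commit) of a strategy adopting the new `bot_loop_start()` signature from the hunk above; everything except the signature and the runmode check shown in the diff is illustrative.

```python
from datetime import datetime

from freqtrade.strategy import IStrategy


class AwesomeStrategy(IStrategy):
    # ... populate_* methods omitted

    def bot_loop_start(self, current_time: datetime, **kwargs) -> None:
        # Runs once per throttling iteration in dry/live mode and once per candle
        # in backtest/hyperopt mode, as described in the updated docs above.
        if self.config['runmode'].value in ('live', 'dry_run'):
            # Illustrative pair-independent work: remember when the loop last ran.
            self._last_loop_start = current_time
```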
@@ -955,3 +955,47 @@ Print trades with id 2 and 3 as json
 ``` bash
 freqtrade show-trades --db-url sqlite:///tradesv3.sqlite --trade-ids 2 3 --print-json
 ```
+
+### Strategy-Updater
+
+Updates listed strategies or all strategies within the strategies folder to be v3 compliant.
+If the command runs without --strategy-list then all strategies inside the strategies folder will be converted.
+Your original strategy will remain available in the `user_data/strategies_orig_updater/` directory.
+
+!!! Warning "Conversion results"
+    Strategy updater will work on a "best effort" approach. Please do your due diligence and verify the results of the conversion.
+    We also recommend to run a python formatter (e.g. `black`) to format results in a sane manner.
+
+```
+usage: freqtrade strategy-updater [-h] [-v] [--logfile FILE] [-V] [-c PATH]
+                                  [-d PATH] [--userdir PATH]
+                                  [--strategy-list STRATEGY_LIST [STRATEGY_LIST ...]]
+
+options:
+  -h, --help            show this help message and exit
+  --strategy-list STRATEGY_LIST [STRATEGY_LIST ...]
+                        Provide a space-separated list of strategies to
+                        backtest. Please note that timeframe needs to be set
+                        either in config or via command line. When using this
+                        together with `--export trades`, the strategy-name is
+                        injected into the filename (so `backtest-data.json`
+                        becomes `backtest-data-SampleStrategy.json`
+
+Common arguments:
+  -v, --verbose         Verbose mode (-vv for more, -vvv to get all messages).
+  --logfile FILE, --log-file FILE
+                        Log to the file specified. Special values are:
+                        'syslog', 'journald'. See the documentation for more
+                        details.
+  -V, --version         show program's version number and exit
+  -c PATH, --config PATH
+                        Specify configuration file (default:
+                        `userdir/config.json` or `config.json` whichever
+                        exists). Multiple --config options may be used. Can be
+                        set to `-` to read config from stdin.
+  -d PATH, --datadir PATH, --data-dir PATH
+                        Path to directory with historical backtesting data.
+  --userdir PATH, --user-data-dir PATH
+                        Path to userdata directory.
+
+```
@@ -22,5 +22,6 @@ from freqtrade.commands.optimize_commands import (start_backtesting, start_backt
                                                   start_edge, start_hyperopt)
 from freqtrade.commands.pairlist_commands import start_test_pairlist
 from freqtrade.commands.plot_commands import start_plot_dataframe, start_plot_profit
+from freqtrade.commands.strategy_utils_commands import start_strategy_update
 from freqtrade.commands.trade_commands import start_trading
 from freqtrade.commands.webserver_commands import start_webserver
@@ -40,8 +40,8 @@ def setup_analyze_configuration(args: Dict[str, Any], method: RunMode) -> Dict[s

         if (not Path(signals_file).exists()):
             raise OperationalException(
-                (f"Cannot find latest backtest signals file: {signals_file}."
-                 "Run backtesting with `--export signals`.")
+                f"Cannot find latest backtest signals file: {signals_file}."
+                "Run backtesting with `--export signals`."
             )

     return config
@@ -111,10 +111,13 @@ ARGS_ANALYZE_ENTRIES_EXITS = ["exportfilename", "analysis_groups", "enter_reason
 NO_CONF_REQURIED = ["convert-data", "convert-trade-data", "download-data", "list-timeframes",
                     "list-markets", "list-pairs", "list-strategies", "list-freqaimodels",
                     "list-data", "hyperopt-list", "hyperopt-show", "backtest-filter",
-                    "plot-dataframe", "plot-profit", "show-trades", "trades-to-ohlcv"]
+                    "plot-dataframe", "plot-profit", "show-trades", "trades-to-ohlcv",
+                    "strategy-updater"]

 NO_CONF_ALLOWED = ["create-userdir", "list-exchanges", "new-strategy"]

+ARGS_STRATEGY_UTILS = ["strategy_list", "strategy_path", "recursive_strategy_search"]
+

 class Arguments:
     """
@@ -198,8 +201,8 @@ class Arguments:
                                        start_list_freqAI_models, start_list_markets,
                                        start_list_strategies, start_list_timeframes,
                                        start_new_config, start_new_strategy, start_plot_dataframe,
-                                       start_plot_profit, start_show_trades, start_test_pairlist,
-                                       start_trading, start_webserver)
+                                       start_plot_profit, start_show_trades, start_strategy_update,
+                                       start_test_pairlist, start_trading, start_webserver)

         subparsers = self.parser.add_subparsers(dest='command',
                                                 # Use custom message when no subhandler is added
@@ -440,3 +443,11 @@ class Arguments:
                                               parents=[_common_parser])
         webserver_cmd.set_defaults(func=start_webserver)
         self._build_args(optionlist=ARGS_WEBSERVER, parser=webserver_cmd)
+
+        # Add strategy_updater subcommand
+        strategy_updater_cmd = subparsers.add_parser('strategy-updater',
+                                                     help='updates outdated strategy'
+                                                     'files to the current version',
+                                                     parents=[_common_parser])
+        strategy_updater_cmd.set_defaults(func=start_strategy_update)
+        self._build_args(optionlist=ARGS_STRATEGY_UTILS, parser=strategy_updater_cmd)
@@ -204,11 +204,14 @@ def start_list_data(args: Dict[str, Any]) -> None:
                        pair, timeframe, candle_type,
                        *dhc.ohlcv_data_min_max(pair, timeframe, candle_type)
                        ) for pair, timeframe, candle_type in paircombs]

         print(tabulate([
             (pair, timeframe, candle_type,
              start.strftime(DATETIME_PRINT_FORMAT),
              end.strftime(DATETIME_PRINT_FORMAT))
-            for pair, timeframe, candle_type, start, end in paircombs1
+            for pair, timeframe, candle_type, start, end in sorted(
+                paircombs1,
+                key=lambda x: (x[0], timeframe_to_minutes(x[1]), x[2]))
         ],
             headers=("Pair", "Timeframe", "Type", 'From', 'To'),
             tablefmt='psql', stralign='right'))
@@ -1,7 +1,7 @@
 import logging
 from typing import Any, Dict

-from sqlalchemy import func
+from sqlalchemy import func, select

 from freqtrade.configuration.config_setup import setup_utils_configuration
 from freqtrade.enums import RunMode
@@ -20,7 +20,7 @@ def start_convert_db(args: Dict[str, Any]) -> None:
     config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

     init_db(config['db_url'])
-    session_target = Trade._session
+    session_target = Trade.session
     init_db(config['db_url_from'])
     logger.info("Starting db migration.")
@@ -36,16 +36,16 @@ def start_convert_db(args: Dict[str, Any]) -> None:

     session_target.commit()

-    for pairlock in PairLock.query:
+    for pairlock in PairLock.get_all_locks():
         pairlock_count += 1
         make_transient(pairlock)
         session_target.add(pairlock)
     session_target.commit()

     # Update sequences
-    max_trade_id = session_target.query(func.max(Trade.id)).scalar()
-    max_order_id = session_target.query(func.max(Order.id)).scalar()
-    max_pairlock_id = session_target.query(func.max(PairLock.id)).scalar()
+    max_trade_id = session_target.scalar(select(func.max(Trade.id)))
+    max_order_id = session_target.scalar(select(func.max(Order.id)))
+    max_pairlock_id = session_target.scalar(select(func.max(PairLock.id)))

     set_sequence_ids(session_target.get_bind(),
                      trade_id=max_trade_id,
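The hunk above swaps the legacy `Query` API for SQLAlchemy 2.0-style `select()` calls. Below is a self-contained sketch (not part of this commit) of the equivalence, using a throwaway in-memory model rather than freqtrade's own `Trade` class.

```python
from sqlalchemy import Column, Integer, create_engine, func, select
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Trade(Base):
    # toy table, unrelated to freqtrade's Trade model
    __tablename__ = 'trades'
    id = Column(Integer, primary_key=True)


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Trade(), Trade(), Trade()])
    session.commit()

    # legacy Query API (still available, but superseded in 2.0-style code)
    legacy_max = session.query(func.max(Trade.id)).scalar()
    # 2.0-style select(), as used in start_convert_db() above
    new_max = session.scalar(select(func.max(Trade.id)))
    assert legacy_max == new_max == 3
```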
freqtrade/commands/strategy_utils_commands.py (new file)
@@ -0,0 +1,55 @@
+import logging
+import sys
+import time
+from pathlib import Path
+from typing import Any, Dict
+
+from freqtrade.configuration import setup_utils_configuration
+from freqtrade.enums import RunMode
+from freqtrade.resolvers import StrategyResolver
+from freqtrade.strategy.strategyupdater import StrategyUpdater
+
+
+logger = logging.getLogger(__name__)
+
+
+def start_strategy_update(args: Dict[str, Any]) -> None:
+    """
+    Start the strategy updating script
+    :param args: Cli args from Arguments()
+    :return: None
+    """
+
+    if sys.version_info == (3, 8):  # pragma: no cover
+        sys.exit("Freqtrade strategy updater requires Python version >= 3.9")
+
+    config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
+
+    strategy_objs = StrategyResolver.search_all_objects(
+        config, enum_failed=False, recursive=config.get('recursive_strategy_search', False))
+
+    filtered_strategy_objs = []
+    if args['strategy_list']:
+        filtered_strategy_objs = [
+            strategy_obj for strategy_obj in strategy_objs
+            if strategy_obj['name'] in args['strategy_list']
+        ]
+
+    else:
+        # Use all available entries.
+        filtered_strategy_objs = strategy_objs
+
+    processed_locations = set()
+    for strategy_obj in filtered_strategy_objs:
+        if strategy_obj['location'] not in processed_locations:
+            processed_locations.add(strategy_obj['location'])
+            start_conversion(strategy_obj, config)
+
+
+def start_conversion(strategy_obj, config):
+    print(f"Conversion of {Path(strategy_obj['location']).name} started.")
+    instance_strategy_updater = StrategyUpdater()
+    start = time.perf_counter()
+    instance_strategy_updater.start(config, strategy_obj)
+    elapsed = time.perf_counter() - start
+    print(f"Conversion of {Path(strategy_obj['location']).name} took {elapsed:.1f} seconds.")
@@ -27,10 +27,7 @@ def _extend_validator(validator_class):
             if 'default' in subschema:
                 instance.setdefault(prop, subschema['default'])

-        for error in validate_properties(
-            validator, properties, instance, schema,
-        ):
-            yield error
+        yield from validate_properties(validator, properties, instance, schema)

     return validators.extend(
         validator_class, {'properties': set_defaults}
@@ -36,9 +36,10 @@ AVAILABLE_PAIRLISTS = ['StaticPairList', 'VolumePairList', 'ProducerPairList', '
                        'AgeFilter', 'OffsetFilter', 'PerformanceFilter',
                        'PrecisionFilter', 'PriceFilter', 'RangeStabilityFilter',
                        'ShuffleFilter', 'SpreadFilter', 'VolatilityFilter']
-AVAILABLE_PROTECTIONS = ['CooldownPeriod', 'LowProfitPairs', 'MaxDrawdown', 'StoplossGuard']
-AVAILABLE_DATAHANDLERS_TRADES = ['json', 'jsongz', 'hdf5']
-AVAILABLE_DATAHANDLERS = AVAILABLE_DATAHANDLERS_TRADES + ['feather', 'parquet']
+AVAILABLE_PROTECTIONS = ['CooldownPeriod',
+                         'LowProfitPairs', 'MaxDrawdown', 'StoplossGuard']
+AVAILABLE_DATAHANDLERS_TRADES = ['json', 'jsongz', 'hdf5', 'feather']
+AVAILABLE_DATAHANDLERS = AVAILABLE_DATAHANDLERS_TRADES + ['parquet']
 BACKTEST_BREAKDOWNS = ['day', 'week', 'month']
 BACKTEST_CACHE_AGE = ['none', 'day', 'week', 'month']
 BACKTEST_CACHE_DEFAULT = 'day'
@@ -373,7 +373,7 @@ def load_trades_from_db(db_url: str, strategy: Optional[str] = None) -> pd.DataF
     filters = []
     if strategy:
         filters.append(Trade.strategy == strategy)
-    trades = trade_list_to_dataframe(Trade.get_trades(filters).all())
+    trades = trade_list_to_dataframe(list(Trade.get_trades(filters).all()))

     return trades
@@ -21,6 +21,7 @@ from freqtrade.exchange import Exchange, timeframe_to_seconds
 from freqtrade.exchange.types import OrderBook
 from freqtrade.misc import append_candles_to_dataframe
 from freqtrade.rpc import RPCManager
+from freqtrade.rpc.rpc_types import RPCAnalyzedDFMsg
 from freqtrade.util import PeriodicCache
@@ -118,8 +119,7 @@ class DataProvider:
         :param new_candle: This is a new candle
         """
         if self.__rpc:
-            self.__rpc.send_msg(
-                {
+            msg: RPCAnalyzedDFMsg = {
                 'type': RPCMessageType.ANALYZED_DF,
                 'data': {
                     'key': pair_key,
@@ -127,7 +127,7 @@ class DataProvider:
                     'la': datetime.now(timezone.utc)
                 }
             }
-            )
+            self.__rpc.send_msg(msg)
             if new_candle:
                 self.__rpc.send_msg({
                     'type': RPCMessageType.NEW_CANDLE,
@@ -4,7 +4,7 @@ from typing import Optional
 from pandas import DataFrame, read_feather, to_datetime

 from freqtrade.configuration import TimeRange
-from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, TradeList
+from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS, TradeList
 from freqtrade.enums import CandleType

 from .idatahandler import IDataHandler
@@ -92,12 +92,11 @@ class FeatherDataHandler(IDataHandler):
         :param data: List of Lists containing trade data,
                      column sequence as in DEFAULT_TRADES_COLUMNS
         """
-        # filename = self._pair_trades_filename(self._datadir, pair)
-
-        raise NotImplementedError()
-        # array = pa.array(data)
-        # array
-        # feather.write_feather(data, filename)
+        filename = self._pair_trades_filename(self._datadir, pair)
+        self.create_dir_if_needed(filename)
+
+        tradesdata = DataFrame(data, columns=DEFAULT_TRADES_COLUMNS)
+        tradesdata.to_feather(filename, compression_level=9, compression='lz4')

     def trades_append(self, pair: str, data: TradeList):
         """
@@ -116,14 +115,13 @@ class FeatherDataHandler(IDataHandler):
         :param timerange: Timerange to load trades for - currently not implemented
         :return: List of trades
         """
-        raise NotImplementedError()
-        # filename = self._pair_trades_filename(self._datadir, pair)
-        # tradesdata = misc.file_load_json(filename)
-
-        # if not tradesdata:
-        #     return []
-
-        # return tradesdata
+        filename = self._pair_trades_filename(self._datadir, pair)
+        if not filename.exists():
+            return []
+
+        tradesdata = read_feather(filename)
+
+        return tradesdata.values.tolist()

     @classmethod
     def _get_file_extension(cls):
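The rewritten trades handling above is a thin wrapper around pandas' feather I/O. Below is a standalone sketch (not part of this commit) of the same round-trip; the column list mirrors freqtrade's `DEFAULT_TRADES_COLUMNS` and the file name and sample rows are made up.

```python
from pathlib import Path

from pandas import DataFrame, read_feather

# mirrors freqtrade's DEFAULT_TRADES_COLUMNS ordering
TRADES_COLUMNS = ['timestamp', 'id', 'type', 'side', 'price', 'amount', 'cost']

trades = [
    [1679000000000, '1', None, 'buy', 27000.0, 0.01, 270.0],
    [1679000001000, '2', None, 'sell', 27010.0, 0.01, 270.1],
]

filename = Path('XRP_USDT-trades.feather')  # hypothetical path for illustration

# trades_store(): list-of-lists -> DataFrame -> lz4-compressed feather file
DataFrame(trades, columns=TRADES_COLUMNS).to_feather(
    filename, compression_level=9, compression='lz4')

# trades_load(): feather file -> DataFrame -> list-of-lists (empty if missing)
loaded = read_feather(filename).values.tolist() if filename.exists() else []
assert len(loaded) == len(trades)
```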
@@ -23,7 +23,7 @@ class Binance(Exchange):
     _ft_has: Dict = {
         "stoploss_on_exchange": True,
         "stoploss_order_types": {"limit": "stop_loss_limit"},
-        "order_time_in_force": ['GTC', 'FOK', 'IOC'],
+        "order_time_in_force": ["GTC", "FOK", "IOC", "PO"],
         "ohlcv_candle_limit": 1000,
         "trades_pagination": "id",
         "trades_pagination_arg": "fromId",
@@ -31,6 +31,7 @@ class Binance(Exchange):
     }
     _ft_has_futures: Dict = {
         "stoploss_order_types": {"limit": "stop", "market": "stop_market"},
+        "order_time_in_force": ["GTC", "FOK", "IOC"],
         "tickers_have_price": False,
         "floor_leverage": True,
         "stop_price_type_field": "workingType",
@@ -114,7 +114,7 @@ class Bybit(Exchange):
         data = [[x['timestamp'], x['fundingRate'], 0, 0, 0, 0] for x in data]
         return data

-    def _lev_prep(self, pair: str, leverage: float, side: BuySell):
+    def _lev_prep(self, pair: str, leverage: float, side: BuySell, accept_fail: bool = False):
         if self.trading_mode != TradingMode.SPOT:
             params = {'leverage': leverage}
             self.set_margin_mode(pair, self.margin_mode, accept_fail=True, params=params)
@@ -60,7 +60,6 @@ class Exchange:
     _ft_has_default: Dict = {
         "stoploss_on_exchange": False,
         "order_time_in_force": ["GTC"],
-        "time_in_force_parameter": "timeInForce",
         "ohlcv_params": {},
         "ohlcv_candle_limit": 500,
         "ohlcv_has_history": True,  # Some exchanges (Kraken) don't provide history via ohlcv
@@ -206,6 +205,8 @@ class Exchange:
                 and self._api_async.session):
             logger.debug("Closing async ccxt session.")
             self.loop.run_until_complete(self._api_async.close())
+        if self.loop and not self.loop.is_closed():
+            self.loop.close()

     def validate_config(self, config):
         # Check if timeframe is available
@@ -1019,10 +1020,10 @@ class Exchange:

     # Order handling

-    def _lev_prep(self, pair: str, leverage: float, side: BuySell):
+    def _lev_prep(self, pair: str, leverage: float, side: BuySell, accept_fail: bool = False):
         if self.trading_mode != TradingMode.SPOT:
-            self.set_margin_mode(pair, self.margin_mode)
-            self._set_leverage(leverage, pair)
+            self.set_margin_mode(pair, self.margin_mode, accept_fail)
+            self._set_leverage(leverage, pair, accept_fail)

     def _get_params(
         self,
@@ -1034,8 +1035,7 @@ class Exchange:
     ) -> Dict:
         params = self._params.copy()
         if time_in_force != 'GTC' and ordertype != 'market':
-            param = self._ft_has.get('time_in_force_parameter', '')
-            params.update({param: time_in_force.upper()})
+            params.update({'timeInForce': time_in_force.upper()})
         if reduceOnly:
             params.update({'reduceOnly': True})
         return params
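A tiny, self-contained illustration (not from the commit) of what the simplified branch above produces: non-GTC, non-market orders now always send ccxt's canonical `timeInForce` key instead of a parameter name looked up from `_ft_has`.

```python
def build_tif_params(ordertype: str, time_in_force: str) -> dict:
    # mirrors the simplified _get_params() branch shown above
    params: dict = {}
    if time_in_force != 'GTC' and ordertype != 'market':
        params.update({'timeInForce': time_in_force.upper()})
    return params


assert build_tif_params('limit', 'IOC') == {'timeInForce': 'IOC'}
assert build_tif_params('market', 'IOC') == {}
assert build_tif_params('limit', 'GTC') == {}
```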
@@ -1137,7 +1137,11 @@ class Exchange:
                           "sell" else (stop_price >= limit_rate))
         # Ensure rate is less than stop price
         if bad_stop_price:
-            raise OperationalException(
+            # This can for example happen if the stop / liquidation price is set to 0
+            # Which is possible if a market-order closes right away.
+            # The InvalidOrderException will bubble up to exit_positions, where it will be
+            # handled gracefully.
+            raise InvalidOrderException(
                 "In stoploss limit order, stop price should be more than limit price. "
                 f"Stop price: {stop_price}, Limit price: {limit_rate}, "
                 f"Limit Price pct: {limit_price_pct}"
@@ -1204,7 +1208,7 @@ class Exchange:

             amount = self.amount_to_precision(pair, self._amount_to_contracts(pair, amount))

-            self._lev_prep(pair, leverage, side)
+            self._lev_prep(pair, leverage, side, accept_fail=True)
             order = self._api.create_order(symbol=pair, type=ordertype, side=side,
                                            amount=amount, price=limit_rate, params=params)
             self._log_exchange_response('create_stoploss_order', order)
@@ -2529,7 +2533,6 @@ class Exchange:
         self,
         leverage: float,
         pair: Optional[str] = None,
-        trading_mode: Optional[TradingMode] = None,
         accept_fail: bool = False,
     ):
         """
@@ -2547,7 +2550,7 @@ class Exchange:
             self._log_exchange_response('set_leverage', res)
         except ccxt.DDoSProtection as e:
             raise DDosProtection(e) from e
-        except ccxt.BadRequest as e:
+        except (ccxt.BadRequest, ccxt.InsufficientFunds) as e:
             if not accept_fail:
                 raise TemporaryError(
                     f'Could not set leverage due to {e.__class__.__name__}. Message: {e}') from e
@@ -75,8 +75,7 @@ class Gate(Exchange):
             )
         if ordertype == 'market' and self.trading_mode == TradingMode.FUTURES:
             params['type'] = 'market'
-            param = self._ft_has.get('time_in_force_parameter', '')
-            params.update({param: 'IOC'})
+            params.update({'timeInForce': 'IOC'})
         return params

     def get_trades_for_order(self, order_id: str, pair: str, since: datetime,
@@ -158,7 +158,6 @@ class Kraken(Exchange):
         self,
         leverage: float,
         pair: Optional[str] = None,
-        trading_mode: Optional[TradingMode] = None,
         accept_fail: bool = False,
     ):
         """
@@ -1,14 +1,16 @@
 import logging
-from typing import Dict, List, Optional, Tuple
+from typing import Any, Dict, List, Optional, Tuple

 import ccxt

 from freqtrade.constants import BuySell
 from freqtrade.enums import CandleType, MarginMode, TradingMode
 from freqtrade.enums.pricetype import PriceType
-from freqtrade.exceptions import DDosProtection, OperationalException, TemporaryError
+from freqtrade.exceptions import (DDosProtection, OperationalException, RetryableOrderError,
+                                  TemporaryError)
 from freqtrade.exchange import Exchange, date_minus_candles
 from freqtrade.exchange.common import retrier
+from freqtrade.misc import safe_value_fallback2


 logger = logging.getLogger(__name__)
@@ -24,11 +26,13 @@ class Okx(Exchange):
         "ohlcv_candle_limit": 100,  # Warning, special case with data prior to X months
         "mark_ohlcv_timeframe": "4h",
         "funding_fee_timeframe": "8h",
+        "stoploss_order_types": {"limit": "limit"},
+        "stoploss_on_exchange": True,
     }
     _ft_has_futures: Dict = {
         "tickers_have_quoteVolume": False,
         "fee_cost_in_contracts": True,
-        "stop_price_type_field": "tpTriggerPxType",
+        "stop_price_type_field": "slTriggerPxType",
         "stop_price_type_value_mapping": {
             PriceType.LAST: "last",
             PriceType.MARK: "index",
@@ -121,10 +125,9 @@ class Okx(Exchange):
         return params

     @retrier
-    def _lev_prep(self, pair: str, leverage: float, side: BuySell):
+    def _lev_prep(self, pair: str, leverage: float, side: BuySell, accept_fail: bool = False):
         if self.trading_mode != TradingMode.SPOT and self.margin_mode is not None:
             try:
-                # TODO-lev: Test me properly (check mgnMode passed)
                 res = self._api.set_leverage(
                     leverage=leverage,
                     symbol=pair,
@ -157,3 +160,78 @@ class Okx(Exchange):
|
|||||||
|
|
||||||
pair_tiers = self._leverage_tiers[pair]
|
pair_tiers = self._leverage_tiers[pair]
|
||||||
return pair_tiers[-1]['maxNotional'] / leverage
|
return pair_tiers[-1]['maxNotional'] / leverage
|
||||||
|
|
||||||
|
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:
|
||||||
|
|
||||||
|
params = self._params.copy()
|
||||||
|
# Verify if stopPrice works for your exchange!
|
||||||
|
params.update({'stopLossPrice': stop_price})
|
||||||
|
|
||||||
|
if self.trading_mode == TradingMode.FUTURES and self.margin_mode:
|
||||||
|
params['tdMode'] = self.margin_mode.value
|
||||||
|
params['posSide'] = self._get_posSide(side, True)
|
||||||
|
return params
|
||||||
|
|
||||||
|
def stoploss_adjust(self, stop_loss: float, order: Dict, side: str) -> bool:
|
||||||
|
"""
|
||||||
|
OKX uses non-default stoploss price naming.
|
||||||
|
"""
|
||||||
|
if not self._ft_has.get('stoploss_on_exchange'):
|
||||||
|
raise OperationalException(f"stoploss is not implemented for {self.name}.")
|
||||||
|
|
||||||
|
return (
|
||||||
|
order.get('stopLossPrice', None) is None
|
||||||
|
or ((side == "sell" and stop_loss > float(order['stopLossPrice'])) or
|
||||||
|
(side == "buy" and stop_loss < float(order['stopLossPrice'])))
|
||||||
|
)
|
||||||
|
|
||||||
|
def fetch_stoploss_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
|
||||||
|
if self._config['dry_run']:
|
||||||
|
return self.fetch_dry_run_order(order_id)
|
||||||
|
|
||||||
|
try:
|
||||||
|
params1 = {'stop': True}
|
||||||
|
order_reg = self._api.fetch_order(order_id, pair, params=params1)
|
||||||
|
self._log_exchange_response('fetch_stoploss_order', order_reg)
|
||||||
|
return order_reg
|
||||||
|
except ccxt.OrderNotFound:
|
||||||
|
pass
|
||||||
|
params2 = {'stop': True, 'ordType': 'conditional'}
|
||||||
|
for method in (self._api.fetch_open_orders, self._api.fetch_closed_orders,
|
||||||
|
self._api.fetch_canceled_orders):
|
||||||
|
try:
|
||||||
|
orders = method(pair, params=params2)
|
||||||
|
orders_f = [order for order in orders if order['id'] == order_id]
|
||||||
|
if orders_f:
|
||||||
|
order = orders_f[0]
|
||||||
|
if (order['status'] == 'closed'
|
||||||
|
and (real_order_id := order.get('info', {}).get('ordId')) is not None):
|
||||||
|
# Once a order triggered, we fetch the regular followup order.
|
||||||
|
order_reg = self.fetch_order(real_order_id, pair)
|
||||||
|
self._log_exchange_response('fetch_stoploss_order1', order_reg)
|
||||||
|
order_reg['id_stop'] = order_reg['id']
|
||||||
|
order_reg['id'] = order_id
|
||||||
|
order_reg['type'] = 'stoploss'
|
||||||
|
order_reg['status_stop'] = 'triggered'
|
||||||
|
return order_reg
|
||||||
|
order['type'] = 'stoploss'
|
||||||
|
return order
|
||||||
|
except ccxt.BaseError:
|
||||||
|
pass
|
||||||
|
raise RetryableOrderError(
|
||||||
|
f'StoplossOrder not found (pair: {pair} id: {order_id}).')
|
||||||
|
|
||||||
|
def get_order_id_conditional(self, order: Dict[str, Any]) -> str:
|
||||||
|
if order['type'] == 'stop':
|
||||||
|
return safe_value_fallback2(order, order, 'id_stop', 'id')
|
||||||
|
return order['id']
|
||||||
|
|
||||||
|
def cancel_stoploss_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
|
||||||
|
params1 = {'stop': True}
|
||||||
|
# 'ordType': 'conditional'
|
||||||
|
#
|
||||||
|
return self.cancel_order(
|
||||||
|
order_id=order_id,
|
||||||
|
pair=pair,
|
||||||
|
params=params1,
|
||||||
|
)
|
||||||
|
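For context on the new stoploss_adjust contract above: it returns True whenever the stop order already on the exchange no longer matches the price the bot wants, so the caller knows to cancel and replace it. A minimal, self-contained sketch of that decision (order payload and prices below are invented for illustration):

# Stand-alone sketch of the stoploss_adjust decision shown in the diff above.
def needs_replacement(stop_loss: float, order: dict, side: str) -> bool:
    # True when no stop price is stored, or when the desired stop has moved
    # in the adverse direction relative to the one on the order.
    return (
        order.get('stopLossPrice') is None
        or (side == "sell" and stop_loss > float(order['stopLossPrice']))
        or (side == "buy" and stop_loss < float(order['stopLossPrice']))
    )

existing = {'stopLossPrice': 95.0}                        # hypothetical exchange order
print(needs_replacement(97.0, existing, side="sell"))     # True - trailing stop moved up
print(needs_replacement(94.0, existing, side="sell"))     # False - existing stop still valid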
@@ -47,7 +47,7 @@ class Base3ActionRLEnv(BaseEnvironment):
         self._update_unrealized_total_profit()
         step_reward = self.calculate_reward(action)
         self.total_reward += step_reward
-        self.tensorboard_log(self.actions._member_names_[action])
+        self.tensorboard_log(self.actions._member_names_[action], category="actions")

         trade_type = None
         if self.is_tradesignal(action):

@@ -48,7 +48,7 @@ class Base4ActionRLEnv(BaseEnvironment):
         self._update_unrealized_total_profit()
         step_reward = self.calculate_reward(action)
         self.total_reward += step_reward
-        self.tensorboard_log(self.actions._member_names_[action])
+        self.tensorboard_log(self.actions._member_names_[action], category="actions")

         trade_type = None
         if self.is_tradesignal(action):

@@ -49,7 +49,7 @@ class Base5ActionRLEnv(BaseEnvironment):
         self._update_unrealized_total_profit()
         step_reward = self.calculate_reward(action)
         self.total_reward += step_reward
-        self.tensorboard_log(self.actions._member_names_[action])
+        self.tensorboard_log(self.actions._member_names_[action], category="actions")

         trade_type = None
         if self.is_tradesignal(action):
@@ -137,7 +137,8 @@ class BaseEnvironment(gym.Env):
         self.np_random, seed = seeding.np_random(seed)
         return [seed]

-    def tensorboard_log(self, metric: str, value: Union[int, float] = 1, inc: bool = True):
+    def tensorboard_log(self, metric: str, value: Optional[Union[int, float]] = None,
+                        inc: Optional[bool] = None, category: str = "custom"):
         """
         Function builds the tensorboard_metrics dictionary
         to be parsed by the TensorboardCallback. This

@@ -149,17 +150,24 @@ class BaseEnvironment(gym.Env):

         def calculate_reward(self, action: int) -> float:
             if not self._is_valid(action):
-                self.tensorboard_log("is_valid")
+                self.tensorboard_log("invalid")
                 return -2

         :param metric: metric to be tracked and incremented
-        :param value: value to increment `metric` by
-        :param inc: sets whether the `value` is incremented or not
+        :param value: `metric` value
+        :param inc: (deprecated) sets whether the `value` is incremented or not
+        :param category: `metric` category
         """
-        if not inc or metric not in self.tensorboard_metrics:
-            self.tensorboard_metrics[metric] = value
+        increment = True if value is None else False
+        value = 1 if increment else value
+
+        if category not in self.tensorboard_metrics:
+            self.tensorboard_metrics[category] = {}
+
+        if not increment or metric not in self.tensorboard_metrics[category]:
+            self.tensorboard_metrics[category][metric] = value
         else:
-            self.tensorboard_metrics[metric] += value
+            self.tensorboard_metrics[category][metric] += value

     def reset_tensorboard_log(self):
         self.tensorboard_metrics = {}
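The reworked tensorboard_log above nests metrics under a category instead of keeping a single flat dictionary, and counts occurrences when no explicit value is passed. A small stand-alone sketch of the resulting structure (the class below is illustrative only, not freqtrade code):

from typing import Dict, Optional, Union

class MetricsSketch:
    """Mimics the tensorboard_metrics bookkeeping from the diff above."""
    def __init__(self) -> None:
        # category -> {metric -> value}
        self.tensorboard_metrics: Dict[str, Dict[str, Union[int, float]]] = {}

    def tensorboard_log(self, metric: str, value: Optional[Union[int, float]] = None,
                        category: str = "custom") -> None:
        increment = value is None            # no value given -> count occurrences
        value = 1 if increment else value
        cat = self.tensorboard_metrics.setdefault(category, {})
        if not increment or metric not in cat:
            cat[metric] = value
        else:
            cat[metric] += value

env = MetricsSketch()
env.tensorboard_log("invalid", category="actions")
env.tensorboard_log("invalid", category="actions")
env.tensorboard_log("pnl", value=0.02)
print(env.tensorboard_metrics)  # {'actions': {'invalid': 2}, 'custom': {'pnl': 0.02}}

The TensorboardCallback hunk that follows simply iterates this nested dict and records each entry under "{category}/{metric}" instead of the old "_actions"/"_custom" split.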
@@ -13,7 +13,7 @@ class TensorboardCallback(BaseCallback):
     episodic summary reports.
     """
     def __init__(self, verbose=1, actions: Type[Enum] = BaseActions):
-        super(TensorboardCallback, self).__init__(verbose)
+        super().__init__(verbose)
         self.model: Any = None
         self.logger = None  # type: Any
         self.training_env: BaseEnvironment = None  # type: ignore

@@ -46,14 +46,12 @@ class TensorboardCallback(BaseCallback):
         local_info = self.locals["infos"][0]
         tensorboard_metrics = self.training_env.get_attr("tensorboard_metrics")[0]

-        for info in local_info:
-            if info not in ["episode", "terminal_observation"]:
-                self.logger.record(f"_info/{info}", local_info[info])
+        for metric in local_info:
+            if metric not in ["episode", "terminal_observation"]:
+                self.logger.record(f"info/{metric}", local_info[metric])

-        for info in tensorboard_metrics:
-            if info in [action.name for action in self.actions]:
-                self.logger.record(f"_actions/{info}", tensorboard_metrics[info])
-            else:
-                self.logger.record(f"_custom/{info}", tensorboard_metrics[info])
+        for category in tensorboard_metrics:
+            for metric in tensorboard_metrics[category]:
+                self.logger.record(f"{category}/{metric}", tensorboard_metrics[category][metric])

         return True
@@ -251,7 +251,7 @@ class FreqaiDataKitchen:
             (drop_index == 0) & (drop_index_labels == 0)
         ]
         logger.info(
-            f"dropped {len(unfiltered_df) - len(filtered_df)} training points"
+            f"{self.pair}: dropped {len(unfiltered_df) - len(filtered_df)} training points"
             f" due to NaNs in populated dataset {len(unfiltered_df)}."
         )
         if (1 - len(filtered_df) / len(unfiltered_df)) > 0.1 and self.live:

@@ -675,7 +675,7 @@ class FreqaiDataKitchen:
         ]

         logger.info(
-            f"SVM tossed {len(y_pred) - kept_points.sum()}"
+            f"{self.pair}: SVM tossed {len(y_pred) - kept_points.sum()}"
             f" test points from {len(y_pred)} total points."
         )

@@ -949,7 +949,7 @@ class FreqaiDataKitchen:

         if (len(do_predict) - do_predict.sum()) > 0:
             logger.info(
-                f"DI tossed {len(do_predict) - do_predict.sum()} predictions for "
+                f"{self.pair}: DI tossed {len(do_predict) - do_predict.sum()} predictions for "
                 "being too far from training data."
             )
@@ -104,6 +104,10 @@ class IFreqaiModel(ABC):
         self.data_provider: Optional[DataProvider] = None
         self.max_system_threads = max(int(psutil.cpu_count() * 2 - 2), 1)
         self.can_short = True  # overridden in start() with strategy.can_short
+        self.model: Any = None
+        if self.ft_params.get('principal_component_analysis', False) and self.continual_learning:
+            self.ft_params.update({'principal_component_analysis': False})
+            logger.warning('User tried to use PCA with continual learning. Deactivating PCA.')

         record_params(config, self.full_path)

@@ -153,8 +157,7 @@ class IFreqaiModel(ABC):
             dk = self.start_backtesting(dataframe, metadata, self.dk, strategy)
             dataframe = dk.remove_features_from_df(dk.return_dataframe)
         else:
-            logger.info(
-                "Backtesting using historic predictions (live models)")
+            logger.info("Backtesting using historic predictions (live models)")
             dk = self.start_backtesting_from_historic_predictions(
                 dataframe, metadata, self.dk)
             dataframe = dk.return_dataframe

@@ -338,13 +341,14 @@ class IFreqaiModel(ABC):
                 except Exception as msg:
                     logger.warning(
                         f"Training {pair} raised exception {msg.__class__.__name__}. "
-                        f"Message: {msg}, skipping.")
+                        f"Message: {msg}, skipping.", exc_info=True)
+                    self.model = None

                 self.dd.pair_dict[pair]["trained_timestamp"] = int(
                     tr_train.stopts)
-                if self.plot_features:
+                if self.plot_features and self.model is not None:
                     plot_feature_importance(self.model, pair, dk, self.plot_features)
-                if self.save_backtest_models:
+                if self.save_backtest_models and self.model is not None:
                     logger.info('Saving backtest model to disk.')
                     self.dd.save_data(self.model, pair, dk)
             else:
@@ -100,7 +100,7 @@ class ReinforcementLearner(BaseReinforcementLearningModel):
         """
         # first, penalize if the action is not valid
         if not self._is_valid(action):
-            self.tensorboard_log("is_valid")
+            self.tensorboard_log("invalid", category="actions")
             return -2

         pnl = self.get_unrealized_profit()
@@ -30,6 +30,8 @@ from freqtrade.plugins.protectionmanager import ProtectionManager
 from freqtrade.resolvers import ExchangeResolver, StrategyResolver
 from freqtrade.rpc import RPCManager
 from freqtrade.rpc.external_message_consumer import ExternalMessageConsumer
+from freqtrade.rpc.rpc_types import (RPCBuyMsg, RPCCancelMsg, RPCProtectionMsg, RPCSellCancelMsg,
+                                     RPCSellMsg)
 from freqtrade.strategy.interface import IStrategy
 from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper
 from freqtrade.util import FtPrecise

@@ -212,7 +214,8 @@ class FreqtradeBot(LoggingMixin):
         self.dataprovider.refresh(self.pairlists.create_pair_list(self.active_pair_whitelist),
                                   self.strategy.gather_informative_pairs())

-        strategy_safe_wrapper(self.strategy.bot_loop_start, supress_error=True)()
+        strategy_safe_wrapper(self.strategy.bot_loop_start, supress_error=True)(
+            current_time=datetime.now(timezone.utc))

         self.strategy.analyze(self.active_pair_whitelist)
|
|||||||
stake_available = self.wallets.get_available_stake_amount()
|
stake_available = self.wallets.get_available_stake_amount()
|
||||||
logger.debug(f"Calling adjust_trade_position for pair {trade.pair}")
|
logger.debug(f"Calling adjust_trade_position for pair {trade.pair}")
|
||||||
stake_amount = strategy_safe_wrapper(self.strategy.adjust_trade_position,
|
stake_amount = strategy_safe_wrapper(self.strategy.adjust_trade_position,
|
||||||
default_retval=None)(
|
default_retval=None, supress_error=True)(
|
||||||
trade=trade,
|
trade=trade,
|
||||||
current_time=datetime.now(timezone.utc), current_rate=current_entry_rate,
|
current_time=datetime.now(timezone.utc), current_rate=current_entry_rate,
|
||||||
current_profit=current_entry_profit, min_stake=min_entry_stake,
|
current_profit=current_entry_profit, min_stake=min_entry_stake,
|
||||||
@ -810,6 +813,9 @@ class FreqtradeBot(LoggingMixin):
|
|||||||
precision_mode=self.exchange.precisionMode,
|
precision_mode=self.exchange.precisionMode,
|
||||||
contract_size=self.exchange.get_contract_size(pair),
|
contract_size=self.exchange.get_contract_size(pair),
|
||||||
)
|
)
|
||||||
|
stoploss = self.strategy.stoploss if not self.edge else self.edge.get_stoploss(pair)
|
||||||
|
trade.adjust_stop_loss(trade.open_rate, stoploss, initial=True)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
# This is additional buy, we reset fee_open_currency so timeout checking can work
|
# This is additional buy, we reset fee_open_currency so timeout checking can work
|
||||||
trade.is_open = True
|
trade.is_open = True
|
||||||
@ -819,7 +825,7 @@ class FreqtradeBot(LoggingMixin):
|
|||||||
|
|
||||||
trade.orders.append(order_obj)
|
trade.orders.append(order_obj)
|
||||||
trade.recalc_trade_from_orders()
|
trade.recalc_trade_from_orders()
|
||||||
Trade.query.session.add(trade)
|
Trade.session.add(trade)
|
||||||
Trade.commit()
|
Trade.commit()
|
||||||
|
|
||||||
# Updating wallets
|
# Updating wallets
|
||||||
@ -851,7 +857,8 @@ class FreqtradeBot(LoggingMixin):
|
|||||||
# Reset stoploss order id.
|
# Reset stoploss order id.
|
||||||
trade.stoploss_order_id = None
|
trade.stoploss_order_id = None
|
||||||
except InvalidOrderException:
|
except InvalidOrderException:
|
||||||
logger.exception(f"Could not cancel stoploss order {trade.stoploss_order_id}")
|
logger.exception(f"Could not cancel stoploss order {trade.stoploss_order_id} "
|
||||||
|
f"for pair {trade.pair}")
|
||||||
return trade
|
return trade
|
||||||
|
|
||||||
def get_valid_enter_price_and_stake(
|
def get_valid_enter_price_and_stake(
|
||||||
@ -943,7 +950,6 @@ class FreqtradeBot(LoggingMixin):
|
|||||||
"""
|
"""
|
||||||
Sends rpc notification when a entry order occurred.
|
Sends rpc notification when a entry order occurred.
|
||||||
"""
|
"""
|
||||||
msg_type = RPCMessageType.ENTRY_FILL if fill else RPCMessageType.ENTRY
|
|
||||||
open_rate = order.safe_price
|
open_rate = order.safe_price
|
||||||
|
|
||||||
if open_rate is None:
|
if open_rate is None:
|
||||||
@ -954,9 +960,9 @@ class FreqtradeBot(LoggingMixin):
|
|||||||
current_rate = self.exchange.get_rate(
|
current_rate = self.exchange.get_rate(
|
||||||
trade.pair, side='entry', is_short=trade.is_short, refresh=False)
|
trade.pair, side='entry', is_short=trade.is_short, refresh=False)
|
||||||
|
|
||||||
msg = {
|
msg: RPCBuyMsg = {
|
||||||
'trade_id': trade.id,
|
'trade_id': trade.id,
|
||||||
'type': msg_type,
|
'type': RPCMessageType.ENTRY_FILL if fill else RPCMessageType.ENTRY,
|
||||||
'buy_tag': trade.enter_tag,
|
'buy_tag': trade.enter_tag,
|
||||||
'enter_tag': trade.enter_tag,
|
'enter_tag': trade.enter_tag,
|
||||||
'exchange': trade.exchange.capitalize(),
|
'exchange': trade.exchange.capitalize(),
|
||||||
@ -968,6 +974,7 @@ class FreqtradeBot(LoggingMixin):
|
|||||||
'order_type': order_type,
|
'order_type': order_type,
|
||||||
'stake_amount': trade.stake_amount,
|
'stake_amount': trade.stake_amount,
|
||||||
'stake_currency': self.config['stake_currency'],
|
'stake_currency': self.config['stake_currency'],
|
||||||
|
'base_currency': self.exchange.get_pair_base_currency(trade.pair),
|
||||||
'fiat_currency': self.config.get('fiat_display_currency', None),
|
'fiat_currency': self.config.get('fiat_display_currency', None),
|
||||||
'amount': order.safe_amount_after_fee if fill else (order.amount or trade.amount),
|
'amount': order.safe_amount_after_fee if fill else (order.amount or trade.amount),
|
||||||
'open_date': trade.open_date or datetime.utcnow(),
|
'open_date': trade.open_date or datetime.utcnow(),
|
||||||
@ -986,7 +993,7 @@ class FreqtradeBot(LoggingMixin):
|
|||||||
current_rate = self.exchange.get_rate(
|
current_rate = self.exchange.get_rate(
|
||||||
trade.pair, side='entry', is_short=trade.is_short, refresh=False)
|
trade.pair, side='entry', is_short=trade.is_short, refresh=False)
|
||||||
|
|
||||||
msg = {
|
msg: RPCCancelMsg = {
|
||||||
'trade_id': trade.id,
|
'trade_id': trade.id,
|
||||||
'type': RPCMessageType.ENTRY_CANCEL,
|
'type': RPCMessageType.ENTRY_CANCEL,
|
||||||
'buy_tag': trade.enter_tag,
|
'buy_tag': trade.enter_tag,
|
||||||
@ -998,7 +1005,9 @@ class FreqtradeBot(LoggingMixin):
|
|||||||
'limit': trade.open_rate,
|
'limit': trade.open_rate,
|
||||||
'order_type': order_type,
|
'order_type': order_type,
|
||||||
'stake_amount': trade.stake_amount,
|
'stake_amount': trade.stake_amount,
|
||||||
|
'open_rate': trade.open_rate,
|
||||||
'stake_currency': self.config['stake_currency'],
|
'stake_currency': self.config['stake_currency'],
|
||||||
|
'base_currency': self.exchange.get_pair_base_currency(trade.pair),
|
||||||
'fiat_currency': self.config.get('fiat_display_currency', None),
|
'fiat_currency': self.config.get('fiat_display_currency', None),
|
||||||
'amount': trade.amount,
|
'amount': trade.amount,
|
||||||
'open_date': trade.open_date,
|
'open_date': trade.open_date,
|
||||||
@ -1021,12 +1030,16 @@ class FreqtradeBot(LoggingMixin):
|
|||||||
trades_closed = 0
|
trades_closed = 0
|
||||||
for trade in trades:
|
for trade in trades:
|
||||||
try:
|
try:
|
||||||
|
try:
|
||||||
if (self.strategy.order_types.get('stoploss_on_exchange') and
|
if (self.strategy.order_types.get('stoploss_on_exchange') and
|
||||||
self.handle_stoploss_on_exchange(trade)):
|
self.handle_stoploss_on_exchange(trade)):
|
||||||
trades_closed += 1
|
trades_closed += 1
|
||||||
Trade.commit()
|
Trade.commit()
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
except InvalidOrderException as exception:
|
||||||
|
logger.warning(
|
||||||
|
f'Unable to handle stoploss on exchange for {trade.pair}: {exception}')
|
||||||
# Check if we can sell our current pair
|
# Check if we can sell our current pair
|
||||||
if trade.open_order_id is None and trade.is_open and self.handle_trade(trade):
|
if trade.open_order_id is None and trade.is_open and self.handle_trade(trade):
|
||||||
trades_closed += 1
|
trades_closed += 1
|
||||||
@ -1232,13 +1245,8 @@ class FreqtradeBot(LoggingMixin):
|
|||||||
# cancelling the current stoploss on exchange first
|
# cancelling the current stoploss on exchange first
|
||||||
logger.info(f"Cancelling current stoploss on exchange for pair {trade.pair} "
|
logger.info(f"Cancelling current stoploss on exchange for pair {trade.pair} "
|
||||||
f"(orderid:{order['id']}) in order to add another one ...")
|
f"(orderid:{order['id']}) in order to add another one ...")
|
||||||
try:
|
|
||||||
co = self.exchange.cancel_stoploss_order_with_result(order['id'], trade.pair,
|
self.cancel_stoploss_on_exchange(trade)
|
||||||
trade.amount)
|
|
||||||
trade.update_order(co)
|
|
||||||
except InvalidOrderException:
|
|
||||||
logger.exception(f"Could not cancel stoploss order {order['id']} "
|
|
||||||
f"for pair {trade.pair}")
|
|
||||||
|
|
||||||
# Create new stoploss order
|
# Create new stoploss order
|
||||||
if not self.create_stoploss_order(trade=trade, stop_price=stoploss_norm):
|
if not self.create_stoploss_order(trade=trade, stop_price=stoploss_norm):
|
||||||
@ -1659,7 +1667,7 @@ class FreqtradeBot(LoggingMixin):
|
|||||||
amount = trade.amount
|
amount = trade.amount
|
||||||
gain = "profit" if profit_ratio > 0 else "loss"
|
gain = "profit" if profit_ratio > 0 else "loss"
|
||||||
|
|
||||||
msg = {
|
msg: RPCSellMsg = {
|
||||||
'type': (RPCMessageType.EXIT_FILL if fill
|
'type': (RPCMessageType.EXIT_FILL if fill
|
||||||
else RPCMessageType.EXIT),
|
else RPCMessageType.EXIT),
|
||||||
'trade_id': trade.id,
|
'trade_id': trade.id,
|
||||||
@ -1685,6 +1693,7 @@ class FreqtradeBot(LoggingMixin):
|
|||||||
'close_date': trade.close_date or datetime.utcnow(),
|
'close_date': trade.close_date or datetime.utcnow(),
|
||||||
'stake_amount': trade.stake_amount,
|
'stake_amount': trade.stake_amount,
|
||||||
'stake_currency': self.config['stake_currency'],
|
'stake_currency': self.config['stake_currency'],
|
||||||
|
'base_currency': self.exchange.get_pair_base_currency(trade.pair),
|
||||||
'fiat_currency': self.config.get('fiat_display_currency'),
|
'fiat_currency': self.config.get('fiat_display_currency'),
|
||||||
'sub_trade': sub_trade,
|
'sub_trade': sub_trade,
|
||||||
'cumulative_profit': trade.realized_profit,
|
'cumulative_profit': trade.realized_profit,
|
||||||
@ -1715,7 +1724,7 @@ class FreqtradeBot(LoggingMixin):
|
|||||||
profit_ratio = trade.calc_profit_ratio(profit_rate)
|
profit_ratio = trade.calc_profit_ratio(profit_rate)
|
||||||
gain = "profit" if profit_ratio > 0 else "loss"
|
gain = "profit" if profit_ratio > 0 else "loss"
|
||||||
|
|
||||||
msg = {
|
msg: RPCSellCancelMsg = {
|
||||||
'type': RPCMessageType.EXIT_CANCEL,
|
'type': RPCMessageType.EXIT_CANCEL,
|
||||||
'trade_id': trade.id,
|
'trade_id': trade.id,
|
||||||
'exchange': trade.exchange.capitalize(),
|
'exchange': trade.exchange.capitalize(),
|
||||||
@ -1737,6 +1746,7 @@ class FreqtradeBot(LoggingMixin):
|
|||||||
'open_date': trade.open_date,
|
'open_date': trade.open_date,
|
||||||
'close_date': trade.close_date or datetime.now(timezone.utc),
|
'close_date': trade.close_date or datetime.now(timezone.utc),
|
||||||
'stake_currency': self.config['stake_currency'],
|
'stake_currency': self.config['stake_currency'],
|
||||||
|
'base_currency': self.exchange.get_pair_base_currency(trade.pair),
|
||||||
'fiat_currency': self.config.get('fiat_display_currency', None),
|
'fiat_currency': self.config.get('fiat_display_currency', None),
|
||||||
'reason': reason,
|
'reason': reason,
|
||||||
'sub_trade': sub_trade,
|
'sub_trade': sub_trade,
|
||||||
@ -1844,14 +1854,20 @@ class FreqtradeBot(LoggingMixin):
|
|||||||
self.strategy.lock_pair(pair, datetime.now(timezone.utc), reason='Auto lock')
|
self.strategy.lock_pair(pair, datetime.now(timezone.utc), reason='Auto lock')
|
||||||
prot_trig = self.protections.stop_per_pair(pair, side=side)
|
prot_trig = self.protections.stop_per_pair(pair, side=side)
|
||||||
if prot_trig:
|
if prot_trig:
|
||||||
msg = {'type': RPCMessageType.PROTECTION_TRIGGER, }
|
msg: RPCProtectionMsg = {
|
||||||
msg.update(prot_trig.to_json())
|
'type': RPCMessageType.PROTECTION_TRIGGER,
|
||||||
|
'base_currency': self.exchange.get_pair_base_currency(prot_trig.pair),
|
||||||
|
**prot_trig.to_json() # type: ignore
|
||||||
|
}
|
||||||
self.rpc.send_msg(msg)
|
self.rpc.send_msg(msg)
|
||||||
|
|
||||||
prot_trig_glb = self.protections.global_stop(side=side)
|
prot_trig_glb = self.protections.global_stop(side=side)
|
||||||
if prot_trig_glb:
|
if prot_trig_glb:
|
||||||
msg = {'type': RPCMessageType.PROTECTION_TRIGGER_GLOBAL, }
|
msg = {
|
||||||
msg.update(prot_trig_glb.to_json())
|
'type': RPCMessageType.PROTECTION_TRIGGER_GLOBAL,
|
||||||
|
'base_currency': self.exchange.get_pair_base_currency(prot_trig_glb.pair),
|
||||||
|
**prot_trig_glb.to_json() # type: ignore
|
||||||
|
}
|
||||||
self.rpc.send_msg(msg)
|
self.rpc.send_msg(msg)
|
||||||
|
|
||||||
def apply_fee_conditional(self, trade: Trade, trade_base_currency: str,
|
def apply_fee_conditional(self, trade: Trade, trade_base_currency: str,
|
||||||
|
@@ -203,9 +203,10 @@ class Backtesting:
         # since a "perfect" stoploss-exit is assumed anyway
         # And the regular "stoploss" function would not apply to that case
         self.strategy.order_types['stoploss_on_exchange'] = False
+        # Update can_short flag
+        self._can_short = self.trading_mode != TradingMode.SPOT and strategy.can_short

         self.strategy.ft_bot_start()
-        strategy_safe_wrapper(self.strategy.bot_loop_start, supress_error=True)()

     def _load_protections(self, strategy: IStrategy):
         if self.config.get('enable_protections', False):

@@ -442,10 +443,6 @@ class Backtesting:
         # Worst case: price ticks tiny bit above open and dives down.
         stop_rate = row[OPEN_IDX] * (1 - side_1 * abs(
             (trade.stop_loss_pct or 0.0) / leverage))
-        if is_short:
-            assert stop_rate > row[LOW_IDX]
-        else:
-            assert stop_rate < row[HIGH_IDX]

         # Limit lower-end to candle low to avoid exits below the low.
         # This still remains "worst case" - but "worst realistic case".

@@ -526,7 +523,7 @@ class Backtesting:
         max_stake = self.exchange.get_max_pair_stake_amount(trade.pair, current_rate)
         stake_available = self.wallets.get_available_stake_amount()
         stake_amount = strategy_safe_wrapper(self.strategy.adjust_trade_position,
-                                             default_retval=None)(
+                                             default_retval=None, supress_error=True)(
             trade=trade,  # type: ignore[arg-type]
             current_time=current_date, current_rate=current_rate,
             current_profit=current_profit, min_stake=min_stake,

@@ -744,7 +741,7 @@ class Backtesting:
             proposed_leverage=1.0,
             max_leverage=max_leverage,
             side=direction, entry_tag=entry_tag,
-        ) if self._can_short else 1.0
+        ) if self.trading_mode != TradingMode.SPOT else 1.0
         # Cap leverage between 1.0 and max_leverage.
         leverage = min(max(leverage, 1.0), max_leverage)

@@ -1034,6 +1031,9 @@ class Backtesting:
                     requested_stake=(
                         order.safe_remaining * order.ft_price / trade.leverage),
                     direction='short' if trade.is_short else 'long')
+                # Delete trade if no successful entries happened (if placing the new order failed)
+                if trade.open_order_id is None and trade.nr_of_successful_entries == 0:
+                    return True
                 self.replaced_entry_orders += 1
             else:
                 # assumption: there can't be multiple open entry orders at any given time

@@ -1159,6 +1159,8 @@ class Backtesting:
         while current_time <= end_date:
             open_trade_count_start = LocalTrade.bt_open_open_trade_count
             self.check_abort()
+            strategy_safe_wrapper(self.strategy.bot_loop_start, supress_error=True)(
+                current_time=current_time)
             for i, pair in enumerate(data):
                 row_index = indexes[pair]
                 row = self.validate_row(data, pair, row_index, current_time)
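Note that bot_loop_start is now called with current_time both in the live loop (earlier hunk) and once per candle in the backtest loop above. On the strategy side this assumes the callback accepts that argument; a minimal sketch:

from datetime import datetime

class MyStrategy:  # stands in for an IStrategy subclass
    def bot_loop_start(self, current_time: datetime, **kwargs) -> None:
        # current_time is supplied by the caller shown in the diff above:
        # wall-clock time when running live, the simulated candle time in backtesting.
        self.last_loop_time = current_time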
@@ -1,4 +1,3 @@
-import io
 import logging
 from copy import deepcopy
 from datetime import datetime, timezone

@@ -464,8 +463,8 @@ class HyperoptTools():
             return

         try:
-            io.open(csv_file, 'w+').close()
-        except IOError:
+            Path(csv_file).open('w+').close()
+        except OSError:
             logger.error(f"Failed to create CSV file: {csv_file}")
             return
@@ -2,7 +2,9 @@
 This module contains the class to persist trades into SQLite
 """
 import logging
-from typing import Any, Dict
+import threading
+from contextvars import ContextVar
+from typing import Any, Dict, Final, Optional

 from sqlalchemy import create_engine, inspect
 from sqlalchemy.exc import NoSuchModuleError

@@ -19,6 +21,22 @@ from freqtrade.persistence.trade_model import Order, Trade
 logger = logging.getLogger(__name__)


+REQUEST_ID_CTX_KEY: Final[str] = 'request_id'
+_request_id_ctx_var: ContextVar[Optional[str]] = ContextVar(REQUEST_ID_CTX_KEY, default=None)
+
+
+def get_request_or_thread_id() -> Optional[str]:
+    """
+    Helper method to get either async context (for fastapi requests), or thread id
+    """
+    id = _request_id_ctx_var.get()
+    if id is None:
+        # when not in request context - use thread id
+        id = str(threading.current_thread().ident)
+
+    return id
+
+
 _SQL_DOCS_URL = 'http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls'


@@ -53,13 +71,11 @@ def init_db(db_url: str) -> None:

     # https://docs.sqlalchemy.org/en/13/orm/contextual.html#thread-local-scope
     # Scoped sessions proxy requests to the appropriate thread-local session.
-    # We should use the scoped_session object - not a seperately initialized version
-    Trade._session = scoped_session(sessionmaker(bind=engine, autoflush=False))
-    Order._session = Trade._session
-    PairLock._session = Trade._session
-    Trade.query = Trade._session.query_property()
-    Order.query = Trade._session.query_property()
-    PairLock.query = Trade._session.query_property()
+    # Since we also use fastAPI, we need to make it aware of the request id, too
+    Trade.session = scoped_session(sessionmaker(
+        bind=engine, autoflush=False), scopefunc=get_request_or_thread_id)
+    Order.session = Trade.session
+    PairLock.session = Trade.session

     previous_tables = inspect(engine).get_table_names()
     ModelBase.metadata.create_all(engine)
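The new get_request_or_thread_id is handed to scoped_session as its scopefunc, so every API request (and, outside requests, every thread) resolves to its own SQLAlchemy session. A self-contained sketch of that mechanism with an in-memory engine (nothing below is freqtrade code):

import threading
from contextvars import ContextVar
from typing import Optional

from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

_request_id: ContextVar[Optional[str]] = ContextVar('request_id', default=None)

def scope_key() -> str:
    # Request id when the API layer has set one, thread id otherwise
    rid = _request_id.get()
    return rid if rid is not None else str(threading.current_thread().ident)

engine = create_engine('sqlite://')
Session = scoped_session(sessionmaker(bind=engine, autoflush=False), scopefunc=scope_key)

token = _request_id.set('request-1')
s1 = Session()            # session keyed by 'request-1'
_request_id.reset(token)
s2 = Session()            # keyed by the thread id -> a different session object
print(s1 is s2)           # False
Session.remove()          # drops only the session for the current scope key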
@@ -1,8 +1,8 @@
 from datetime import datetime, timezone
 from typing import Any, ClassVar, Dict, Optional

-from sqlalchemy import String, or_
-from sqlalchemy.orm import Mapped, Query, QueryPropertyDescriptor, mapped_column
+from sqlalchemy import ScalarResult, String, or_, select
+from sqlalchemy.orm import Mapped, mapped_column

 from freqtrade.constants import DATETIME_PRINT_FORMAT
 from freqtrade.persistence.base import ModelBase, SessionType

@@ -13,8 +13,7 @@ class PairLock(ModelBase):
     Pair Locks database model.
     """
     __tablename__ = 'pairlocks'
-    query: ClassVar[QueryPropertyDescriptor]
-    _session: ClassVar[SessionType]
+    session: ClassVar[SessionType]

     id: Mapped[int] = mapped_column(primary_key=True)

@@ -37,7 +36,8 @@ class PairLock(ModelBase):
                 f'lock_end_time={lock_end_time}, reason={self.reason}, active={self.active})')

     @staticmethod
-    def query_pair_locks(pair: Optional[str], now: datetime, side: str = '*') -> Query:
+    def query_pair_locks(
+            pair: Optional[str], now: datetime, side: str = '*') -> ScalarResult['PairLock']:
         """
         Get all currently active locks for this pair
         :param pair: Pair to check for. Returns all current locks if pair is empty

@@ -53,9 +53,11 @@ class PairLock(ModelBase):
         else:
             filters.append(PairLock.side == '*')

-        return PairLock.query.filter(
-            *filters
-        )
+        return PairLock.session.scalars(select(PairLock).filter(*filters))
+
+    @staticmethod
+    def get_all_locks() -> ScalarResult['PairLock']:
+        return PairLock.session.scalars(select(PairLock))

     def to_json(self) -> Dict[str, Any]:
         return {
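The PairLock changes above are one instance of the project-wide move from the legacy query_property API (Model.query.filter(...)) to SQLAlchemy 2.0-style select() statements executed through the session. A self-contained sketch of the two styles side by side (the model below is a stand-in, not PairLock):

from sqlalchemy import Boolean, Integer, String, create_engine, select
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, scoped_session, sessionmaker

class Base(DeclarativeBase):
    pass

class DemoLock(Base):  # illustrative stand-in for PairLock
    __tablename__ = 'demo_locks'
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    pair: Mapped[str] = mapped_column(String(25))
    active: Mapped[bool] = mapped_column(Boolean, default=True)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = scoped_session(sessionmaker(bind=engine))

session.add(DemoLock(pair='BTC/USDT', active=True))
session.commit()

# Legacy 1.x style would have been DemoLock.query.filter(...).all() via a query_property.
# 2.0 style: build a Select and let the session return ORM objects directly.
locks = session.scalars(select(DemoLock).filter(DemoLock.active.is_(True))).all()
print([lock.pair for lock in locks])  # ['BTC/USDT']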
@@ -1,6 +1,8 @@
 import logging
 from datetime import datetime, timezone
-from typing import List, Optional
+from typing import List, Optional, Sequence

+from sqlalchemy import select
+
 from freqtrade.exchange import timeframe_to_next_date
 from freqtrade.persistence.models import PairLock

@@ -51,15 +53,15 @@ class PairLocks():
             active=True
         )
         if PairLocks.use_db:
-            PairLock.query.session.add(lock)
-            PairLock.query.session.commit()
+            PairLock.session.add(lock)
+            PairLock.session.commit()
         else:
             PairLocks.locks.append(lock)
         return lock

     @staticmethod
-    def get_pair_locks(
-            pair: Optional[str], now: Optional[datetime] = None, side: str = '*') -> List[PairLock]:
+    def get_pair_locks(pair: Optional[str], now: Optional[datetime] = None,
+                       side: str = '*') -> Sequence[PairLock]:
         """
         Get all currently active locks for this pair
         :param pair: Pair to check for. Returns all current locks if pair is empty

@@ -106,7 +108,7 @@ class PairLocks():
         for lock in locks:
             lock.active = False
         if PairLocks.use_db:
-            PairLock.query.session.commit()
+            PairLock.session.commit()

     @staticmethod
     def unlock_reason(reason: str, now: Optional[datetime] = None) -> None:

@@ -126,11 +128,11 @@ class PairLocks():
                 PairLock.active.is_(True),
                 PairLock.reason == reason
             ]
-            locks = PairLock.query.filter(*filters)
+            locks = PairLock.session.scalars(select(PairLock).filter(*filters)).all()
             for lock in locks:
                 logger.info(f"Releasing lock for {lock.pair} with reason '{reason}'.")
                 lock.active = False
-            PairLock.query.session.commit()
+            PairLock.session.commit()
         else:
             # used in backtesting mode; don't show log messages for speed
             locksb = PairLocks.get_pair_locks(None)

@@ -165,11 +167,11 @@ class PairLocks():
         )

     @staticmethod
-    def get_all_locks() -> List[PairLock]:
+    def get_all_locks() -> Sequence[PairLock]:
         """
         Return all locks, also locks with expired end date
         """
         if PairLocks.use_db:
-            return PairLock.query.all()
+            return PairLock.get_all_locks().all()
         else:
             return PairLocks.locks
@@ -5,11 +5,11 @@ import logging
 from collections import defaultdict
 from datetime import datetime, timedelta, timezone
 from math import isclose
-from typing import Any, ClassVar, Dict, List, Optional, cast
+from typing import Any, ClassVar, Dict, List, Optional, Sequence, cast

-from sqlalchemy import Enum, Float, ForeignKey, Integer, String, UniqueConstraint, desc, func
-from sqlalchemy.orm import (Mapped, Query, QueryPropertyDescriptor, lazyload, mapped_column,
-                            relationship)
+from sqlalchemy import (Enum, Float, ForeignKey, Integer, ScalarResult, Select, String,
+                        UniqueConstraint, desc, func, select)
+from sqlalchemy.orm import Mapped, lazyload, mapped_column, relationship

 from freqtrade.constants import (DATETIME_PRINT_FORMAT, MATH_CLOSE_PREC, NON_OPEN_EXCHANGE_STATES,
                                  BuySell, LongShort)

@@ -36,8 +36,7 @@ class Order(ModelBase):
     Mirrors CCXT Order structure
     """
     __tablename__ = 'orders'
-    query: ClassVar[QueryPropertyDescriptor]
-    _session: ClassVar[SessionType]
+    session: ClassVar[SessionType]

     # Uniqueness should be ensured over pair, order_id
     # its likely that order_id is unique per Pair on some exchanges.

@@ -263,12 +262,12 @@ class Order(ModelBase):
         return o

     @staticmethod
-    def get_open_orders() -> List['Order']:
+    def get_open_orders() -> Sequence['Order']:
         """
         Retrieve open orders from the database
         :return: List of open orders
         """
-        return Order.query.filter(Order.ft_is_open.is_(True)).all()
+        return Order.session.scalars(select(Order).filter(Order.ft_is_open.is_(True))).all()

     @staticmethod
     def order_by_id(order_id: str) -> Optional['Order']:

@@ -276,7 +275,7 @@ class Order(ModelBase):
         Retrieve order based on order_id
         :return: Order or None
         """
-        return Order.query.filter(Order.order_id == order_id).first()
+        return Order.session.scalars(select(Order).filter(Order.order_id == order_id)).first()


 class LocalTrade():

@@ -561,6 +560,9 @@ class LocalTrade():
             'trading_mode': self.trading_mode,
             'funding_fees': self.funding_fees,
             'open_order_id': self.open_order_id,
+            'amount_precision': self.amount_precision,
+            'price_precision': self.price_precision,
+            'precision_mode': self.precision_mode,
             'orders': orders,
         }
|
|||||||
get open trade count
|
get open trade count
|
||||||
"""
|
"""
|
||||||
if Trade.use_db:
|
if Trade.use_db:
|
||||||
return Trade.query.filter(Trade.is_open.is_(True)).count()
|
return Trade.session.execute(
|
||||||
|
select(func.count(Trade.id)).filter(Trade.is_open.is_(True))
|
||||||
|
).scalar_one()
|
||||||
else:
|
else:
|
||||||
return LocalTrade.bt_open_open_trade_count
|
return LocalTrade.bt_open_open_trade_count
|
||||||
|
|
||||||
@ -1186,8 +1190,7 @@ class Trade(ModelBase, LocalTrade):
|
|||||||
Note: Fields must be aligned with LocalTrade class
|
Note: Fields must be aligned with LocalTrade class
|
||||||
"""
|
"""
|
||||||
__tablename__ = 'trades'
|
__tablename__ = 'trades'
|
||||||
query: ClassVar[QueryPropertyDescriptor]
|
session: ClassVar[SessionType]
|
||||||
_session: ClassVar[SessionType]
|
|
||||||
|
|
||||||
use_db: bool = True
|
use_db: bool = True
|
||||||
|
|
||||||
@ -1287,18 +1290,18 @@ class Trade(ModelBase, LocalTrade):
|
|||||||
def delete(self) -> None:
|
def delete(self) -> None:
|
||||||
|
|
||||||
for order in self.orders:
|
for order in self.orders:
|
||||||
Order.query.session.delete(order)
|
Order.session.delete(order)
|
||||||
|
|
||||||
Trade.query.session.delete(self)
|
Trade.session.delete(self)
|
||||||
Trade.commit()
|
Trade.commit()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def commit():
|
def commit():
|
||||||
Trade.query.session.commit()
|
Trade.session.commit()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def rollback():
|
def rollback():
|
||||||
Trade.query.session.rollback()
|
Trade.session.rollback()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_trades_proxy(*, pair: Optional[str] = None, is_open: Optional[bool] = None,
|
def get_trades_proxy(*, pair: Optional[str] = None, is_open: Optional[bool] = None,
|
||||||
@ -1332,7 +1335,7 @@ class Trade(ModelBase, LocalTrade):
|
|||||||
)
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_trades(trade_filter=None, include_orders: bool = True) -> Query['Trade']:
|
def get_trades_query(trade_filter=None, include_orders: bool = True) -> Select:
|
||||||
"""
|
"""
|
||||||
Helper function to query Trades using filters.
|
Helper function to query Trades using filters.
|
||||||
NOTE: Not supported in Backtesting.
|
NOTE: Not supported in Backtesting.
|
||||||
@ -1347,22 +1350,35 @@ class Trade(ModelBase, LocalTrade):
|
|||||||
if trade_filter is not None:
|
if trade_filter is not None:
|
||||||
if not isinstance(trade_filter, list):
|
if not isinstance(trade_filter, list):
|
||||||
trade_filter = [trade_filter]
|
trade_filter = [trade_filter]
|
||||||
this_query = Trade.query.filter(*trade_filter)
|
this_query = select(Trade).filter(*trade_filter)
|
||||||
else:
|
else:
|
||||||
this_query = Trade.query
|
this_query = select(Trade)
|
||||||
if not include_orders:
|
if not include_orders:
|
||||||
# Don't load order relations
|
# Don't load order relations
|
||||||
# Consider using noload or raiseload instead of lazyload
|
# Consider using noload or raiseload instead of lazyload
|
||||||
this_query = this_query.options(lazyload(Trade.orders))
|
this_query = this_query.options(lazyload(Trade.orders))
|
||||||
return this_query
|
return this_query
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_trades(trade_filter=None, include_orders: bool = True) -> ScalarResult['Trade']:
|
||||||
|
"""
|
||||||
|
Helper function to query Trades using filters.
|
||||||
|
NOTE: Not supported in Backtesting.
|
||||||
|
:param trade_filter: Optional filter to apply to trades
|
||||||
|
Can be either a Filter object, or a List of filters
|
||||||
|
e.g. `(trade_filter=[Trade.id == trade_id, Trade.is_open.is_(True),])`
|
||||||
|
e.g. `(trade_filter=Trade.id == trade_id)`
|
||||||
|
:return: unsorted query object
|
||||||
|
"""
|
||||||
|
return Trade.session.scalars(Trade.get_trades_query(trade_filter, include_orders))
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_open_order_trades() -> List['Trade']:
|
def get_open_order_trades() -> List['Trade']:
|
||||||
"""
|
"""
|
||||||
Returns all open trades
|
Returns all open trades
|
||||||
NOTE: Not supported in Backtesting.
|
NOTE: Not supported in Backtesting.
|
||||||
"""
|
"""
|
||||||
return Trade.get_trades(Trade.open_order_id.isnot(None)).all()
|
return cast(List[Trade], Trade.get_trades(Trade.open_order_id.isnot(None)).all())
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_open_trades_without_assigned_fees():
|
def get_open_trades_without_assigned_fees():
|
||||||
@ -1392,11 +1408,12 @@ class Trade(ModelBase, LocalTrade):
|
|||||||
Retrieves total realized profit
|
Retrieves total realized profit
|
||||||
"""
|
"""
|
||||||
if Trade.use_db:
|
if Trade.use_db:
|
||||||
total_profit = Trade.query.with_entities(
|
total_profit: float = Trade.session.execute(
|
||||||
func.sum(Trade.close_profit_abs)).filter(Trade.is_open.is_(False)).scalar()
|
select(func.sum(Trade.close_profit_abs)).filter(Trade.is_open.is_(False))
|
||||||
|
).scalar_one()
|
||||||
else:
|
else:
|
||||||
total_profit = sum(
|
total_profit = sum(t.close_profit_abs # type: ignore
|
||||||
t.close_profit_abs for t in LocalTrade.get_trades_proxy(is_open=False))
|
for t in LocalTrade.get_trades_proxy(is_open=False))
|
||||||
return total_profit or 0
|
return total_profit or 0
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@ -1406,8 +1423,9 @@ class Trade(ModelBase, LocalTrade):
|
|||||||
in stake currency
|
in stake currency
|
||||||
"""
|
"""
|
||||||
if Trade.use_db:
|
if Trade.use_db:
|
||||||
total_open_stake_amount = Trade.query.with_entities(
|
total_open_stake_amount = Trade.session.scalar(
|
||||||
func.sum(Trade.stake_amount)).filter(Trade.is_open.is_(True)).scalar()
|
select(func.sum(Trade.stake_amount)).filter(Trade.is_open.is_(True))
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
total_open_stake_amount = sum(
|
total_open_stake_amount = sum(
|
||||||
t.stake_amount for t in LocalTrade.get_trades_proxy(is_open=True))
|
t.stake_amount for t in LocalTrade.get_trades_proxy(is_open=True))
|
||||||
@ -1423,15 +1441,18 @@ class Trade(ModelBase, LocalTrade):
|
|||||||
if minutes:
|
if minutes:
|
||||||
start_date = datetime.now(timezone.utc) - timedelta(minutes=minutes)
|
start_date = datetime.now(timezone.utc) - timedelta(minutes=minutes)
|
||||||
filters.append(Trade.close_date >= start_date)
|
filters.append(Trade.close_date >= start_date)
|
||||||
pair_rates = Trade.query.with_entities(
|
|
||||||
|
pair_rates = Trade.session.execute(
|
||||||
|
select(
|
||||||
Trade.pair,
|
Trade.pair,
|
||||||
func.sum(Trade.close_profit).label('profit_sum'),
|
func.sum(Trade.close_profit).label('profit_sum'),
|
||||||
func.sum(Trade.close_profit_abs).label('profit_sum_abs'),
|
func.sum(Trade.close_profit_abs).label('profit_sum_abs'),
|
||||||
func.count(Trade.pair).label('count')
|
func.count(Trade.pair).label('count')
|
||||||
).filter(*filters)\
|
).filter(*filters)
|
||||||
.group_by(Trade.pair) \
|
.group_by(Trade.pair)
|
||||||
.order_by(desc('profit_sum_abs')) \
|
.order_by(desc('profit_sum_abs'))
|
||||||
.all()
|
).all()
|
||||||
|
|
||||||
return [
|
return [
|
||||||
{
|
{
|
||||||
'pair': pair,
|
'pair': pair,
|
||||||
@ -1456,15 +1477,16 @@ class Trade(ModelBase, LocalTrade):
|
|||||||
if (pair is not None):
|
if (pair is not None):
|
||||||
filters.append(Trade.pair == pair)
|
filters.append(Trade.pair == pair)
|
||||||
|
|
||||||
enter_tag_perf = Trade.query.with_entities(
|
enter_tag_perf = Trade.session.execute(
|
||||||
|
select(
|
||||||
Trade.enter_tag,
|
Trade.enter_tag,
|
||||||
func.sum(Trade.close_profit).label('profit_sum'),
|
func.sum(Trade.close_profit).label('profit_sum'),
|
||||||
func.sum(Trade.close_profit_abs).label('profit_sum_abs'),
|
func.sum(Trade.close_profit_abs).label('profit_sum_abs'),
|
||||||
func.count(Trade.pair).label('count')
|
func.count(Trade.pair).label('count')
|
||||||
).filter(*filters)\
|
).filter(*filters)
|
||||||
.group_by(Trade.enter_tag) \
|
.group_by(Trade.enter_tag)
|
||||||
.order_by(desc('profit_sum_abs')) \
|
.order_by(desc('profit_sum_abs'))
|
||||||
.all()
|
).all()
|
||||||
|
|
||||||
return [
|
return [
|
||||||
{
|
{
|
||||||
@ -1488,16 +1510,16 @@ class Trade(ModelBase, LocalTrade):
|
|||||||
filters: List = [Trade.is_open.is_(False)]
|
filters: List = [Trade.is_open.is_(False)]
|
||||||
if (pair is not None):
|
if (pair is not None):
|
||||||
filters.append(Trade.pair == pair)
|
filters.append(Trade.pair == pair)
|
||||||
|
sell_tag_perf = Trade.session.execute(
|
||||||
sell_tag_perf = Trade.query.with_entities(
|
select(
|
||||||
Trade.exit_reason,
|
Trade.exit_reason,
|
||||||
func.sum(Trade.close_profit).label('profit_sum'),
|
func.sum(Trade.close_profit).label('profit_sum'),
|
||||||
func.sum(Trade.close_profit_abs).label('profit_sum_abs'),
|
func.sum(Trade.close_profit_abs).label('profit_sum_abs'),
|
||||||
func.count(Trade.pair).label('count')
|
func.count(Trade.pair).label('count')
|
||||||
).filter(*filters)\
|
).filter(*filters)
|
||||||
.group_by(Trade.exit_reason) \
|
.group_by(Trade.exit_reason)
|
||||||
.order_by(desc('profit_sum_abs')) \
|
.order_by(desc('profit_sum_abs'))
|
||||||
.all()
|
).all()
|
||||||
|
|
||||||
return [
|
return [
|
||||||
{
|
{
|
||||||
@ -1521,18 +1543,18 @@ class Trade(ModelBase, LocalTrade):
|
|||||||
filters: List = [Trade.is_open.is_(False)]
|
filters: List = [Trade.is_open.is_(False)]
|
||||||
if (pair is not None):
|
if (pair is not None):
|
||||||
filters.append(Trade.pair == pair)
|
filters.append(Trade.pair == pair)
|
||||||
|
mix_tag_perf = Trade.session.execute(
|
||||||
mix_tag_perf = Trade.query.with_entities(
|
select(
|
||||||
Trade.id,
|
Trade.id,
|
||||||
Trade.enter_tag,
|
Trade.enter_tag,
|
||||||
Trade.exit_reason,
|
Trade.exit_reason,
|
||||||
func.sum(Trade.close_profit).label('profit_sum'),
|
func.sum(Trade.close_profit).label('profit_sum'),
|
||||||
func.sum(Trade.close_profit_abs).label('profit_sum_abs'),
|
func.sum(Trade.close_profit_abs).label('profit_sum_abs'),
|
||||||
func.count(Trade.pair).label('count')
|
func.count(Trade.pair).label('count')
|
||||||
).filter(*filters)\
|
).filter(*filters)
|
||||||
.group_by(Trade.id) \
|
.group_by(Trade.id)
|
||||||
.order_by(desc('profit_sum_abs')) \
|
.order_by(desc('profit_sum_abs'))
|
||||||
.all()
|
).all()
|
||||||
|
|
||||||
return_list: List[Dict] = []
|
return_list: List[Dict] = []
|
||||||
for id, enter_tag, exit_reason, profit, profit_abs, count in mix_tag_perf:
|
for id, enter_tag, exit_reason, profit, profit_abs, count in mix_tag_perf:
|
||||||
@ -1568,11 +1590,15 @@ class Trade(ModelBase, LocalTrade):
|
|||||||
NOTE: Not supported in Backtesting.
|
NOTE: Not supported in Backtesting.
|
||||||
:returns: Tuple containing (pair, profit_sum)
|
:returns: Tuple containing (pair, profit_sum)
|
||||||
"""
|
"""
|
||||||
best_pair = Trade.query.with_entities(
|
best_pair = Trade.session.execute(
|
||||||
Trade.pair, func.sum(Trade.close_profit).label('profit_sum')
|
select(
|
||||||
).filter(Trade.is_open.is_(False) & (Trade.close_date >= start_date)) \
|
Trade.pair,
|
||||||
.group_by(Trade.pair) \
|
func.sum(Trade.close_profit).label('profit_sum')
|
||||||
.order_by(desc('profit_sum')).first()
|
).filter(Trade.is_open.is_(False) & (Trade.close_date >= start_date))
|
||||||
|
.group_by(Trade.pair)
|
||||||
|
.order_by(desc('profit_sum'))
|
||||||
|
).first()
|
||||||
|
|
||||||
return best_pair
|
return best_pair
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@ -1582,12 +1608,13 @@ class Trade(ModelBase, LocalTrade):
|
|||||||
NOTE: Not supported in Backtesting.
|
NOTE: Not supported in Backtesting.
|
||||||
:returns: Tuple containing (pair, profit_sum)
|
:returns: Tuple containing (pair, profit_sum)
|
||||||
"""
|
"""
|
||||||
trading_volume = Order.query.with_entities(
|
trading_volume = Trade.session.execute(
|
||||||
|
select(
|
||||||
func.sum(Order.cost).label('volume')
|
func.sum(Order.cost).label('volume')
|
||||||
).filter(
|
).filter(
|
||||||
Order.order_filled_date >= start_date,
|
Order.order_filled_date >= start_date,
|
||||||
Order.status == 'closed'
|
Order.status == 'closed'
|
||||||
).scalar()
|
)).scalar_one()
|
||||||
return trading_volume
|
return trading_volume
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@ -1636,8 +1663,10 @@ class Trade(ModelBase, LocalTrade):
|
|||||||
stop_loss=data["stop_loss_abs"],
|
stop_loss=data["stop_loss_abs"],
|
||||||
stop_loss_pct=data["stop_loss_ratio"],
|
stop_loss_pct=data["stop_loss_ratio"],
|
||||||
stoploss_order_id=data["stoploss_order_id"],
|
stoploss_order_id=data["stoploss_order_id"],
|
||||||
stoploss_last_update=(datetime.fromtimestamp(data["stoploss_last_update"] // 1000,
|
stoploss_last_update=(
|
||||||
tz=timezone.utc) if data["stoploss_last_update"] else None),
|
datetime.fromtimestamp(data["stoploss_last_update_timestamp"] // 1000,
|
||||||
|
tz=timezone.utc)
|
||||||
|
if data["stoploss_last_update_timestamp"] else None),
|
||||||
initial_stop_loss=data["initial_stop_loss_abs"],
|
initial_stop_loss=data["initial_stop_loss_abs"],
|
||||||
initial_stop_loss_pct=data["initial_stop_loss_ratio"],
|
initial_stop_loss_pct=data["initial_stop_loss_ratio"],
|
||||||
min_rate=data["min_rate"],
|
min_rate=data["min_rate"],
|
||||||
|
@@ -1,4 +1,5 @@
 import logging
+from datetime import datetime, timezone
 from pathlib import Path
 from typing import Dict, List, Optional

@@ -635,7 +636,7 @@ def load_and_plot_trades(config: Config):
     exchange = ExchangeResolver.load_exchange(config['exchange']['name'], config)
     IStrategy.dp = DataProvider(config, exchange)
     strategy.ft_bot_start()
-    strategy.bot_loop_start()
+    strategy.bot_loop_start(datetime.now(timezone.utc))
     plot_elements = init_plotscript(config, list(exchange.markets), strategy.startup_candle_count)
     timerange = plot_elements['timerange']
     trades = plot_elements['trades']
@@ -276,6 +276,10 @@ class TradeSchema(BaseModel):
     funding_fees: Optional[float]
     trading_mode: Optional[TradingMode]

+    amount_precision: Optional[float]
+    price_precision: Optional[float]
+    precision_mode: Optional[int]
+

 class OpenTradeSchema(TradeSchema):
     stoploss_current_dist: Optional[float]
@@ -1,9 +1,11 @@
-from typing import Any, Dict, Iterator, Optional
+from typing import Any, AsyncIterator, Dict, Optional
+from uuid import uuid4

 from fastapi import Depends

 from freqtrade.enums import RunMode
 from freqtrade.persistence import Trade
+from freqtrade.persistence.models import _request_id_ctx_var
 from freqtrade.rpc.rpc import RPC, RPCException

 from .webserver import ApiServer
@@ -15,12 +17,19 @@ def get_rpc_optional() -> Optional[RPC]:
     return None


-def get_rpc() -> Optional[Iterator[RPC]]:
+async def get_rpc() -> Optional[AsyncIterator[RPC]]:

     _rpc = get_rpc_optional()
     if _rpc:
+        request_id = str(uuid4())
+        ctx_token = _request_id_ctx_var.set(request_id)
         Trade.rollback()
-        yield _rpc
-        Trade.rollback()
+        try:
+            yield _rpc
+        finally:
+            Trade.session.remove()
+            _request_id_ctx_var.reset(ctx_token)
+
     else:
         raise RPCException('Bot is not in the correct state')

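The dependency above becomes an async generator that tags the request with a context variable and always removes the scoped session in a `finally` block, so cleanup runs even when the route raises. A minimal sketch of the same pattern follows; the names `get_db`, `FakeSession`, and the `/ping` route are illustrative placeholders, not freqtrade's actual API.

# Sketch of an async FastAPI dependency with per-request context and cleanup.
from contextvars import ContextVar
from typing import AsyncIterator
from uuid import uuid4

from fastapi import Depends, FastAPI

_request_id_ctx_var: ContextVar[str] = ContextVar('request_id', default='')
app = FastAPI()


class FakeSession:
    def remove(self) -> None:
        print('session removed for request', _request_id_ctx_var.get())


session = FakeSession()


async def get_db() -> AsyncIterator[FakeSession]:
    token = _request_id_ctx_var.set(str(uuid4()))
    try:
        yield session
    finally:
        # Runs even if the route raised, mirroring Trade.session.remove() above.
        session.remove()
        _request_id_ctx_var.reset(token)


@app.get('/ping')
async def ping(db: FakeSession = Depends(get_db)) -> dict:
    return {'request_id': _request_id_ctx_var.get()}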
@@ -13,6 +13,7 @@ from freqtrade.exceptions import OperationalException
 from freqtrade.rpc.api_server.uvicorn_threaded import UvicornServer
 from freqtrade.rpc.api_server.ws.message_stream import MessageStream
 from freqtrade.rpc.rpc import RPC, RPCException, RPCHandler
+from freqtrade.rpc.rpc_types import RPCSendMsg


 logger = logging.getLogger(__name__)
@@ -108,7 +109,7 @@ class ApiServer(RPCHandler):
         cls._has_rpc = False
         cls._rpc = None

-    def send_msg(self, msg: Dict[str, Any]) -> None:
+    def send_msg(self, msg: RPCSendMsg) -> None:
         """
         Publish the message to the message stream
         """
@@ -5,7 +5,7 @@ import logging
 from abc import abstractmethod
 from datetime import date, datetime, timedelta, timezone
 from math import isnan
-from typing import Any, Dict, Generator, List, Optional, Tuple, Union
+from typing import Any, Dict, Generator, List, Optional, Sequence, Tuple, Union

 import arrow
 import psutil
@@ -13,6 +13,7 @@ from dateutil.relativedelta import relativedelta
 from dateutil.tz import tzlocal
 from numpy import NAN, inf, int64, mean
 from pandas import DataFrame, NaT
+from sqlalchemy import func, select

 from freqtrade import __version__
 from freqtrade.configuration.timerange import TimeRange
@@ -29,6 +30,7 @@ from freqtrade.persistence import Order, PairLocks, Trade
 from freqtrade.persistence.models import PairLock
 from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
 from freqtrade.rpc.fiat_convert import CryptoToFiatConverter
+from freqtrade.rpc.rpc_types import RPCSendMsg
 from freqtrade.wallets import PositionWallet, Wallet


@@ -78,7 +80,7 @@ class RPCHandler:
         """ Cleanup pending module resources """

     @abstractmethod
-    def send_msg(self, msg: Dict[str, str]) -> None:
+    def send_msg(self, msg: RPCSendMsg) -> None:
         """ Sends a message to all registered rpc modules """


@@ -122,7 +124,8 @@ class RPC:
                                 if config['max_open_trades'] != float('inf') else -1),
            'minimal_roi': config['minimal_roi'].copy() if 'minimal_roi' in config else {},
            'stoploss': config.get('stoploss'),
-           'stoploss_on_exchange': config.get('stoploss_on_exchange', False),
+           'stoploss_on_exchange': config.get('order_types',
+                                              {}).get('stoploss_on_exchange', False),
            'trailing_stop': config.get('trailing_stop'),
            'trailing_stop_positive': config.get('trailing_stop_positive'),
            'trailing_stop_positive_offset': config.get('trailing_stop_positive_offset'),
@@ -158,7 +161,7 @@ class RPC:
         """
         # Fetch open trades
         if trade_ids:
-            trades: List[Trade] = Trade.get_trades(trade_filter=Trade.id.in_(trade_ids)).all()
+            trades: Sequence[Trade] = Trade.get_trades(trade_filter=Trade.id.in_(trade_ids)).all()
         else:
             trades = Trade.get_open_trades()

@@ -339,11 +342,13 @@ class RPC:
         for day in range(0, timescale):
             profitday = start_date - time_offset(day)
             # Only query for necessary columns for performance reasons.
-            trades = Trade.query.session.query(Trade.close_profit_abs).filter(
-                Trade.is_open.is_(False),
-                Trade.close_date >= profitday,
-                Trade.close_date < (profitday + time_offset(1))
-            ).order_by(Trade.close_date).all()
+            trades = Trade.session.execute(
+                select(Trade.close_profit_abs)
+                .filter(Trade.is_open.is_(False),
+                        Trade.close_date >= profitday,
+                        Trade.close_date < (profitday + time_offset(1)))
+                .order_by(Trade.close_date)
+            ).all()

             curdayprofit = sum(
                 trade.close_profit_abs for trade in trades if trade.close_profit_abs is not None)
@@ -381,14 +386,19 @@ class RPC:
         """ Returns the X last trades """
         order_by: Any = Trade.id if order_by_id else Trade.close_date.desc()
         if limit:
-            trades = Trade.get_trades([Trade.is_open.is_(False)]).order_by(
-                order_by).limit(limit).offset(offset)
+            trades = Trade.session.scalars(
+                Trade.get_trades_query([Trade.is_open.is_(False)])
+                .order_by(order_by)
+                .limit(limit)
+                .offset(offset))
         else:
-            trades = Trade.get_trades([Trade.is_open.is_(False)]).order_by(
-                Trade.close_date.desc())
+            trades = Trade.session.scalars(
+                Trade.get_trades_query([Trade.is_open.is_(False)])
+                .order_by(Trade.close_date.desc()))

         output = [trade.to_json() for trade in trades]
-        total_trades = Trade.get_trades([Trade.is_open.is_(False)]).count()
+        total_trades = Trade.session.scalar(
+            select(func.count(Trade.id)).filter(Trade.is_open.is_(False)))

         return {
             "trades": output,
@@ -436,8 +446,8 @@ class RPC:
         """ Returns cumulative profit statistics """
         trade_filter = ((Trade.is_open.is_(False) & (Trade.close_date >= start_date)) |
                         Trade.is_open.is_(True))
-        trades: List[Trade] = Trade.get_trades(
-            trade_filter, include_orders=False).order_by(Trade.id).all()
+        trades: Sequence[Trade] = Trade.session.scalars(Trade.get_trades_query(
+            trade_filter, include_orders=False).order_by(Trade.id)).all()

         profit_all_coin = []
         profit_all_ratio = []
@@ -946,12 +956,12 @@ class RPC:
     def _rpc_delete_lock(self, lockid: Optional[int] = None,
                          pair: Optional[str] = None) -> Dict[str, Any]:
         """ Delete specific lock(s) """
-        locks = []
+        locks: Sequence[PairLock] = []

         if pair:
             locks = PairLocks.get_pair_locks(pair)
         if lockid:
-            locks = PairLock.query.filter(PairLock.id == lockid).all()
+            locks = PairLock.session.scalars(select(PairLock).filter(PairLock.id == lockid)).all()

         for lock in locks:
             lock.active = False
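The trade-history hunk above replaces legacy `.order_by().limit().offset()` Query chains and `Query.count()` with `Trade.session.scalars()` over an explicit `select()` plus a separate `func.count()` query. A small self-contained sketch of that paging pattern follows; the simplified `Trade` model and in-memory engine are stand-ins for illustration only.

# Sketch of the scalars()/limit()/offset() paging pattern plus a count query.
from sqlalchemy import Boolean, Integer, create_engine, func, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class Trade(Base):
    __tablename__ = 'trades'
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    is_open: Mapped[bool] = mapped_column(Boolean, default=False)


engine = create_engine('sqlite:///:memory:')
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Trade() for _ in range(25)])
    session.commit()

    # scalars() yields ORM objects directly; limit/offset implement paging.
    page = session.scalars(
        select(Trade).filter(Trade.is_open.is_(False)).order_by(Trade.id)
        .limit(10).offset(10)
    ).all()

    # A separate count query replaces the legacy Query.count().
    total = session.scalar(select(func.count(Trade.id)).filter(Trade.is_open.is_(False)))
    print(len(page), total)  # 10 25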
@@ -3,11 +3,12 @@ This module contains class to manage RPC communications (Telegram, API, ...)
 """
 import logging
 from collections import deque
-from typing import Any, Dict, List
+from typing import List

 from freqtrade.constants import Config
 from freqtrade.enums import NO_ECHO_MESSAGES, RPCMessageType
 from freqtrade.rpc import RPC, RPCHandler
+from freqtrade.rpc.rpc_types import RPCSendMsg


 logger = logging.getLogger(__name__)
@@ -58,7 +59,7 @@ class RPCManager:
             mod.cleanup()
             del mod

-    def send_msg(self, msg: Dict[str, Any]) -> None:
+    def send_msg(self, msg: RPCSendMsg) -> None:
         """
         Send given message to all registered rpc modules.
         A message consists of one or more key value pairs of strings.
@@ -69,10 +70,6 @@ class RPCManager:
         """
         if msg.get('type') not in NO_ECHO_MESSAGES:
             logger.info('Sending rpc message: %s', msg)
-        if 'pair' in msg:
-            msg.update({
-                'base_currency': self._rpc._freqtrade.exchange.get_pair_base_currency(msg['pair'])
-            })
         for mod in self.registered_modules:
             logger.debug('Forwarding message to rpc.%s', mod.name)
             try:
freqtrade/rpc/rpc_types.py (new file, 128 lines)
@@ -0,0 +1,128 @@
+from datetime import datetime
+from typing import Any, List, Literal, Optional, TypedDict, Union
+
+from freqtrade.constants import PairWithTimeframe
+from freqtrade.enums import RPCMessageType
+
+
+class RPCSendMsgBase(TypedDict):
+    pass
+    # ty1pe: Literal[RPCMessageType]
+
+
+class RPCStatusMsg(RPCSendMsgBase):
+    """Used for Status, Startup and Warning messages"""
+    type: Literal[RPCMessageType.STATUS, RPCMessageType.STARTUP, RPCMessageType.WARNING]
+    status: str
+
+
+class RPCStrategyMsg(RPCSendMsgBase):
+    """Used for Status, Startup and Warning messages"""
+    type: Literal[RPCMessageType.STRATEGY_MSG]
+    msg: str
+
+
+class RPCProtectionMsg(RPCSendMsgBase):
+    type: Literal[RPCMessageType.PROTECTION_TRIGGER, RPCMessageType.PROTECTION_TRIGGER_GLOBAL]
+    id: int
+    pair: str
+    base_currency: Optional[str]
+    lock_time: str
+    lock_timestamp: int
+    lock_end_time: str
+    lock_end_timestamp: int
+    reason: str
+    side: str
+    active: bool
+
+
+class RPCWhitelistMsg(RPCSendMsgBase):
+    type: Literal[RPCMessageType.WHITELIST]
+    data: List[str]
+
+
+class __RPCBuyMsgBase(RPCSendMsgBase):
+    trade_id: int
+    buy_tag: Optional[str]
+    enter_tag: Optional[str]
+    exchange: str
+    pair: str
+    base_currency: str
+    leverage: Optional[float]
+    direction: str
+    limit: float
+    open_rate: float
+    order_type: Optional[str]  # TODO: why optional??
+    stake_amount: float
+    stake_currency: str
+    fiat_currency: Optional[str]
+    amount: float
+    open_date: datetime
+    current_rate: Optional[float]
+    sub_trade: bool
+
+
+class RPCBuyMsg(__RPCBuyMsgBase):
+    type: Literal[RPCMessageType.ENTRY, RPCMessageType.ENTRY_FILL]
+
+
+class RPCCancelMsg(__RPCBuyMsgBase):
+    type: Literal[RPCMessageType.ENTRY_CANCEL]
+    reason: str
+
+
+class RPCSellMsg(__RPCBuyMsgBase):
+    type: Literal[RPCMessageType.EXIT, RPCMessageType.EXIT_FILL]
+    cumulative_profit: float
+    gain: str  # Literal["profit", "loss"]
+    close_rate: float
+    profit_amount: float
+    profit_ratio: float
+    sell_reason: Optional[str]
+    exit_reason: Optional[str]
+    close_date: datetime
+    # current_rate: Optional[float]
+    order_rate: Optional[float]
+
+
+class RPCSellCancelMsg(__RPCBuyMsgBase):
+    type: Literal[RPCMessageType.EXIT_CANCEL]
+    reason: str
+    gain: str  # Literal["profit", "loss"]
+    profit_amount: float
+    profit_ratio: float
+    sell_reason: Optional[str]
+    exit_reason: Optional[str]
+    close_date: datetime
+
+
+class _AnalyzedDFData(TypedDict):
+    key: PairWithTimeframe
+    df: Any
+    la: datetime
+
+
+class RPCAnalyzedDFMsg(RPCSendMsgBase):
+    """New Analyzed dataframe message"""
+    type: Literal[RPCMessageType.ANALYZED_DF]
+    data: _AnalyzedDFData
+
+
+class RPCNewCandleMsg(RPCSendMsgBase):
+    """New candle ping message, issued once per new candle/pair"""
+    type: Literal[RPCMessageType.NEW_CANDLE]
+    data: PairWithTimeframe
+
+
+RPCSendMsg = Union[
+    RPCStatusMsg,
+    RPCStrategyMsg,
+    RPCProtectionMsg,
+    RPCWhitelistMsg,
+    RPCBuyMsg,
+    RPCCancelMsg,
+    RPCSellMsg,
+    RPCSellCancelMsg,
+    RPCAnalyzedDFMsg,
+    RPCNewCandleMsg
+]
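The new rpc_types.py gives every RPC message a TypedDict with a `Literal`-typed `type` key, so `RPCSendMsg` becomes a tagged union that a type checker can narrow; handlers typed as `send_msg(msg: RPCSendMsg)` get much stronger checking than the previous `Dict[str, Any]`. The sketch below demonstrates the narrowing idea with simplified stand-in message shapes, not freqtrade's actual message types.

# Sketch: Literal-tagged TypedDict union and checker-friendly narrowing.
from typing import Literal, TypedDict, Union


class StatusMsg(TypedDict):
    type: Literal['status']
    status: str


class ExitMsg(TypedDict):
    type: Literal['exit']
    pair: str
    profit_ratio: float


SendMsg = Union[StatusMsg, ExitMsg]


def handle(msg: SendMsg) -> str:
    if msg['type'] == 'status':
        # Narrowed to StatusMsg here, so 'status' is a known key.
        return msg['status']
    # Otherwise the message must be an ExitMsg.
    return f"{msg['pair']} closed at {msg['profit_ratio']:.2%}"


print(handle({'type': 'exit', 'pair': 'BTC/USDT', 'profit_ratio': 0.042}))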
@@ -30,6 +30,7 @@ from freqtrade.exceptions import OperationalException
 from freqtrade.misc import chunks, plural, round_coin_value
 from freqtrade.persistence import Trade
 from freqtrade.rpc import RPC, RPCException, RPCHandler
+from freqtrade.rpc.rpc_types import RPCSendMsg


 logger = logging.getLogger(__name__)
@@ -83,6 +84,8 @@ def authorized_only(command_handler: Callable[..., None]) -> Callable[..., Any]:
             self._send_msg(str(e))
         except BaseException:
             logger.exception('Exception occurred within Telegram module')
+        finally:
+            Trade.session.remove()

     return wrapper

@@ -427,14 +430,14 @@ class Telegram(RPCHandler):
             return None
         return message

-    def send_msg(self, msg: Dict[str, Any]) -> None:
+    def send_msg(self, msg: RPCSendMsg) -> None:
         """ Send a message to telegram channel """

         default_noti = 'on'

         msg_type = msg['type']
         noti = ''
-        if msg_type == RPCMessageType.EXIT:
+        if msg['type'] == RPCMessageType.EXIT:
             sell_noti = self._config['telegram'] \
                 .get('notification_settings', {}).get(str(msg_type), {})
             # For backward compatibility sell still can be string
@@ -451,7 +454,7 @@ class Telegram(RPCHandler):
             # Notification disabled
             return

-        message = self.compose_message(deepcopy(msg), msg_type)
+        message = self.compose_message(deepcopy(msg), msg_type)  # type: ignore
         if message:
             self._send_msg(message, disable_notification=(noti == 'silent'))

@@ -1340,7 +1343,7 @@ class Telegram(RPCHandler):
         message = tabulate({k: [v] for k, v in counts.items()},
                            headers=['current', 'max', 'total stake'],
                            tablefmt='simple')
-        message = "<pre>{}</pre>".format(message)
+        message = f"<pre>{message}</pre>"
         logger.debug(message)
         self._send_msg(message, parse_mode=ParseMode.HTML,
                        reload_able=True, callback_path="update_count",
@@ -1642,7 +1645,7 @@ class Telegram(RPCHandler):
             ])
         else:
             reply_markup = InlineKeyboardMarkup([[]])
-        msg += "\nUpdated: {}".format(datetime.now().ctime())
+        msg += f"\nUpdated: {datetime.now().ctime()}"
         if not query.message:
             return
         chat_id = query.message.chat_id
@@ -10,6 +10,7 @@ from requests import RequestException, post
 from freqtrade.constants import Config
 from freqtrade.enums import RPCMessageType
 from freqtrade.rpc import RPC, RPCHandler
+from freqtrade.rpc.rpc_types import RPCSendMsg


 logger = logging.getLogger(__name__)
@@ -41,7 +42,7 @@ class Webhook(RPCHandler):
         """
         pass

-    def _get_value_dict(self, msg: Dict[str, Any]) -> Optional[Dict[str, Any]]:
+    def _get_value_dict(self, msg: RPCSendMsg) -> Optional[Dict[str, Any]]:
         whconfig = self._config['webhook']
         # Deprecated 2022.10 - only keep generic method.
         if msg['type'] in [RPCMessageType.ENTRY]:
@@ -75,7 +76,7 @@ class Webhook(RPCHandler):
             return None
         return valuedict

-    def send_msg(self, msg: Dict[str, Any]) -> None:
+    def send_msg(self, msg: RPCSendMsg) -> None:
         """ Send a message to telegram channel """
         try:

@@ -113,7 +114,7 @@ class Webhook(RPCHandler):
             response = post(self._url, data=payload['data'],
                             headers={'Content-Type': 'text/plain'})
         else:
-            raise NotImplementedError('Unknown format: {}'.format(self._format))
+            raise NotImplementedError(f'Unknown format: {self._format}')

         # Throw a RequestException if the post was not successful
         response.raise_for_status()
@@ -251,11 +251,12 @@ class IStrategy(ABC, HyperStrategyMixin):
         """
         pass

-    def bot_loop_start(self, **kwargs) -> None:
+    def bot_loop_start(self, current_time: datetime, **kwargs) -> None:
         """
         Called at the start of the bot iteration (one loop).
         Might be used to perform pair-independent tasks
         (e.g. gather some remote resource for comparison)
+        :param current_time: datetime object, containing the current datetime
         :param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
         """
         pass
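With this change `bot_loop_start` receives the loop's `current_time`; existing strategies keep working because the callback accepts `**kwargs`, while updated strategies can use the passed timestamp instead of calling `datetime.now()` themselves (which also keeps backtesting consistent). A minimal sketch of a strategy picking up the new signature follows; it assumes a working freqtrade installation and is otherwise a bare-bones placeholder strategy.

# Sketch: strategy using the new bot_loop_start(current_time, ...) signature.
from datetime import datetime

from pandas import DataFrame

from freqtrade.strategy import IStrategy


class AwareStrategy(IStrategy):
    INTERFACE_VERSION = 3
    timeframe = '5m'
    minimal_roi = {"0": 0.1}
    stoploss = -0.1

    def bot_loop_start(self, current_time: datetime, **kwargs) -> None:
        # current_time is supplied by the bot (and by backtesting), so the
        # strategy does not need to call datetime.now() itself.
        self.last_loop_at = current_time

    def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        return dataframe

    def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        return dataframe

    def populate_exit_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        return dataframe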
freqtrade/strategy/strategyupdater.py (new file, 255 lines)
@@ -0,0 +1,255 @@
+import shutil
+from pathlib import Path
+
+import ast_comments
+
+from freqtrade.constants import Config
+
+
+class StrategyUpdater:
+    name_mapping = {
+        'ticker_interval': 'timeframe',
+        'buy': 'enter_long',
+        'sell': 'exit_long',
+        'buy_tag': 'enter_tag',
+        'sell_reason': 'exit_reason',
+
+        'sell_signal': 'exit_signal',
+        'custom_sell': 'custom_exit',
+        'force_sell': 'force_exit',
+        'emergency_sell': 'emergency_exit',
+
+        # Strategy/config settings:
+        'use_sell_signal': 'use_exit_signal',
+        'sell_profit_only': 'exit_profit_only',
+        'sell_profit_offset': 'exit_profit_offset',
+        'ignore_roi_if_buy_signal': 'ignore_roi_if_entry_signal',
+        'forcebuy_enable': 'force_entry_enable',
+    }
+
+    function_mapping = {
+        'populate_buy_trend': 'populate_entry_trend',
+        'populate_sell_trend': 'populate_exit_trend',
+        'custom_sell': 'custom_exit',
+        'check_buy_timeout': 'check_entry_timeout',
+        'check_sell_timeout': 'check_exit_timeout',
+        # '': '',
+    }
+    # order_time_in_force, order_types, unfilledtimeout
+    otif_ot_unfilledtimeout = {
+        'buy': 'entry',
+        'sell': 'exit',
+    }
+
+    # create a dictionary that maps the old column names to the new ones
+    rename_dict = {'buy': 'enter_long', 'sell': 'exit_long', 'buy_tag': 'enter_tag'}
+
+    def start(self, config: Config, strategy_obj: dict) -> None:
+        """
+        Run strategy updater
+        It updates a strategy to v3 with the help of the ast-module
+        :return: None
+        """
+
+        source_file = strategy_obj['location']
+        strategies_backup_folder = Path.joinpath(config['user_data_dir'], "strategies_orig_updater")
+        target_file = Path.joinpath(strategies_backup_folder, strategy_obj['location_rel'])
+
+        # read the file
+        with Path(source_file).open('r') as f:
+            old_code = f.read()
+        if not strategies_backup_folder.is_dir():
+            Path(strategies_backup_folder).mkdir(parents=True, exist_ok=True)
+
+        # backup original
+        # => currently no date after the filename,
+        # could get overridden pretty fast if this is fired twice!
+        # The folder is always the same and the file name too (currently).
+        shutil.copy(source_file, target_file)
+
+        # update the code
+        new_code = self.update_code(old_code)
+        # write the modified code to the destination folder
+        with Path(source_file).open('w') as f:
+            f.write(new_code)
+
+    # define the function to update the code
+    def update_code(self, code):
+        # parse the code into an AST
+        tree = ast_comments.parse(code)
+
+        # use the AST to update the code
+        updated_code = self.modify_ast(tree)
+
+        # return the modified code without executing it
+        return updated_code
+
+    # function that uses the ast module to update the code
+    def modify_ast(self, tree):  # noqa
+        # use the visitor to update the names and functions in the AST
+        NameUpdater().visit(tree)
+
+        # first fix the comments, so it understands "\n" properly inside multi line comments.
+        ast_comments.fix_missing_locations(tree)
+        ast_comments.increment_lineno(tree, n=1)
+
+        # generate the new code from the updated AST
+        # without indent {} parameters would just be written straight one after the other.
+
+        # ast_comments would be amazing since this is the only solution that carries over comments,
+        # but it does currently not have an unparse function, hopefully in the future ... !
+        # return ast_comments.unparse(tree)
+
+        return ast_comments.unparse(tree)
+
+
+# Here we go through each respective node, slice, elt, key ... to replace outdated entries.
+class NameUpdater(ast_comments.NodeTransformer):
+    def generic_visit(self, node):
+
+        # space is not yet transferred from buy/sell to entry/exit and thereby has to be skipped.
+        if isinstance(node, ast_comments.keyword):
+            if node.arg == "space":
+                return node
+
+        # from here on this is the original function.
+        for field, old_value in ast_comments.iter_fields(node):
+            if isinstance(old_value, list):
+                new_values = []
+                for value in old_value:
+                    if isinstance(value, ast_comments.AST):
+                        value = self.visit(value)
+                        if value is None:
+                            continue
+                        elif not isinstance(value, ast_comments.AST):
+                            new_values.extend(value)
+                            continue
+                    new_values.append(value)
+                old_value[:] = new_values
+            elif isinstance(old_value, ast_comments.AST):
+                new_node = self.visit(old_value)
+                if new_node is None:
+                    delattr(node, field)
+                else:
+                    setattr(node, field, new_node)
+        return node
+
+    def visit_Expr(self, node):
+        if hasattr(node.value, "left") and hasattr(node.value.left, "id"):
+            node.value.left.id = self.check_dict(StrategyUpdater.name_mapping, node.value.left.id)
+            self.visit(node.value)
+        return node
+
+    # Renames an element if contained inside a dictionary.
+    @staticmethod
+    def check_dict(current_dict: dict, element: str):
+        if element in current_dict:
+            element = current_dict[element]
+        return element
+
+    def visit_arguments(self, node):
+        if isinstance(node.args, list):
+            for arg in node.args:
+                arg.arg = self.check_dict(StrategyUpdater.name_mapping, arg.arg)
+        return node
+
+    def visit_Name(self, node):
+        # if the name is in the mapping, update it
+        node.id = self.check_dict(StrategyUpdater.name_mapping, node.id)
+        return node
+
+    def visit_Import(self, node):
+        # do not update the names in import statements
+        return node
+
+    def visit_ImportFrom(self, node):
+        # if hasattr(node, "module"):
+        #     if node.module == "freqtrade.strategy.hyper":
+        #         node.module = "freqtrade.strategy"
+        return node
+
+    def visit_If(self, node: ast_comments.If):
+        for child in ast_comments.iter_child_nodes(node):
+            self.visit(child)
+        return node
+
+    def visit_FunctionDef(self, node):
+        node.name = self.check_dict(StrategyUpdater.function_mapping, node.name)
+        self.generic_visit(node)
+        return node
+
+    def visit_Attribute(self, node):
+        if (
+            isinstance(node.value, ast_comments.Name)
+            and node.value.id == 'trade'
+            and node.attr == 'nr_of_successful_buys'
+        ):
+            node.attr = 'nr_of_successful_entries'
+        return node
+
+    def visit_ClassDef(self, node):
+        # check if the class is derived from IStrategy
+        if any(isinstance(base, ast_comments.Name) and
+                base.id == 'IStrategy' for base in node.bases):
+            # check if the INTERFACE_VERSION variable exists
+            has_interface_version = any(
+                isinstance(child, ast_comments.Assign) and
+                isinstance(child.targets[0], ast_comments.Name) and
+                child.targets[0].id == 'INTERFACE_VERSION'
+                for child in node.body
+            )
+
+            # if the INTERFACE_VERSION variable does not exist, add it as the first child
+            if not has_interface_version:
+                node.body.insert(0, ast_comments.parse('INTERFACE_VERSION = 3').body[0])
+            # otherwise, update its value to 3
+            else:
+                for child in node.body:
+                    if (
+                        isinstance(child, ast_comments.Assign)
+                        and isinstance(child.targets[0], ast_comments.Name)
+                        and child.targets[0].id == 'INTERFACE_VERSION'
+                    ):
+                        child.value = ast_comments.parse('3').body[0].value
+        self.generic_visit(node)
+        return node
+
+    def visit_Subscript(self, node):
+        if isinstance(node.slice, ast_comments.Constant):
+            if node.slice.value in StrategyUpdater.rename_dict:
+                # Replace the slice attributes with the values from rename_dict
+                node.slice.value = StrategyUpdater.rename_dict[node.slice.value]
+        if hasattr(node.slice, "elts"):
+            self.visit_elts(node.slice.elts)
+        if hasattr(node.slice, "value"):
+            if hasattr(node.slice.value, "elts"):
+                self.visit_elts(node.slice.value.elts)
+        return node
+
+    # elts can have elts (technically recursively)
+    def visit_elts(self, elts):
+        if isinstance(elts, list):
+            for elt in elts:
+                self.visit_elt(elt)
+        else:
+            self.visit_elt(elts)
+        return elts
+
+    # sub function again needed since the structure itself is highly flexible ...
+    def visit_elt(self, elt):
+        if isinstance(elt, ast_comments.Constant) and elt.value in StrategyUpdater.rename_dict:
+            elt.value = StrategyUpdater.rename_dict[elt.value]
+        if hasattr(elt, "elts"):
+            self.visit_elts(elt.elts)
+        if hasattr(elt, "args"):
+            if isinstance(elt.args, ast_comments.arguments):
+                self.visit_elts(elt.args)
+            else:
+                for arg in elt.args:
+                    self.visit_elts(arg)
+        return elt
+
+    def visit_Constant(self, node):
+        node.value = self.check_dict(StrategyUpdater.otif_ot_unfilledtimeout, node.value)
+        node.value = self.check_dict(StrategyUpdater.name_mapping, node.value)
+        return node
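The `NameUpdater` above relies on the ast_comments package (a drop-in for the standard `ast` module that preserves comments when unparsing). The core renaming idea can be reproduced with only the standard library, at the cost of losing comments; the sketch below shows that reduced version with a tiny mapping, purely for illustration.

# Standard-library sketch of the renaming approach used by NameUpdater.
# Plain ast drops comments on unparse; ast.unparse() needs Python 3.9+.
import ast

NAME_MAPPING = {'buy': 'enter_long', 'sell': 'exit_long'}
FUNCTION_MAPPING = {'populate_buy_trend': 'populate_entry_trend'}


class MiniRenamer(ast.NodeTransformer):
    def visit_FunctionDef(self, node: ast.FunctionDef) -> ast.FunctionDef:
        node.name = FUNCTION_MAPPING.get(node.name, node.name)
        self.generic_visit(node)
        return node

    def visit_Constant(self, node: ast.Constant) -> ast.Constant:
        # Rewrites string constants such as dataframe column names.
        if isinstance(node.value, str):
            node.value = NAME_MAPPING.get(node.value, node.value)
        return node


old_code = """
def populate_buy_trend(dataframe, metadata):
    dataframe.loc[:, 'buy'] = 1
    return dataframe
"""

tree = ast.parse(old_code)
MiniRenamer().visit(tree)
ast.fix_missing_locations(tree)
print(ast.unparse(tree))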
@@ -1,5 +1,5 @@
-def bot_loop_start(self, **kwargs) -> None:
+def bot_loop_start(self, current_time: datetime, **kwargs) -> None:
     """
     Called at the start of the bot iteration (one loop).
     Might be used to perform pair-independent tasks
@@ -8,6 +8,7 @@ def bot_loop_start(self, **kwargs) -> None:
     For full documentation please go to https://www.freqtrade.io/en/latest/strategy-advanced/

     When not implemented by a strategy, this simply does nothing.
+    :param current_time: datetime object, containing the current datetime
     :param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
     """
     pass
@@ -1,6 +1,7 @@
 import logging

 from packaging import version
+from sqlalchemy import select

 from freqtrade.constants import Config
 from freqtrade.enums.tradingmode import TradingMode
@@ -44,7 +45,7 @@ def _migrate_binance_futures_db(config: Config):
         # Should symbol be migrated too?
         # order.symbol = new_pair
     Trade.commit()
-    pls = PairLock.query.filter(PairLock.pair.notlike('%:%'))
+    pls = PairLock.session.scalars(select(PairLock).filter(PairLock.pair.notlike('%:%'))).all()
    for pl in pls:
        pl.pair = f"{pl.pair}:{config['stake_currency']}"
    # print(pls)
freqtrade/vendor/qtpylib/indicators.py (vendored, 8 lines changed)
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-#
 # QTPyLib: Quantitative Trading Python Library
 # https://github.com/ranaroussi/qtpylib
 #
@@ -18,7 +16,6 @@
 # limitations under the License.
 #

-import sys
 import warnings
 from datetime import datetime, timedelta

@@ -27,11 +24,6 @@ import pandas as pd
 from pandas.core.base import PandasObject


-# =============================================
-# check min, python version
-if sys.version_info < (3, 4):
-    raise SystemError("QTPyLib requires Python version >= 3.4")
-
 # =============================================
 warnings.simplefilter(action="ignore", category=RuntimeWarning)

@@ -1,3 +1,7 @@
+[build-system]
+requires = ["setuptools >= 46.4.0", "wheel"]
+build-backend = "setuptools.build_meta"
+
 [tool.black]
 line-length = 100
 exclude = '''
@@ -48,10 +52,6 @@
 module = "telegram.*"
 implicit_optional = true

-[build-system]
-requires = ["setuptools >= 46.4.0", "wheel"]
-build-backend = "setuptools.build_meta"
-
 [tool.pyright]
 include = ["freqtrade"]
 exclude = [
@@ -68,10 +68,11 @@ target-version = "py38"
 extend-select = [
     "C90", # mccabe
     # "N", # pep8-naming
-    # "UP", # pyupgrade
+    "UP", # pyupgrade
     "TID", # flake8-tidy-imports
     # "EXE", # flake8-executable
     "YTT", # flake8-2020
+    # "S", # flake8-bandit
     # "DTZ", # flake8-datetimez
     # "RSE", # flake8-raise
     # "TCH", # flake8-type-checking
@@ -80,3 +81,6 @@ extend-select = [

 [tool.ruff.mccabe]
 max-complexity = 12
+
+[tool.ruff.per-file-ignores]
+"tests/*" = ["S"]
@@ -7,11 +7,11 @@
 -r docs/requirements-docs.txt

 coveralls==3.3.1
-ruff==0.0.255
+ruff==0.0.259
 mypy==1.1.1
-pre-commit==3.1.1
+pre-commit==3.2.1
 pytest==7.2.2
-pytest-asyncio==0.20.3
+pytest-asyncio==0.21.0
 pytest-cov==4.0.0
 pytest-mock==3.10.0
 pytest-random-order==1.1.0
@@ -22,11 +22,11 @@ time-machine==2.9.0
 httpx==0.23.3

 # Convert jupyter notebooks to markdown documents
-nbconvert==7.2.9
+nbconvert==7.2.10

 # mypy types
 types-cachetools==5.3.0.4
 types-filelock==3.2.7
-types-requests==2.28.11.15
+types-requests==2.28.11.16
 types-tabulate==0.9.0.1
 types-python-dateutil==2.8.19.10
@@ -5,7 +5,7 @@
 # Required for freqai
 scikit-learn==1.1.3
 joblib==1.2.0
-catboost==1.1.1; platform_machine != 'aarch64' and python_version < '3.11'
+catboost==1.1.1; platform_machine != 'aarch64' and 'arm' not in platform_machine and python_version < '3.11'
 lightgbm==3.3.5
 xgboost==1.7.4
 tensorboard==2.12.0
@@ -5,5 +5,5 @@
 scipy==1.10.1
 scikit-learn==1.1.3
 scikit-optimize==0.9.0
-filelock==3.9.0
+filelock==3.10.6
 progressbar2==4.2.0
@@ -2,10 +2,10 @@ numpy==1.24.2
 pandas==1.5.3
 pandas-ta==0.3.14b

-ccxt==2.9.12
-cryptography==39.0.2
+ccxt==3.0.37
+cryptography==40.0.1
 aiohttp==3.8.4
-SQLAlchemy==2.0.5.post1
+SQLAlchemy==2.0.7
 python-telegram-bot==13.15
 arrow==1.2.3
 cachetools==4.2.2
@@ -26,17 +26,17 @@ pyarrow==11.0.0; platform_machine != 'armv7l'
 py_find_1st==1.1.5

 # Load ticker files 30% faster
-python-rapidjson==1.9
+python-rapidjson==1.10
 # Properly format api responses
-orjson==3.8.7
+orjson==3.8.8

 # Notify systemd
 sdnotify==0.3.2

 # API Server
-fastapi==0.94.0
-pydantic==1.10.6
-uvicorn==0.21.0
+fastapi==0.95.0
+pydantic==1.10.7
+uvicorn==0.21.1
 pyjwt==2.6.0
 aiofiles==23.1.0
 psutil==5.9.4
@@ -55,3 +55,5 @@ schedule==1.1.0
 #WS Messages
 websockets==10.4
 janus==1.0.0
+
+ast-comments==1.0.1
@@ -14,7 +14,8 @@ from freqtrade.commands import (start_backtesting_show, start_convert_data, star
                                 start_hyperopt_show, start_install_ui, start_list_data,
                                 start_list_exchanges, start_list_markets, start_list_strategies,
                                 start_list_timeframes, start_new_strategy, start_show_trades,
-                                start_test_pairlist, start_trading, start_webserver)
+                                start_strategy_update, start_test_pairlist, start_trading,
+                                start_webserver)
 from freqtrade.commands.db_commands import start_convert_db
 from freqtrade.commands.deploy_commands import (clean_ui_subdir, download_and_install_ui,
                                                 get_ui_download_url, read_ui_version)
@@ -1546,3 +1547,37 @@ def test_start_convert_db(mocker, fee, tmpdir, caplog):
     start_convert_db(pargs)

     assert db_target_file.is_file()
+
+
+def test_start_strategy_updater(mocker, tmpdir):
+    sc_mock = mocker.patch('freqtrade.commands.strategy_utils_commands.start_conversion')
+    teststrats = Path(__file__).parent.parent / 'strategy/strats'
+    args = [
+        "strategy-updater",
+        "--userdir",
+        str(tmpdir),
+        "--strategy-path",
+        str(teststrats),
+    ]
+    pargs = get_args(args)
+    pargs['config'] = None
+    start_strategy_update(pargs)
+    # Number of strategies in the test directory
+    assert sc_mock.call_count == 11
+
+    sc_mock.reset_mock()
+    args = [
+        "strategy-updater",
+        "--userdir",
+        str(tmpdir),
+        "--strategy-path",
+        str(teststrats),
+        "--strategy-list",
+        "StrategyTestV3",
+        "StrategyTestV2"
+    ]
+    pargs = get_args(args)
+    pargs['config'] = None
+    start_strategy_update(pargs)
+    # Number of strategies in the test directory
+    assert sc_mock.call_count == 2
@@ -299,7 +299,7 @@ def create_mock_trades(fee, is_short: Optional[bool] = False, use_db: bool = Tru
     """
     def add_trade(trade):
         if use_db:
-            Trade.query.session.add(trade)
+            Trade.session.add(trade)
         else:
             LocalTrade.add_bt_trade(trade)
     is_short1 = is_short if is_short is not None else True
@@ -332,11 +332,11 @@ def create_mock_trades_with_leverage(fee, use_db: bool = True):
     Create some fake trades ...
     """
     if use_db:
-        Trade.query.session.rollback()
+        Trade.session.rollback()

     def add_trade(trade):
         if use_db:
-            Trade.query.session.add(trade)
+            Trade.session.add(trade)
         else:
             LocalTrade.add_bt_trade(trade)

@@ -366,7 +366,7 @@ def create_mock_trades_with_leverage(fee, use_db: bool = True):
     add_trade(trade)

     if use_db:
-        Trade.query.session.flush()
+        Trade.session.flush()


 def create_mock_trades_usdt(fee, is_short: Optional[bool] = False, use_db: bool = True):
@@ -375,7 +375,7 @@ def create_mock_trades_usdt(fee, is_short: Optional[bool] = False, use_db: bool
     """
     def add_trade(trade):
         if use_db:
-            Trade.query.session.add(trade)
+            Trade.session.add(trade)
         else:
             LocalTrade.add_bt_trade(trade)

@@ -98,7 +98,7 @@ def test_load_backtest_data_new_format(testdatadir):
     assert bt_data.equals(bt_data3)

     with pytest.raises(ValueError, match=r"File .* does not exist\."):
-        load_backtest_data(str("filename") + "nofile")
+        load_backtest_data("filename" + "nofile")

     with pytest.raises(ValueError, match=r"Unknown dataformat."):
         load_backtest_data(testdatadir / "backtest_results" / LAST_BT_RESULT_FN)
@@ -252,7 +252,7 @@ def test_datahandler__check_empty_df(testdatadir, caplog):
     assert log_has_re(expected_text, caplog)


-@pytest.mark.parametrize('datahandler', ['feather', 'parquet'])
+@pytest.mark.parametrize('datahandler', ['parquet'])
 def test_datahandler_trades_not_supported(datahandler, testdatadir, ):
     dh = get_datahandler(testdatadir, datahandler)
     with pytest.raises(NotImplementedError):
@@ -496,6 +496,58 @@ def test_hdf5datahandler_ohlcv_purge(mocker, testdatadir):
     assert unlinkmock.call_count == 2


+def test_featherdatahandler_trades_load(testdatadir):
+    dh = get_datahandler(testdatadir, 'feather')
+    trades = dh.trades_load('XRP/ETH')
+    assert isinstance(trades, list)
+    assert trades[0][0] == 1570752011620
+    assert trades[-1][-1] == 0.1986231
+
+    trades1 = dh.trades_load('UNITTEST/NONEXIST')
+    assert trades1 == []
+
+
+def test_featherdatahandler_trades_store(testdatadir, tmpdir):
+    tmpdir1 = Path(tmpdir)
+    dh = get_datahandler(testdatadir, 'feather')
+    trades = dh.trades_load('XRP/ETH')
+
+    dh1 = get_datahandler(tmpdir1, 'feather')
+    dh1.trades_store('XRP/NEW', trades)
+    file = tmpdir1 / 'XRP_NEW-trades.feather'
+    assert file.is_file()
+    # Load trades back
+    trades_new = dh1.trades_load('XRP/NEW')
+
+    assert len(trades_new) == len(trades)
+    assert trades[0][0] == trades_new[0][0]
+    assert trades[0][1] == trades_new[0][1]
+    # assert trades[0][2] == trades_new[0][2]  # This is nan - so comparison does not make sense
+    assert trades[0][3] == trades_new[0][3]
+    assert trades[0][4] == trades_new[0][4]
+    assert trades[0][5] == trades_new[0][5]
+    assert trades[0][6] == trades_new[0][6]
+    assert trades[-1][0] == trades_new[-1][0]
+    assert trades[-1][1] == trades_new[-1][1]
+    # assert trades[-1][2] == trades_new[-1][2]  # This is nan - so comparison does not make sense
+    assert trades[-1][3] == trades_new[-1][3]
+    assert trades[-1][4] == trades_new[-1][4]
+    assert trades[-1][5] == trades_new[-1][5]
+    assert trades[-1][6] == trades_new[-1][6]
+
+
+def test_featherdatahandler_trades_purge(mocker, testdatadir):
+    mocker.patch.object(Path, "exists", MagicMock(return_value=False))
+    unlinkmock = mocker.patch.object(Path, "unlink", MagicMock())
+    dh = get_datahandler(testdatadir, 'feather')
+    assert not dh.trades_purge('UNITTEST/NONEXIST')
+    assert unlinkmock.call_count == 0
+
+    mocker.patch.object(Path, "exists", MagicMock(return_value=True))
+    assert dh.trades_purge('UNITTEST/NONEXIST')
+    assert unlinkmock.call_count == 1
+
+
 def test_gethandlerclass():
     cl = get_datahandlerclass('json')
     assert cl == JsonDataHandler
@@ -409,7 +409,7 @@ def test_init_with_refresh(default_conf, mocker) -> None:


 def test_file_dump_json_tofile(testdatadir) -> None:
-    file = testdatadir / 'test_{id}.json'.format(id=str(uuid.uuid4()))
+    file = testdatadir / f'test_{uuid.uuid4()}.json'
     data = {'bar': 'foo'}

     # check the file we will create does not exist
@@ -11,6 +11,19 @@ from tests.conftest import EXMS, get_mock_coro, get_patched_exchange, log_has_re
 from tests.exchange.test_exchange import ccxt_exceptionhandlers


+@pytest.mark.parametrize('side,type,time_in_force,expected', [
+    ('buy', 'limit', 'gtc', {'timeInForce': 'GTC'}),
+    ('buy', 'limit', 'IOC', {'timeInForce': 'IOC'}),
+    ('buy', 'market', 'IOC', {}),
+    ('buy', 'limit', 'PO', {'timeInForce': 'PO'}),
+    ('sell', 'limit', 'PO', {'timeInForce': 'PO'}),
+    ('sell', 'market', 'PO', {}),
+])
+def test__get_params_binance(default_conf, mocker, side, type, time_in_force, expected):
+    exchange = get_patched_exchange(mocker, default_conf, id='binance')
+    assert exchange._get_params(side, type, 1, False, time_in_force) == expected
+
+
 @pytest.mark.parametrize('trademode', [TradingMode.FUTURES, TradingMode.SPOT])
 @pytest.mark.parametrize('limitratio,expected,side', [
     (None, 220 * 0.99, "sell"),
@@ -39,7 +52,7 @@ def test_create_stoploss_order_binance(default_conf, mocker, limitratio, expecte

     exchange = get_patched_exchange(mocker, default_conf, api_mock, 'binance')

-    with pytest.raises(OperationalException):
+    with pytest.raises(InvalidOrderException):
         order = exchange.create_stoploss(
             pair='ETH/BTC',
             amount=1,
@@ -118,7 +131,7 @@ def test_create_stoploss_order_dry_run_binance(default_conf, mocker):

     exchange = get_patched_exchange(mocker, default_conf, api_mock, 'binance')

-    with pytest.raises(OperationalException):
+    with pytest.raises(InvalidOrderException):
         order = exchange.create_stoploss(
             pair='ETH/BTC',
             amount=1,
@@ -542,7 +555,6 @@ def test__set_leverage_binance(mocker, default_conf):
         "set_leverage",
         pair="XRP/USDT",
         leverage=5.0,
-        trading_mode=TradingMode.FUTURES
     )


@@ -37,7 +37,7 @@ EXCHANGES = {
         'stake_currency': 'USDT',
         'use_ci_proxy': True,
         'hasQuoteVolume': True,
-        'timeframe': '5m',
+        'timeframe': '1h',
         'futures': True,
         'futures_pair': 'BTC/USDT:USDT',
         'hasQuoteVolumeFutures': True,
@@ -66,7 +66,7 @@ EXCHANGES = {
         'pair': 'BTC/USDT',
         'stake_currency': 'USDT',
         'hasQuoteVolume': True,
-        'timeframe': '5m',
+        'timeframe': '1h',
         'futures': False,
         'sample_order': [{
             "symbol": "SOLUSDT",
@@ -91,7 +91,7 @@ EXCHANGES = {
         'pair': 'BTC/USDT',
         'stake_currency': 'USDT',
         'hasQuoteVolume': True,
-        'timeframe': '5m',
+        'timeframe': '1h',
         'leverage_tiers_public': False,
         'leverage_in_spot_market': True,
     },
@@ -99,7 +99,7 @@ EXCHANGES = {
         'pair': 'XRP/USDT',
         'stake_currency': 'USDT',
         'hasQuoteVolume': True,
-        'timeframe': '5m',
+        'timeframe': '1h',
         'leverage_tiers_public': False,
         'leverage_in_spot_market': True,
         'sample_order': [
@@ -141,7 +141,7 @@ EXCHANGES = {
         'pair': 'BTC/USDT',
         'stake_currency': 'USDT',
         'hasQuoteVolume': True,
-        'timeframe': '5m',
+        'timeframe': '1h',
         'futures': True,
         'futures_pair': 'BTC/USDT:USDT',
         'hasQuoteVolumeFutures': True,
@@ -215,7 +215,7 @@ EXCHANGES = {
         'pair': 'BTC/USDT',
         'stake_currency': 'USDT',
         'hasQuoteVolume': True,
-        'timeframe': '5m',
+        'timeframe': '1h',
         'futures': True,
         'futures_pair': 'BTC/USDT:USDT',
         'hasQuoteVolumeFutures': False,
@@ -226,7 +226,7 @@ EXCHANGES = {
         'pair': 'BTC/USDT',
         'stake_currency': 'USDT',
         'hasQuoteVolume': True,
-        'timeframe': '5m',
+        'timeframe': '1h',
         'futures_pair': 'BTC/USDT:USDT',
         'futures': True,
         'leverage_tiers_public': True,
@@ -253,14 +253,14 @@ EXCHANGES = {
         'pair': 'ETH/BTC',
         'stake_currency': 'BTC',
         'hasQuoteVolume': True,
-        'timeframe': '5m',
+        'timeframe': '1h',
         'futures': False,
     },
     'bitvavo': {
         'pair': 'BTC/EUR',
         'stake_currency': 'EUR',
         'hasQuoteVolume': True,
-        'timeframe': '5m',
+        'timeframe': '1h',
         'leverage_tiers_public': False,
         'leverage_in_spot_market': False,
     },
@ -113,18 +113,21 @@ async def async_ccxt_exception(mocker, default_conf, api_mock, fun, mock_ccxt_fu
        exchange = get_patched_exchange(mocker, default_conf, api_mock)
        await getattr(exchange, fun)(**kwargs)
    assert api_mock.__dict__[mock_ccxt_fun].call_count == retries
+    exchange.close()

    with pytest.raises(TemporaryError):
        api_mock.__dict__[mock_ccxt_fun] = MagicMock(side_effect=ccxt.NetworkError("DeadBeef"))
        exchange = get_patched_exchange(mocker, default_conf, api_mock)
        await getattr(exchange, fun)(**kwargs)
    assert api_mock.__dict__[mock_ccxt_fun].call_count == retries
+    exchange.close()

    with pytest.raises(OperationalException):
        api_mock.__dict__[mock_ccxt_fun] = MagicMock(side_effect=ccxt.BaseError("DeadBeef"))
        exchange = get_patched_exchange(mocker, default_conf, api_mock)
        await getattr(exchange, fun)(**kwargs)
    assert api_mock.__dict__[mock_ccxt_fun].call_count == 1
+    exchange.close()


def test_init(default_conf, mocker, caplog):
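Note on the exchange.close() calls added in this hunk: the async ccxt client used by these tests owns an aiohttp session, and leaving it open at the end of a test typically produces "unclosed client session" warnings. freqtrade's Exchange.close() wraps that cleanup; a minimal raw-ccxt sketch of the same idea (illustrative only, not part of this diff) looks roughly like:

import asyncio

import ccxt.async_support as ccxt_async


async def use_exchange_and_clean_up():
    exchange = ccxt_async.binance()
    try:
        # ... interact with the exchange here (fetch_ohlcv, fetch_trades, ...) ...
        pass
    finally:
        # Releases the aiohttp.ClientSession owned by the async exchange,
        # which is what the added exchange.close() calls take care of in the tests.
        await exchange.close()


asyncio.run(use_exchange_and_clean_up())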
@ -1039,9 +1042,9 @@ def test_validate_ordertypes(default_conf, mocker):
    ('bybit', 'last', True),
    ('bybit', 'mark', True),
    ('bybit', 'index', True),
-    # ('okx', 'last', True),
-    # ('okx', 'mark', True),
-    # ('okx', 'index', True),
+    ('okx', 'last', True),
+    ('okx', 'mark', True),
+    ('okx', 'index', True),
    ('gate', 'last', True),
    ('gate', 'mark', True),
    ('gate', 'index', True),

@ -2248,7 +2251,6 @@ def test_refresh_latest_ohlcv_cache(mocker, default_conf, candle_type, time_mach
    assert res[pair2].at[0, 'open']


-@pytest.mark.asyncio
@pytest.mark.parametrize("exchange_name", EXCHANGES)
async def test__async_get_candle_history(default_conf, mocker, caplog, exchange_name):
    ohlcv = [

@ -2277,7 +2279,7 @@ async def test__async_get_candle_history(default_conf, mocker, caplog, exchange_
    assert res[3] == ohlcv
    assert exchange._api_async.fetch_ohlcv.call_count == 1
    assert not log_has(f"Using cached candle (OHLCV) data for {pair} ...", caplog)
-
+    exchange.close()
    # exchange = Exchange(default_conf)
    await async_ccxt_exception(mocker, default_conf, MagicMock(),
                               "_async_get_candle_history", "fetch_ohlcv",

@ -2292,15 +2294,17 @@ async def test__async_get_candle_history(default_conf, mocker, caplog, exchange_
        await exchange._async_get_candle_history(pair, "5m", CandleType.SPOT,
                                                 (arrow.utcnow().int_timestamp - 2000) * 1000)

+    exchange.close()

    with pytest.raises(OperationalException, match=r'Exchange.* does not support fetching '
                                                   r'historical candle \(OHLCV\) data\..*'):
        api_mock.fetch_ohlcv = MagicMock(side_effect=ccxt.NotSupported("Not supported"))
        exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
        await exchange._async_get_candle_history(pair, "5m", CandleType.SPOT,
                                                 (arrow.utcnow().int_timestamp - 2000) * 1000)
+    exchange.close()


-@pytest.mark.asyncio
async def test__async_kucoin_get_candle_history(default_conf, mocker, caplog):
    from freqtrade.exchange.common import _reset_logging_mixin
    _reset_logging_mixin()

@ -2341,9 +2345,9 @@ async def test__async_kucoin_get_candle_history(default_conf, mocker, caplog):
    # Expect the "returned exception" message 12 times (4 retries * 3 (loop))
    assert num_log_has_re(msg, caplog) == 12
    assert num_log_has_re(msg2, caplog) == 9
+    exchange.close()


-@pytest.mark.asyncio
async def test__async_get_candle_history_empty(default_conf, mocker, caplog):
    """ Test empty exchange result """
    ohlcv = []

@ -2363,6 +2367,7 @@ async def test__async_get_candle_history_empty(default_conf, mocker, caplog):
    assert res[2] == CandleType.SPOT
    assert res[3] == ohlcv
    assert exchange._api_async.fetch_ohlcv.call_count == 1
+    exchange.close()


def test_refresh_latest_ohlcv_inv_result(default_conf, mocker, caplog):

@ -2757,7 +2762,6 @@ async def test___async_get_candle_history_sort(default_conf, mocker, exchange_na
    assert res_ohlcv[9][5] == 2.31452783


-@pytest.mark.asyncio
@pytest.mark.parametrize("exchange_name", EXCHANGES)
async def test__async_fetch_trades(default_conf, mocker, caplog, exchange_name,
                                   fetch_trades_result):

@ -2785,8 +2789,8 @@ async def test__async_fetch_trades(default_conf, mocker, caplog, exchange_name,
    assert exchange._api_async.fetch_trades.call_args[1]['limit'] == 1000
    assert exchange._api_async.fetch_trades.call_args[1]['params'] == {'from': '123'}
    assert log_has_re(f"Fetching trades for pair {pair}, params: .*", caplog)
+    exchange.close()

-    exchange = Exchange(default_conf)
    await async_ccxt_exception(mocker, default_conf, MagicMock(),
                               "_async_fetch_trades", "fetch_trades",
                               pair='ABCD/BTC', since=None)

@ -2796,15 +2800,16 @@ async def test__async_fetch_trades(default_conf, mocker, caplog, exchange_name,
        api_mock.fetch_trades = MagicMock(side_effect=ccxt.BaseError("Unknown error"))
        exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
        await exchange._async_fetch_trades(pair, since=(arrow.utcnow().int_timestamp - 2000) * 1000)
+    exchange.close()

    with pytest.raises(OperationalException, match=r'Exchange.* does not support fetching '
                                                   r'historical trade data\..*'):
        api_mock.fetch_trades = MagicMock(side_effect=ccxt.NotSupported("Not supported"))
        exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
        await exchange._async_fetch_trades(pair, since=(arrow.utcnow().int_timestamp - 2000) * 1000)
+    exchange.close()


-@pytest.mark.asyncio
@pytest.mark.parametrize("exchange_name", EXCHANGES)
async def test__async_fetch_trades_contract_size(default_conf, mocker, caplog, exchange_name,
                                                 fetch_trades_result):

@ -2839,6 +2844,7 @@ async def test__async_fetch_trades_contract_size(default_conf, mocker, caplog, e
    pair = 'ETH/USDT:USDT'
    res = await exchange._async_fetch_trades(pair, since=None, params=None)
    assert res[0][5] == 300
+    exchange.close()


@pytest.mark.asyncio

@ -3387,7 +3393,7 @@ def test_merge_ft_has_dict(default_conf, mocker):
    ex = Binance(default_conf)
    assert ex._ft_has != Exchange._ft_has_default
    assert ex.get_option('stoploss_on_exchange')
-    assert ex.get_option('order_time_in_force') == ['GTC', 'FOK', 'IOC']
+    assert ex.get_option('order_time_in_force') == ['GTC', 'FOK', 'IOC', 'PO']
    assert ex.get_option('trades_pagination') == 'id'
    assert ex.get_option('trades_pagination_arg') == 'fromId'

@ -3868,29 +3874,6 @@ def test_get_stake_amount_considering_leverage(
        stake_amount, leverage) == min_stake_with_lev


-@pytest.mark.parametrize("exchange_name,trading_mode", [
-    ("binance", TradingMode.FUTURES),
-])
-def test__set_leverage(mocker, default_conf, exchange_name, trading_mode):
-
-    api_mock = MagicMock()
-    api_mock.set_leverage = MagicMock()
-    type(api_mock).has = PropertyMock(return_value={'setLeverage': True})
-    default_conf['dry_run'] = False
-
-    ccxt_exceptionhandlers(
-        mocker,
-        default_conf,
-        api_mock,
-        exchange_name,
-        "_set_leverage",
-        "set_leverage",
-        pair="XRP/USDT",
-        leverage=5.0,
-        trading_mode=trading_mode
-    )
-
-
@pytest.mark.parametrize("margin_mode", [
    (MarginMode.CROSS),
    (MarginMode.ISOLATED)

@ -4830,7 +4813,6 @@ def test_load_leverage_tiers(mocker, default_conf, leverage_tiers, exchange_name
    )


-@pytest.mark.asyncio
@pytest.mark.parametrize('exchange_name', EXCHANGES)
async def test_get_market_leverage_tiers(mocker, default_conf, exchange_name):
    default_conf['exchange']['name'] = exchange_name

@ -4,7 +4,7 @@ from unittest.mock import MagicMock
import ccxt
import pytest

-from freqtrade.exceptions import DependencyException, InvalidOrderException, OperationalException
+from freqtrade.exceptions import DependencyException, InvalidOrderException
from tests.conftest import EXMS, get_patched_exchange
from tests.exchange.test_exchange import ccxt_exceptionhandlers

@ -31,7 +31,7 @@ def test_create_stoploss_order_huobi(default_conf, mocker, limitratio, expected,

    exchange = get_patched_exchange(mocker, default_conf, api_mock, 'huobi')

-    with pytest.raises(OperationalException):
+    with pytest.raises(InvalidOrderException):
        order = exchange.create_stoploss(pair='ETH/BTC', amount=1, stop_price=190,
                                         order_types={'stoploss_on_exchange_limit_ratio': 1.05},
                                         side=side,

@ -84,7 +84,7 @@ def test_create_stoploss_order_dry_run_huobi(default_conf, mocker):

    exchange = get_patched_exchange(mocker, default_conf, api_mock, 'huobi')

-    with pytest.raises(OperationalException):
+    with pytest.raises(InvalidOrderException):
        order = exchange.create_stoploss(pair='ETH/BTC', amount=1, stop_price=190,
                                         order_types={'stoploss_on_exchange_limit_ratio': 1.05},
                                         side='sell', leverage=1.0)

@ -4,7 +4,7 @@ from unittest.mock import MagicMock
import ccxt
import pytest

-from freqtrade.exceptions import DependencyException, InvalidOrderException, OperationalException
+from freqtrade.exceptions import DependencyException, InvalidOrderException
from tests.conftest import EXMS, get_patched_exchange
from tests.exchange.test_exchange import ccxt_exceptionhandlers

@ -31,7 +31,7 @@ def test_create_stoploss_order_kucoin(default_conf, mocker, limitratio, expected

    exchange = get_patched_exchange(mocker, default_conf, api_mock, 'kucoin')
    if order_type == 'limit':
-        with pytest.raises(OperationalException):
+        with pytest.raises(InvalidOrderException):
            order = exchange.create_stoploss(pair='ETH/BTC', amount=1, stop_price=190,
                                             order_types={
                                                 'stoploss': order_type,

@ -92,7 +92,7 @@ def test_stoploss_order_dry_run_kucoin(default_conf, mocker):

    exchange = get_patched_exchange(mocker, default_conf, api_mock, 'kucoin')

-    with pytest.raises(OperationalException):
+    with pytest.raises(InvalidOrderException):
        order = exchange.create_stoploss(pair='ETH/BTC', amount=1, stop_price=190,
                                         order_types={'stoploss': 'limit',
                                                      'stoploss_on_exchange_limit_ratio': 1.05},

@ -2,11 +2,13 @@ from datetime import datetime, timedelta, timezone
from pathlib import Path
from unittest.mock import MagicMock, PropertyMock

+import ccxt
import pytest

from freqtrade.enums import CandleType, MarginMode, TradingMode
+from freqtrade.exceptions import RetryableOrderError
from freqtrade.exchange.exchange import timeframe_to_minutes
-from tests.conftest import get_mock_coro, get_patched_exchange, log_has
+from tests.conftest import EXMS, get_mock_coro, get_patched_exchange, log_has
from tests.exchange.test_exchange import ccxt_exceptionhandlers

@ -476,3 +478,116 @@ def test_load_leverage_tiers_okx(default_conf, mocker, markets, tmpdir, caplog,
        exchange.load_leverage_tiers()

    assert log_has(logmsg, caplog)
+
+
+def test__set_leverage_okx(mocker, default_conf):
+
+    api_mock = MagicMock()
+    api_mock.set_leverage = MagicMock()
+    type(api_mock).has = PropertyMock(return_value={'setLeverage': True})
+    default_conf['dry_run'] = False
+    default_conf['trading_mode'] = TradingMode.FUTURES
+    default_conf['margin_mode'] = MarginMode.ISOLATED
+
+    exchange = get_patched_exchange(mocker, default_conf, api_mock, id="okx")
+    exchange._lev_prep('BTC/USDT:USDT', 3.2, 'buy')
+    assert api_mock.set_leverage.call_count == 1
+    # Leverage is rounded to 3.
+    assert api_mock.set_leverage.call_args_list[0][1]['leverage'] == 3.2
+    assert api_mock.set_leverage.call_args_list[0][1]['symbol'] == 'BTC/USDT:USDT'
+    assert api_mock.set_leverage.call_args_list[0][1]['params'] == {
+        'mgnMode': 'isolated',
+        'posSide': 'net'}
+
+    ccxt_exceptionhandlers(
+        mocker,
+        default_conf,
+        api_mock,
+        "okx",
+        "_lev_prep",
+        "set_leverage",
+        pair="XRP/USDT:USDT",
+        leverage=5.0,
+        side='buy'
+    )
+
+
+@pytest.mark.usefixtures("init_persistence")
+def test_fetch_stoploss_order_okx(default_conf, mocker):
+    default_conf['dry_run'] = False
+    api_mock = MagicMock()
+    api_mock.fetch_order = MagicMock()
+
+    exchange = get_patched_exchange(mocker, default_conf, api_mock, id='okx')
+
+    exchange.fetch_stoploss_order('1234', 'ETH/BTC')
+    assert api_mock.fetch_order.call_count == 1
+    assert api_mock.fetch_order.call_args_list[0][0][0] == '1234'
+    assert api_mock.fetch_order.call_args_list[0][0][1] == 'ETH/BTC'
+    assert api_mock.fetch_order.call_args_list[0][1]['params'] == {'stop': True}
+
+    api_mock.fetch_order = MagicMock(side_effect=ccxt.OrderNotFound)
+    api_mock.fetch_open_orders = MagicMock(return_value=[])
+    api_mock.fetch_closed_orders = MagicMock(return_value=[])
+    api_mock.fetch_canceled_orders = MagicMock(creturn_value=[])
+
+    with pytest.raises(RetryableOrderError):
+        exchange.fetch_stoploss_order('1234', 'ETH/BTC')
+    assert api_mock.fetch_order.call_count == 1
+    assert api_mock.fetch_open_orders.call_count == 1
+    assert api_mock.fetch_closed_orders.call_count == 1
+    assert api_mock.fetch_canceled_orders.call_count == 1
+
+    api_mock.fetch_order.reset_mock()
+    api_mock.fetch_open_orders.reset_mock()
+    api_mock.fetch_closed_orders.reset_mock()
+    api_mock.fetch_canceled_orders.reset_mock()
+
+    api_mock.fetch_closed_orders = MagicMock(return_value=[
+        {
+            'id': '1234',
+            'status': 'closed',
+            'info': {'ordId': '123455'}
+        }
+    ])
+    mocker.patch(f"{EXMS}.fetch_order", MagicMock(return_value={'id': '123455'}))
+    resp = exchange.fetch_stoploss_order('1234', 'ETH/BTC')
+    assert api_mock.fetch_order.call_count == 1
+    assert api_mock.fetch_open_orders.call_count == 1
+    assert api_mock.fetch_closed_orders.call_count == 1
+    assert api_mock.fetch_canceled_orders.call_count == 0
+
+    assert resp['id'] == '1234'
+    assert resp['id_stop'] == '123455'
+    assert resp['type'] == 'stoploss'
+
+    default_conf['dry_run'] = True
+    exchange = get_patched_exchange(mocker, default_conf, api_mock, id='okx')
+    dro_mock = mocker.patch(f"{EXMS}.fetch_dry_run_order", MagicMock(return_value={'id': '123455'}))
+
+    api_mock.fetch_order.reset_mock()
+    api_mock.fetch_open_orders.reset_mock()
+    api_mock.fetch_closed_orders.reset_mock()
+    api_mock.fetch_canceled_orders.reset_mock()
+    resp = exchange.fetch_stoploss_order('1234', 'ETH/BTC')
+
+    assert api_mock.fetch_order.call_count == 0
+    assert api_mock.fetch_open_orders.call_count == 0
+    assert api_mock.fetch_closed_orders.call_count == 0
+    assert api_mock.fetch_canceled_orders.call_count == 0
+    assert dro_mock.call_count == 1
+
+
+@pytest.mark.parametrize('sl1,sl2,sl3,side', [
+    (1501, 1499, 1501, "sell"),
+    (1499, 1501, 1499, "buy")
+])
+def test_stoploss_adjust_okx(mocker, default_conf, sl1, sl2, sl3, side):
+    exchange = get_patched_exchange(mocker, default_conf, id='okx')
+    order = {
+        'type': 'stoploss',
+        'price': 1500,
+        'stopLossPrice': 1500,
+    }
+    assert exchange.stoploss_adjust(sl1, order, side=side)
+    assert not exchange.stoploss_adjust(sl2, order, side=side)

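The new test_fetch_stoploss_order_okx above exercises a lookup that first queries the stop order directly (params={'stop': True}) and, on ccxt.OrderNotFound, falls back to scanning open, closed and canceled orders before giving up with RetryableOrderError. A rough sketch of that flow, using a hypothetical helper and simplified return handling (freqtrade's actual implementation differs in details, e.g. it resolves the linked regular order id afterwards):

import ccxt

from freqtrade.exceptions import RetryableOrderError


def fetch_stop_order(api, order_id: str, pair: str) -> dict:
    """Illustrative fallback lookup for an exchange-side stop order (hypothetical helper)."""
    try:
        # Direct lookup of the stop/algo order.
        return api.fetch_order(order_id, pair, params={'stop': True})
    except ccxt.OrderNotFound:
        # Fall back to scanning order history for a matching id.
        for fetch in (api.fetch_open_orders, api.fetch_closed_orders, api.fetch_canceled_orders):
            for order in fetch(pair):
                if order.get('id') == order_id:
                    return order
        raise RetryableOrderError(f"Stop order {order_id} for {pair} not found (yet).")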
@ -5,7 +5,7 @@ from unittest.mock import MagicMock
import pytest

from freqtrade.data.history import get_timerange
-from freqtrade.enums import ExitType
+from freqtrade.enums import ExitType, TradingMode
from freqtrade.optimize.backtesting import Backtesting
from freqtrade.persistence.trade_model import LocalTrade
from tests.conftest import EXMS, patch_exchange

@ -925,12 +925,14 @@ def test_backtest_results(default_conf, fee, mocker, caplog, data: BTContainer)
    mocker.patch(f"{EXMS}.get_min_pair_stake_amount", return_value=0.00001)
    mocker.patch(f"{EXMS}.get_max_pair_stake_amount", return_value=float('inf'))
    mocker.patch(f"{EXMS}.get_max_leverage", return_value=100)
+    mocker.patch(f"{EXMS}.calculate_funding_fees", return_value=0)
    patch_exchange(mocker)
    frame = _build_backtest_dataframe(data.data)
    backtesting = Backtesting(default_conf)
    # TODO: Should we initialize this properly??
-    backtesting._can_short = True
+    backtesting.trading_mode = TradingMode.MARGIN
    backtesting._set_strategy(backtesting.strategylist[0])
+    backtesting._can_short = True
    backtesting.required_startup = 0
    backtesting.strategy.advise_entry = lambda a, m: frame
    backtesting.strategy.advise_exit = lambda a, m: frame

@ -344,7 +344,7 @@ def test_backtest_abort(default_conf, mocker, testdatadir) -> None:
    assert backtesting.progress.progress == 0


-def test_backtesting_start(default_conf, mocker, testdatadir, caplog) -> None:
+def test_backtesting_start(default_conf, mocker, caplog) -> None:
    def get_timerange(input1):
        return Arrow(2017, 11, 14, 21, 17), Arrow(2017, 11, 14, 22, 59)

@ -367,6 +367,7 @@ def test_backtesting_start(default_conf, mocker, testdatadir, caplog) -> None:
    backtesting = Backtesting(default_conf)
    backtesting._set_strategy(backtesting.strategylist[0])
    backtesting.strategy.bot_loop_start = MagicMock()
+    backtesting.strategy.bot_start = MagicMock()
    backtesting.start()
    # check the logs, that will contain the backtest result
    exists = [

@ -376,7 +377,8 @@ def test_backtesting_start(default_conf, mocker, testdatadir, caplog) -> None:
    for line in exists:
        assert log_has(line, caplog)
    assert backtesting.strategy.dp._pairlists is not None
-    assert backtesting.strategy.bot_loop_start.call_count == 1
+    assert backtesting.strategy.bot_start.call_count == 1
+    assert backtesting.strategy.bot_loop_start.call_count == 0
    assert sbs.call_count == 1
    assert sbc.call_count == 1

@ -10,7 +10,7 @@ from arrow import Arrow
from freqtrade.configuration import TimeRange
from freqtrade.data import history
from freqtrade.data.history import get_timerange
-from freqtrade.enums import ExitType
+from freqtrade.enums import ExitType, TradingMode
from freqtrade.optimize.backtesting import Backtesting
from tests.conftest import EXMS, patch_exchange

@ -108,9 +108,10 @@ def test_backtest_position_adjustment_detailed(default_conf, fee, mocker, levera
    default_conf.update({
        "stake_amount": 100.0,
        "dry_run_wallet": 1000.0,
-        "strategy": "StrategyTestV3"
+        "strategy": "StrategyTestV3",
    })
    backtesting = Backtesting(default_conf)
+    backtesting.trading_mode = TradingMode.FUTURES
    backtesting._can_short = True
    backtesting._set_strategy(backtesting.strategylist[0])
    pair = 'XRP/USDT'

@ -872,7 +872,8 @@ def test_in_strategy_auto_hyperopt(mocker, hyperopt_conf, tmpdir, fee) -> None:
    hyperopt.backtesting.exchange.get_max_leverage = MagicMock(return_value=1.0)
    assert isinstance(hyperopt.custom_hyperopt, HyperOptAuto)
    assert isinstance(hyperopt.backtesting.strategy.buy_rsi, IntParameter)
-    assert hyperopt.backtesting.strategy.bot_loop_started is True
+    assert hyperopt.backtesting.strategy.bot_started is True
+    assert hyperopt.backtesting.strategy.bot_loop_started is False

    assert hyperopt.backtesting.strategy.buy_rsi.in_space is True
    assert hyperopt.backtesting.strategy.buy_rsi.value == 35

@ -922,7 +923,8 @@ def test_in_strategy_auto_hyperopt_with_parallel(mocker, hyperopt_conf, tmpdir,

    assert isinstance(hyperopt.custom_hyperopt, HyperOptAuto)
    assert isinstance(hyperopt.backtesting.strategy.buy_rsi, IntParameter)
-    assert hyperopt.backtesting.strategy.bot_loop_started is True
+    assert hyperopt.backtesting.strategy.bot_started is True
+    assert hyperopt.backtesting.strategy.bot_loop_started is False

    assert hyperopt.backtesting.strategy.buy_rsi.in_space is True
    assert hyperopt.backtesting.strategy.buy_rsi.value == 35

@ -959,7 +961,8 @@ def test_in_strategy_auto_hyperopt_per_epoch(mocker, hyperopt_conf, tmpdir, fee)
    hyperopt.backtesting.exchange.get_max_leverage = MagicMock(return_value=1.0)
    assert isinstance(hyperopt.custom_hyperopt, HyperOptAuto)
    assert isinstance(hyperopt.backtesting.strategy.buy_rsi, IntParameter)
-    assert hyperopt.backtesting.strategy.bot_loop_started is True
+    assert hyperopt.backtesting.strategy.bot_loop_started is False
+    assert hyperopt.backtesting.strategy.bot_started is True

    assert hyperopt.backtesting.strategy.buy_rsi.in_space is True
    assert hyperopt.backtesting.strategy.buy_rsi.value == 35

@ -236,7 +236,7 @@ def test_store_backtest_candles(testdatadir, mocker):

    assert dump_mock.call_count == 1
    assert isinstance(dump_mock.call_args_list[0][0][0], Path)
-    assert str(dump_mock.call_args_list[0][0][0]).endswith(str('_signals.pkl'))
+    assert str(dump_mock.call_args_list[0][0][0]).endswith('_signals.pkl')

    dump_mock.reset_mock()
    # mock file exporting

@ -245,7 +245,7 @@ def test_store_backtest_candles(testdatadir, mocker):
    assert dump_mock.call_count == 1
    assert isinstance(dump_mock.call_args_list[0][0][0], Path)
    # result will be testdatadir / testresult-<timestamp>_signals.pkl
-    assert str(dump_mock.call_args_list[0][0][0]).endswith(str('_signals.pkl'))
+    assert str(dump_mock.call_args_list[0][0][0]).endswith('_signals.pkl')
    dump_mock.reset_mock()

@ -4,7 +4,7 @@ from pathlib import Path
from unittest.mock import MagicMock

import pytest
-from sqlalchemy import create_engine, text
+from sqlalchemy import create_engine, select, text

from freqtrade.constants import DEFAULT_DB_PROD_URL
from freqtrade.enums import TradingMode

@ -21,8 +21,8 @@ spot, margin, futures = TradingMode.SPOT, TradingMode.MARGIN, TradingMode.FUTURE
def test_init_create_session(default_conf):
    # Check if init create a session
    init_db(default_conf['db_url'])
-    assert hasattr(Trade, '_session')
-    assert 'scoped_session' in type(Trade._session).__name__
+    assert hasattr(Trade, 'session')
+    assert 'scoped_session' in type(Trade.session).__name__


def test_init_custom_db_url(default_conf, tmpdir):

@ -34,7 +34,7 @@ def test_init_custom_db_url(default_conf, tmpdir):

    init_db(default_conf['db_url'])
    assert Path(filename).is_file()
-    r = Trade._session.execute(text("PRAGMA journal_mode"))
+    r = Trade.session.execute(text("PRAGMA journal_mode"))
    assert r.first() == ('wal',)

@ -235,8 +235,9 @@ def test_migrate_new(mocker, default_conf, fee, caplog):
    # Run init to test migration
    init_db(default_conf['db_url'])

-    assert len(Trade.query.filter(Trade.id == 1).all()) == 1
-    trade = Trade.query.filter(Trade.id == 1).first()
+    trades = Trade.session.scalars(select(Trade).filter(Trade.id == 1)).all()
+    assert len(trades) == 1
+    trade = trades[0]
    assert trade.fee_open == fee.return_value
    assert trade.fee_close == fee.return_value
    assert trade.open_rate_requested is None

@ -404,9 +405,9 @@ def test_migrate_pairlocks(mocker, default_conf, fee, caplog):

    init_db(default_conf['db_url'])

-    assert len(PairLock.query.all()) == 2
-    assert len(PairLock.query.filter(PairLock.pair == '*').all()) == 1
-    pairlocks = PairLock.query.filter(PairLock.pair == 'ETH/BTC').all()
+    assert len(PairLock.get_all_locks().all()) == 2
+    assert len(PairLock.session.scalars(select(PairLock).filter(PairLock.pair == '*')).all()) == 1
+    pairlocks = PairLock.session.scalars(select(PairLock).filter(PairLock.pair == 'ETH/BTC')).all()
    assert len(pairlocks) == 1
    pairlocks[0].pair == 'ETH/BTC'
    pairlocks[0].side == '*'

@ -4,6 +4,7 @@ from types import FunctionType

import arrow
import pytest
+from sqlalchemy import select

from freqtrade.constants import DATETIME_PRINT_FORMAT
from freqtrade.enums import TradingMode

@ -1329,12 +1330,16 @@ def test_to_json(fee):
        open_rate=0.123,
        exchange='binance',
        enter_tag=None,
-        open_order_id='dry_run_buy_12345'
+        open_order_id='dry_run_buy_12345',
+        precision_mode=1,
+        amount_precision=8.0,
+        price_precision=7.0,
    )
    result = trade.to_json()
    assert isinstance(result, dict)

-    assert result == {'trade_id': None,
+    assert result == {
+        'trade_id': None,
        'pair': 'ADA/USDT',
        'base_currency': 'ADA',
        'quote_currency': 'USDT',

@ -1392,6 +1397,9 @@ def test_to_json(fee):
        'is_short': None,
        'trading_mode': None,
        'funding_fees': None,
+        'amount_precision': 8.0,
+        'price_precision': 7.0,
+        'precision_mode': 1,
        'orders': [],
    }

@ -1409,11 +1417,15 @@ def test_to_json(fee):
        close_rate=0.125,
        enter_tag='buys_signal_001',
        exchange='binance',
+        precision_mode=2,
+        amount_precision=7.0,
+        price_precision=8.0,
    )
    result = trade.to_json()
    assert isinstance(result, dict)

-    assert result == {'trade_id': None,
+    assert result == {
+        'trade_id': None,
        'pair': 'XRP/BTC',
        'base_currency': 'XRP',
        'quote_currency': 'BTC',

@ -1471,6 +1483,9 @@ def test_to_json(fee):
        'is_short': None,
        'trading_mode': None,
        'funding_fees': None,
+        'amount_precision': 7.0,
+        'price_precision': 8.0,
+        'precision_mode': 2,
        'orders': [],
    }

@ -1494,7 +1509,7 @@ def test_stoploss_reinitialization(default_conf, fee):
    assert trade.stop_loss_pct == -0.05
    assert trade.initial_stop_loss == 0.95
    assert trade.initial_stop_loss_pct == -0.05
-    Trade.query.session.add(trade)
+    Trade.session.add(trade)
    Trade.commit()

    # Lower stoploss

@ -1556,7 +1571,7 @@ def test_stoploss_reinitialization_leverage(default_conf, fee):
    assert trade.stop_loss_pct == -0.1
    assert trade.initial_stop_loss == 0.98
    assert trade.initial_stop_loss_pct == -0.1
-    Trade.query.session.add(trade)
+    Trade.session.add(trade)
    Trade.commit()

    # Lower stoploss

@ -1618,7 +1633,7 @@ def test_stoploss_reinitialization_short(default_conf, fee):
    assert trade.stop_loss_pct == -0.1
    assert trade.initial_stop_loss == 1.02
    assert trade.initial_stop_loss_pct == -0.1
-    Trade.query.session.add(trade)
+    Trade.session.add(trade)
    Trade.commit()
    # Lower stoploss
    Trade.stoploss_reinitialization(-0.15)

@ -1793,17 +1808,17 @@ def test_get_trades_proxy(fee, use_db, is_short):
@pytest.mark.usefixtures("init_persistence")
@pytest.mark.parametrize('is_short', [True, False])
def test_get_trades__query(fee, is_short):
-    query = Trade.get_trades([])
+    query = Trade.get_trades_query([])
    # without orders there should be no join issued.
-    query1 = Trade.get_trades([], include_orders=False)
+    query1 = Trade.get_trades_query([], include_orders=False)

    # Empty "with-options -> default - selectin"
    assert query._with_options == ()
    assert query1._with_options != ()

    create_mock_trades(fee, is_short)
-    query = Trade.get_trades([])
-    query1 = Trade.get_trades([], include_orders=False)
+    query = Trade.get_trades_query([])
+    query1 = Trade.get_trades_query([], include_orders=False)

    assert query._with_options == ()
    assert query1._with_options != ()

@ -2016,6 +2031,7 @@ def test_Trade_object_idem():
        'get_open_trades_without_assigned_fees',
        'get_open_order_trades',
        'get_trades',
+        'get_trades_query',
        'get_exit_reason_performance',
        'get_enter_tag_performance',
        'get_mix_tag_performance',

@ -2443,8 +2459,8 @@ def test_order_to_ccxt(limit_buy_order_open):

    order = Order.parse_from_ccxt_object(limit_buy_order_open, 'mocked', 'buy')
    order.ft_trade_id = 1
-    order.query.session.add(order)
-    Order.query.session.commit()
+    order.session.add(order)
+    Order.session.commit()

    order_resp = Order.order_by_id(limit_buy_order_open['id'])
    assert order_resp

@ -2546,7 +2562,7 @@ def test_recalc_trade_from_orders_dca(data) -> None:
        leverage=1.0,
        trading_mode=TradingMode.SPOT
    )
-    Trade.query.session.add(trade)
+    Trade.session.add(trade)

    for idx, (order, result) in enumerate(data['orders']):
        amount = order[1]

@ -2575,11 +2591,11 @@ def test_recalc_trade_from_orders_dca(data) -> None:
        trade.recalc_trade_from_orders()
        Trade.commit()

-        orders1 = Order.query.all()
+        orders1 = Order.session.scalars(select(Order)).all()
        assert orders1
        assert len(orders1) == idx + 1

-        trade = Trade.query.first()
+        trade = Trade.session.scalars(select(Trade)).first()
        assert trade
        assert len(trade.orders) == idx + 1
        if idx < len(data) - 1:

@ -2596,6 +2612,6 @@ def test_recalc_trade_from_orders_dca(data) -> None:
    assert pytest.approx(trade.close_profit_abs) == data['end_profit']
    assert pytest.approx(trade.close_profit) == data['end_profit_ratio']
    assert not trade.is_open
-    trade = Trade.query.first()
+    trade = Trade.session.scalars(select(Trade)).first()
    assert trade
    assert trade.open_order_id is None
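The persistence hunks above all follow the same SQLAlchemy 2.0 migration: the legacy Query API (Trade.query..., Order.query..., PairLock.query...) is replaced by select() statements executed through the scoped session. A minimal sketch of the pattern, assuming a freqtrade environment where init_db() has bound the session:

from sqlalchemy import select

from freqtrade.persistence import Trade, init_db

init_db('sqlite://')  # in-memory DB, as the tests use via default_conf['db_url']

# Legacy 1.x style (removed by this change):
#   trade = Trade.query.filter(Trade.id == 1).first()
# SQLAlchemy 2.0 style used in the diff:
trade = Trade.session.scalars(select(Trade).filter(Trade.id == 1)).first()
all_trades = Trade.session.scalars(select(Trade)).all()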
@ -50,8 +50,8 @@ def test_trade_fromjson():
        "stop_loss_ratio": -0.216,
        "stop_loss_pct": -21.6,
        "stoploss_order_id": null,
-        "stoploss_last_update": null,
-        "stoploss_last_update_timestamp": null,
+        "stoploss_last_update": "2022-10-18 09:13:42",
+        "stoploss_last_update_timestamp": 1666077222000,
        "initial_stop_loss_abs": 0.1981,
        "initial_stop_loss_ratio": -0.216,
        "initial_stop_loss_pct": -21.6,

@ -711,8 +711,8 @@ def test_PrecisionFilter_error(mocker, whitelist_conf) -> None:

def test_PerformanceFilter_error(mocker, whitelist_conf, caplog) -> None:
    whitelist_conf['pairlists'] = [{"method": "StaticPairList"}, {"method": "PerformanceFilter"}]
-    if hasattr(Trade, 'query'):
-        del Trade.query
+    if hasattr(Trade, 'session'):
+        del Trade.session
    mocker.patch(f'{EXMS}.exchange_has', MagicMock(return_value=True))
    exchange = get_patched_exchange(mocker, whitelist_conf)
    pm = PairListManager(exchange, whitelist_conf, MagicMock())

@ -14,7 +14,7 @@ def test_PairLocks(use_db):
    PairLocks.use_db = use_db
    # No lock should be present
    if use_db:
-        assert len(PairLock.query.all()) == 0
+        assert len(PairLock.get_all_locks().all()) == 0

    assert PairLocks.use_db == use_db

@ -88,13 +88,13 @@ def test_PairLocks(use_db):

    if use_db:
        locks = PairLocks.get_all_locks()
-        locks_db = PairLock.query.all()
+        locks_db = PairLock.get_all_locks().all()
        assert len(locks) == len(locks_db)
        assert len(locks_db) > 0
    else:
        # Nothing was pushed to the database
        assert len(PairLocks.get_all_locks()) > 0
-        assert len(PairLock.query.all()) == 0
+        assert len(PairLock.get_all_locks().all()) == 0
    # Reset use-db variable
    PairLocks.reset_locks()
    PairLocks.use_db = True

@ -107,7 +107,7 @@ def test_PairLocks_getlongestlock(use_db):
    # No lock should be present
    PairLocks.use_db = use_db
    if use_db:
-        assert len(PairLock.query.all()) == 0
+        assert len(PairLock.get_all_locks().all()) == 0

    assert PairLocks.use_db == use_db

@ -139,7 +139,7 @@ def test_PairLocks_reason(use_db):
    PairLocks.use_db = use_db
    # No lock should be present
    if use_db:
-        assert len(PairLock.query.all()) == 0
+        assert len(PairLock.get_all_locks().all()) == 0

    assert PairLocks.use_db == use_db

@ -74,7 +74,7 @@ def generate_mock_trade(pair: str, fee: float, is_open: bool,
        trade.close(close_price)
        trade.exit_reason = exit_reason

-    Trade.query.session.add(trade)
+    Trade.session.add(trade)
    Trade.commit()
    return trade

@ -4,6 +4,7 @@ from unittest.mock import ANY, MagicMock, PropertyMock

import pytest
from numpy import isnan
+from sqlalchemy import select

from freqtrade.edge import PairInfo
from freqtrade.enums import SignalDirection, State, TradingMode

@ -87,6 +88,9 @@ def test_rpc_trade_status(default_conf, ticker, fee, mocker) -> None:
        'is_short': False,
        'funding_fees': 0.0,
        'trading_mode': TradingMode.SPOT,
+        'amount_precision': 8.0,
+        'price_precision': 8.0,
+        'precision_mode': 2,
        'orders': [{
            'amount': 91.07468123, 'average': 1.098e-05, 'safe_price': 1.098e-05,
            'cost': 0.0009999999999054, 'filled': 91.07468123, 'ft_order_side': 'buy',

@ -124,17 +128,6 @@ def test_rpc_trade_status(default_conf, ticker, fee, mocker) -> None:
        'profit_pct': 0.0,
        'profit_abs': 0.0,
        'total_profit_abs': 0.0,
-        'stop_loss_abs': 0.0,
-        'stop_loss_pct': None,
-        'stop_loss_ratio': None,
-        'stoploss_current_dist': -1.099e-05,
-        'stoploss_current_dist_ratio': -1.0,
-        'stoploss_current_dist_pct': pytest.approx(-100.0),
-        'stoploss_entry_dist': -0.0010025,
-        'stoploss_entry_dist_ratio': -1.0,
-        'initial_stop_loss_abs': 0.0,
-        'initial_stop_loss_pct': None,
-        'initial_stop_loss_ratio': None,
        'open_order': '(limit buy rem=91.07468123)',
    })
    response_unfilled['orders'][0].update({

@ -354,7 +347,7 @@ def test_rpc_delete_trade(mocker, default_conf, fee, markets, caplog, is_short):
    with pytest.raises(RPCException, match='invalid argument'):
        rpc._rpc_delete('200')

-    trades = Trade.query.all()
+    trades = Trade.session.scalars(select(Trade)).all()
    trades[1].stoploss_order_id = '1234'
    trades[2].stoploss_order_id = '1234'
    assert len(trades) > 2

@ -717,7 +710,7 @@ def test_rpc_force_exit(default_conf, ticker, fee, mocker) -> None:
    mocker.patch(f'{EXMS}._dry_is_price_crossed', MagicMock(return_value=False))
    freqtradebot.enter_positions()
    # make an limit-buy open trade
-    trade = Trade.query.filter(Trade.id == '3').first()
+    trade = Trade.session.scalars(select(Trade).filter(Trade.id == '3')).first()
    filled_amount = trade.amount / 2
    # Fetch order - it's open first, and closed after cancel_order is called.
    mocker.patch(

@ -753,7 +746,7 @@ def test_rpc_force_exit(default_conf, ticker, fee, mocker) -> None:

    freqtradebot.config['max_open_trades'] = 3
    freqtradebot.enter_positions()
-    trade = Trade.query.filter(Trade.id == '2').first()
+    trade = Trade.session.scalars(select(Trade).filter(Trade.id == '2')).first()
    amount = trade.amount
    # make an limit-buy open trade, if there is no 'filled', don't sell it
    mocker.patch(

@ -771,7 +764,7 @@ def test_rpc_force_exit(default_conf, ticker, fee, mocker) -> None:
    assert cancel_order_mock.call_count == 2
    assert trade.amount == amount

-    trade = Trade.query.filter(Trade.id == '3').first()
+    trade = Trade.session.scalars(select(Trade).filter(Trade.id == '3')).first()

    # make an limit-sell open trade
    mocker.patch(

@ -1,6 +1,7 @@
|
|||||||
"""
|
"""
|
||||||
Unit test file for rpc/api_server.py
|
Unit test file for rpc/api_server.py
|
||||||
"""
|
"""
|
||||||
|
import asyncio
|
||||||
import logging
|
import logging
|
||||||
import time
|
import time
|
||||||
from datetime import datetime, timedelta, timezone
|
from datetime import datetime, timedelta, timezone
|
||||||
@ -14,6 +15,7 @@ from fastapi import FastAPI, WebSocketDisconnect
|
|||||||
from fastapi.exceptions import HTTPException
|
from fastapi.exceptions import HTTPException
|
||||||
from fastapi.testclient import TestClient
|
from fastapi.testclient import TestClient
|
||||||
from requests.auth import _basic_auth_str
|
from requests.auth import _basic_auth_str
|
||||||
|
from sqlalchemy import select
|
||||||
|
|
||||||
from freqtrade.__init__ import __version__
|
from freqtrade.__init__ import __version__
|
||||||
from freqtrade.enums import CandleType, RunMode, State, TradingMode
|
from freqtrade.enums import CandleType, RunMode, State, TradingMode
|
||||||
@ -298,10 +300,6 @@ def test_api_UvicornServer(mocker):
|
|||||||
s = UvicornServer(uvicorn.Config(MagicMock(), port=8080, host='127.0.0.1'))
|
s = UvicornServer(uvicorn.Config(MagicMock(), port=8080, host='127.0.0.1'))
|
||||||
assert thread_mock.call_count == 0
|
assert thread_mock.call_count == 0
|
||||||
|
|
||||||
s.install_signal_handlers()
|
|
||||||
# Original implementation starts a thread - make sure that's not the case
|
|
||||||
assert thread_mock.call_count == 0
|
|
||||||
|
|
||||||
# Fake started to avoid sleeping forever
|
# Fake started to avoid sleeping forever
|
||||||
s.started = True
|
s.started = True
|
||||||
s.run_in_thread()
|
s.run_in_thread()
|
||||||
@ -317,10 +315,6 @@ def test_api_UvicornServer_run(mocker):
|
|||||||
s = UvicornServer(uvicorn.Config(MagicMock(), port=8080, host='127.0.0.1'))
|
s = UvicornServer(uvicorn.Config(MagicMock(), port=8080, host='127.0.0.1'))
|
||||||
assert serve_mock.call_count == 0
|
assert serve_mock.call_count == 0
|
||||||
|
|
||||||
s.install_signal_handlers()
|
|
||||||
# Original implementation starts a thread - make sure that's not the case
|
|
||||||
assert serve_mock.call_count == 0
|
|
||||||
|
|
||||||
# Fake started to avoid sleeping forever
|
# Fake started to avoid sleeping forever
|
||||||
s.started = True
|
s.started = True
|
||||||
s.run()
|
s.run()
|
||||||
@ -330,13 +324,10 @@ def test_api_UvicornServer_run(mocker):
|
|||||||
def test_api_UvicornServer_run_no_uvloop(mocker, import_fails):
|
def test_api_UvicornServer_run_no_uvloop(mocker, import_fails):
|
||||||
serve_mock = mocker.patch('freqtrade.rpc.api_server.uvicorn_threaded.UvicornServer.serve',
|
serve_mock = mocker.patch('freqtrade.rpc.api_server.uvicorn_threaded.UvicornServer.serve',
|
||||||
get_mock_coro(None))
|
get_mock_coro(None))
|
||||||
|
asyncio.set_event_loop(asyncio.new_event_loop())
|
||||||
s = UvicornServer(uvicorn.Config(MagicMock(), port=8080, host='127.0.0.1'))
|
s = UvicornServer(uvicorn.Config(MagicMock(), port=8080, host='127.0.0.1'))
|
||||||
assert serve_mock.call_count == 0
|
assert serve_mock.call_count == 0
|
||||||
|
|
||||||
s.install_signal_handlers()
|
|
||||||
# Original implementation starts a thread - make sure that's not the case
|
|
||||||
assert serve_mock.call_count == 0
|
|
||||||
|
|
||||||
# Fake started to avoid sleeping forever
|
# Fake started to avoid sleeping forever
|
||||||
s.started = True
|
s.started = True
|
||||||
s.run()
|
s.run()
|
||||||
@@ -624,7 +615,7 @@ def test_api_trades(botclient, mocker, fee, markets, is_short):
 assert rc.json()['offset'] == 0

 create_mock_trades(fee, is_short=is_short)
-Trade.query.session.flush()
+Trade.session.flush()

 rc = client_get(client, f"{BASE_URI}/trades")
 assert_response(rc)

@@ -652,7 +643,7 @@ def test_api_trade_single(botclient, mocker, fee, ticker, markets, is_short):
 assert_response(rc, 404)
 assert rc.json()['detail'] == 'Trade not found.'

-Trade.query.session.rollback()
+Trade.rollback()
 create_mock_trades(fee, is_short=is_short)

 rc = client_get(client, f"{BASE_URI}/trade/3")

@@ -677,7 +668,7 @@ def test_api_delete_trade(botclient, mocker, fee, markets, is_short):
 create_mock_trades(fee, is_short=is_short)

 ftbot.strategy.order_types['stoploss_on_exchange'] = True
-trades = Trade.query.all()
+trades = Trade.session.scalars(select(Trade)).all()
 trades[1].stoploss_order_id = '1234'
 Trade.commit()
 assert len(trades) > 2

@@ -685,7 +676,7 @@ def test_api_delete_trade(botclient, mocker, fee, markets, is_short):
 rc = client_delete(client, f"{BASE_URI}/trades/1")
 assert_response(rc)
 assert rc.json()['result_msg'] == 'Deleted trade 1. Closed 1 open orders.'
-assert len(trades) - 1 == len(Trade.query.all())
+assert len(trades) - 1 == len(Trade.session.scalars(select(Trade)).all())
 assert cancel_mock.call_count == 1

 cancel_mock.reset_mock()

@@ -694,11 +685,11 @@ def test_api_delete_trade(botclient, mocker, fee, markets, is_short):
 assert_response(rc, 502)
 assert cancel_mock.call_count == 0

-assert len(trades) - 1 == len(Trade.query.all())
+assert len(trades) - 1 == len(Trade.session.scalars(select(Trade)).all())
 rc = client_delete(client, f"{BASE_URI}/trades/2")
 assert_response(rc)
 assert rc.json()['result_msg'] == 'Deleted trade 2. Closed 2 open orders.'
-assert len(trades) - 2 == len(Trade.query.all())
+assert len(trades) - 2 == len(Trade.session.scalars(select(Trade)).all())
 assert stoploss_mock.call_count == 1

 rc = client_delete(client, f"{BASE_URI}/trades/502")
@@ -943,7 +934,7 @@ def test_api_performance(botclient, fee):
 )
 trade.close_profit = trade.calc_profit_ratio(trade.close_rate)
 trade.close_profit_abs = trade.calc_profit(trade.close_rate)
-Trade.query.session.add(trade)
+Trade.session.add(trade)

 trade = Trade(
 pair='XRP/ETH',

@@ -960,7 +951,7 @@ def test_api_performance(botclient, fee):
 trade.close_profit = trade.calc_profit_ratio(trade.close_rate)
 trade.close_profit_abs = trade.calc_profit(trade.close_rate)

-Trade.query.session.add(trade)
+Trade.session.add(trade)
 Trade.commit()

 rc = client_get(client, f"{BASE_URI}/performance")

@@ -1065,6 +1056,9 @@ def test_api_status(botclient, mocker, ticker, fee, markets, is_short,
 'liquidation_price': None,
 'funding_fees': None,
 'trading_mode': ANY,
+'amount_precision': None,
+'price_precision': None,
+'precision_mode': None,
 'orders': [ANY],
 }

@@ -1270,6 +1264,9 @@ def test_api_force_entry(botclient, mocker, fee, endpoint):
 'liquidation_price': None,
 'funding_fees': None,
 'trading_mode': 'spot',
+'amount_precision': None,
+'price_precision': None,
+'precision_mode': None,
 'orders': [],
 }

@@ -1290,7 +1287,7 @@ def test_api_forceexit(botclient, mocker, ticker, fee, markets):
 data={"tradeid": "1"})
 assert_response(rc, 502)
 assert rc.json() == {"error": "Error querying /api/v1/forceexit: invalid argument"}
-Trade.query.session.rollback()
+Trade.rollback()

 create_mock_trades(fee)
 trade = Trade.get_trades([Trade.id == 5]).first()

@@ -1299,7 +1296,7 @@ def test_api_forceexit(botclient, mocker, ticker, fee, markets):
 data={"tradeid": "5", "ordertype": "market", "amount": 23})
 assert_response(rc)
 assert rc.json() == {'result': 'Created sell order for trade 5.'}
-Trade.query.session.rollback()
+Trade.rollback()

 trade = Trade.get_trades([Trade.id == 5]).first()
 assert pytest.approx(trade.amount) == 100

@@ -1309,7 +1306,7 @@ def test_api_forceexit(botclient, mocker, ticker, fee, markets):
 data={"tradeid": "5"})
 assert_response(rc)
 assert rc.json() == {'result': 'Created sell order for trade 5.'}
-Trade.query.session.rollback()
+Trade.rollback()

 trade = Trade.get_trades([Trade.id == 5]).first()
 assert trade.is_open is False
@@ -14,6 +14,7 @@ import arrow
 import pytest
 import time_machine
 from pandas import DataFrame
+from sqlalchemy import select
 from telegram import Chat, Message, ReplyKeyboardMarkup, Update
 from telegram.error import BadRequest, NetworkError, TelegramError

@@ -302,8 +303,7 @@ def test_telegram_status_closed_trade(default_conf, update, mocker, fee) -> None
 telegram, _, msg_mock = get_telegram_testobject(mocker, default_conf)

 create_mock_trades(fee)
-trades = Trade.get_trades([Trade.is_open.is_(False)])
-trade = trades[0]
+trade = Trade.get_trades([Trade.is_open.is_(False)]).first()
 context = MagicMock()
 context.args = [str(trade.id)]
 telegram._status(update=update, context=context)

@@ -652,7 +652,7 @@ def test_monthly_handle(default_conf_usdt, update, ticker, fee, mocker, time_mac

 # The one-digit months should contain a zero, Eg: September 2021 = "2021-09"
 # Since we loaded the last 12 months, any month should appear
-assert str('-09') in msg_mock.call_args_list[0][0][0]
+assert '-09' in msg_mock.call_args_list[0][0][0]

 # Try invalid data
 msg_mock.reset_mock()

@@ -671,10 +671,11 @@ def test_monthly_handle(default_conf_usdt, update, ticker, fee, mocker, time_mac
 context = MagicMock()
 context.args = ["february"]
 telegram._monthly(update=update, context=context)
-assert str('Monthly Profit over the last 6 months</b>:') in msg_mock.call_args_list[0][0][0]
+assert 'Monthly Profit over the last 6 months</b>:' in msg_mock.call_args_list[0][0][0]


-def test_profit_handle(default_conf_usdt, update, ticker_usdt, ticker_sell_up, fee,
+def test_telegram_profit_handle(
+default_conf_usdt, update, ticker_usdt, ticker_sell_up, fee,
 limit_sell_order_usdt, mocker) -> None:
 mocker.patch('freqtrade.rpc.rpc.CryptoToFiatConverter._find_price', return_value=1.1)
 mocker.patch.multiple(
@@ -693,7 +694,7 @@ def test_profit_handle(default_conf_usdt, update, ticker_usdt, ticker_sell_up, f

 # Create some test data
 freqtradebot.enter_positions()
-trade = Trade.query.first()
+trade = Trade.session.scalars(select(Trade)).first()

 context = MagicMock()
 # Test with invalid 2nd argument (should silently pass)

@@ -710,6 +711,7 @@ def test_profit_handle(default_conf_usdt, update, ticker_usdt, ticker_sell_up, f
 # Update the ticker with a market going up
 mocker.patch(f'{EXMS}.fetch_ticker', ticker_sell_up)
 # Simulate fulfilled LIMIT_SELL order for trade
+trade = Trade.session.scalars(select(Trade)).first()
 oobj = Order.parse_from_ccxt_object(
 limit_sell_order_usdt, limit_sell_order_usdt['symbol'], 'sell')
 trade.orders.append(oobj)

@@ -946,7 +948,7 @@ def test_telegram_forceexit_handle(default_conf, update, ticker, fee,
 # Create some test data
 freqtradebot.enter_positions()

-trade = Trade.query.first()
+trade = Trade.session.scalars(select(Trade)).first()
 assert trade

 # Increase the price and sell it

@@ -1021,7 +1023,7 @@ def test_telegram_force_exit_down_handle(default_conf, update, ticker, fee,
 fetch_ticker=ticker_sell_down
 )

-trade = Trade.query.first()
+trade = Trade.session.scalars(select(Trade)).first()
 assert trade

 # /forceexit 1

@@ -1730,14 +1732,14 @@ def test_version_handle(default_conf, update, mocker) -> None:

 telegram._version(update=update, context=MagicMock())
 assert msg_mock.call_count == 1
-assert '*Version:* `{}`'.format(__version__) in msg_mock.call_args_list[0][0][0]
+assert f'*Version:* `{__version__}`' in msg_mock.call_args_list[0][0][0]

 msg_mock.reset_mock()
 freqtradebot.strategy.version = lambda: '1.1.1'

 telegram._version(update=update, context=MagicMock())
 assert msg_mock.call_count == 1
-assert '*Version:* `{}`'.format(__version__) in msg_mock.call_args_list[0][0][0]
+assert f'*Version:* `{__version__}`' in msg_mock.call_args_list[0][0][0]
 assert '*Strategy version: * `1.1.1`' in msg_mock.call_args_list[0][0][0]
@@ -50,6 +50,7 @@ class HyperoptableStrategy(StrategyTestV3):
 return prot

 bot_loop_started = False
+bot_started = False

 def bot_loop_start(self):
 self.bot_loop_started = True

@@ -58,6 +59,7 @@ class HyperoptableStrategy(StrategyTestV3):
 """
 Parameters can also be defined here ...
 """
+self.bot_started = True
 self.buy_rsi = IntParameter([0, 50], default=30, space='buy')

 def informative_pairs(self):
@@ -69,7 +69,7 @@ def test_load_strategy(default_conf, dataframe_1m):
 def test_load_strategy_base64(dataframe_1m, caplog, default_conf):
 filepath = Path(__file__).parents[2] / 'freqtrade/templates/sample_strategy.py'
 encoded_string = urlsafe_b64encode(filepath.read_bytes()).decode("utf-8")
-default_conf.update({'strategy': 'SampleStrategy:{}'.format(encoded_string)})
+default_conf.update({'strategy': f'SampleStrategy:{encoded_string}'})

 strategy = StrategyResolver.load_strategy(default_conf)
 assert 'rsi' in strategy.advise_indicators(dataframe_1m, {'pair': 'ETH/BTC'})
(File diff suppressed because it is too large.)
@@ -1,6 +1,7 @@
 from unittest.mock import MagicMock

 import pytest
+from sqlalchemy import select

 from freqtrade.enums import ExitCheckTuple, ExitType, TradingMode
 from freqtrade.persistence import Trade

@@ -91,7 +92,7 @@ def test_may_execute_exit_stoploss_on_exchange_multi(default_conf, ticker, fee,
 assert freqtrade.strategy.confirm_trade_exit.call_count == 0
 wallets_mock.reset_mock()

-trades = Trade.query.all()
+trades = Trade.session.scalars(select(Trade)).all()
 # Make sure stoploss-order is open and trade is bought (since we mock update_trade_state)
 for trade in trades:
 stoploss_order_closed['id'] = '3'

@@ -179,13 +180,13 @@ def test_forcebuy_last_unlimited(default_conf, ticker, fee, mocker, balance_rati
 n = freqtrade.enter_positions()
 assert n == 4

-trades = Trade.query.all()
+trades = Trade.session.scalars(select(Trade)).all()
 assert len(trades) == 4
 assert freqtrade.wallets.get_trade_stake_amount('XRP/BTC') == result1

 rpc._rpc_force_entry('TKN/BTC', None)

-trades = Trade.query.all()
+trades = Trade.session.scalars(select(Trade)).all()
 assert len(trades) == 5

 for trade in trades:
@@ -385,12 +386,12 @@ def test_dca_order_adjust(default_conf_usdt, ticker_usdt, leverage, fee, mocker)
 assert trade.open_order_id is not None
 assert pytest.approx(trade.stake_amount) == 60
 assert trade.open_rate == 1.96
-assert trade.stop_loss_pct is None
-assert trade.stop_loss == 0.0
+assert trade.stop_loss_pct == -0.1
+assert pytest.approx(trade.stop_loss) == trade.open_rate * (1 - 0.1 / leverage)
+assert pytest.approx(trade.initial_stop_loss) == trade.open_rate * (1 - 0.1 / leverage)
+assert trade.initial_stop_loss_pct == -0.1
 assert trade.leverage == leverage
 assert trade.stake_amount == 60
-assert trade.initial_stop_loss == 0.0
-assert trade.initial_stop_loss_pct is None
 # No adjustment
 freqtrade.process()
 trade = Trade.get_trades().first()

@@ -406,11 +407,11 @@ def test_dca_order_adjust(default_conf_usdt, ticker_usdt, leverage, fee, mocker)
 assert trade.open_order_id is not None
 # Open rate is not adjusted yet
 assert trade.open_rate == 1.96
-assert trade.stop_loss_pct is None
-assert trade.stop_loss == 0.0
+assert trade.stop_loss_pct == -0.1
+assert pytest.approx(trade.stop_loss) == trade.open_rate * (1 - 0.1 / leverage)
+assert pytest.approx(trade.initial_stop_loss) == trade.open_rate * (1 - 0.1 / leverage)
 assert trade.stake_amount == 60
-assert trade.initial_stop_loss == 0.0
-assert trade.initial_stop_loss_pct is None
+assert trade.initial_stop_loss_pct == -0.1

 # Fill order
 mocker.patch(f'{EXMS}._dry_is_price_crossed', return_value=True)

@@ -423,7 +424,7 @@ def test_dca_order_adjust(default_conf_usdt, ticker_usdt, leverage, fee, mocker)
 assert pytest.approx(trade.stake_amount) == 60
 assert trade.stop_loss_pct == -0.1
 assert pytest.approx(trade.stop_loss) == 1.99 * (1 - 0.1 / leverage)
-assert pytest.approx(trade.initial_stop_loss) == 1.99 * (1 - 0.1 / leverage)
+assert pytest.approx(trade.initial_stop_loss) == 1.96 * (1 - 0.1 / leverage)
 assert trade.initial_stop_loss_pct == -0.1

 # 2nd order - not filling
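The rewritten assertions derive the (initial) stop loss from the open rate instead of expecting 0.0. A quick arithmetic check of the expected value, using the figures visible in the hunk (open rate 1.96, the 0.1 stoploss) and assuming leverage 1 for the illustration (the test itself parametrizes leverage):

open_rate = 1.96
stoploss = 0.10   # expressed as 0.1 in the asserts above
leverage = 1      # assumed here; the test runs with several leverage values
stop_loss = open_rate * (1 - stoploss / leverage)
print(stop_loss)  # 1.764, i.e. open_rate * (1 - 0.1 / leverage)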
tests/test_strategy_updater.py (new file, 214 lines)
@@ -0,0 +1,214 @@
# pragma pylint: disable=missing-docstring, protected-access, invalid-name

import re
import shutil
import sys
from pathlib import Path

import pytest

from freqtrade.commands.strategy_utils_commands import start_strategy_update
from freqtrade.strategy.strategyupdater import StrategyUpdater
from tests.conftest import get_args


if sys.version_info < (3, 9):
pytest.skip("StrategyUpdater is not compatible with Python 3.8", allow_module_level=True)


def test_strategy_updater_start(tmpdir, capsys) -> None:
# Effective test without mocks.
teststrats = Path(__file__).parent / 'strategy/strats'
tmpdirp = Path(tmpdir) / "strategies"
tmpdirp.mkdir()
shutil.copy(teststrats / 'strategy_test_v2.py', tmpdirp)
old_code = (teststrats / 'strategy_test_v2.py').read_text()

args = [
"strategy-updater",
"--userdir",
str(tmpdir),
"--strategy-list",
"StrategyTestV2"
]
pargs = get_args(args)
pargs['config'] = None

start_strategy_update(pargs)

assert Path(tmpdir / "strategies_orig_updater").exists()
# Backup file exists
assert Path(tmpdir / "strategies_orig_updater" / 'strategy_test_v2.py').exists()
# updated file exists
new_file = Path(tmpdirp / 'strategy_test_v2.py')
assert new_file.exists()
new_code = new_file.read_text()
assert 'INTERFACE_VERSION = 3' in new_code
assert 'INTERFACE_VERSION = 2' in old_code
captured = capsys.readouterr()

assert 'Conversion of strategy_test_v2.py started.' in captured.out
assert re.search(r'Conversion of strategy_test_v2\.py took .* seconds', captured.out)


def test_strategy_updater_methods(default_conf, caplog) -> None:

instance_strategy_updater = StrategyUpdater()
modified_code1 = instance_strategy_updater.update_code("""
class testClass(IStrategy):
def populate_buy_trend():
pass
def populate_sell_trend():
pass
def check_buy_timeout():
pass
def check_sell_timeout():
pass
def custom_sell():
pass
""")

assert "populate_entry_trend" in modified_code1
assert "populate_exit_trend" in modified_code1
assert "check_entry_timeout" in modified_code1
assert "check_exit_timeout" in modified_code1
assert "custom_exit" in modified_code1
assert "INTERFACE_VERSION = 3" in modified_code1


def test_strategy_updater_params(default_conf, caplog) -> None:
instance_strategy_updater = StrategyUpdater()

modified_code2 = instance_strategy_updater.update_code("""
ticker_interval = '15m'
buy_some_parameter = IntParameter(space='buy')
sell_some_parameter = IntParameter(space='sell')
""")

assert "timeframe" in modified_code2
# check for not editing hyperopt spaces
assert "space='buy'" in modified_code2
assert "space='sell'" in modified_code2


def test_strategy_updater_constants(default_conf, caplog) -> None:
instance_strategy_updater = StrategyUpdater()
modified_code3 = instance_strategy_updater.update_code("""
use_sell_signal = True
sell_profit_only = True
sell_profit_offset = True
ignore_roi_if_buy_signal = True
forcebuy_enable = True
""")

assert "use_exit_signal" in modified_code3
assert "exit_profit_only" in modified_code3
assert "exit_profit_offset" in modified_code3
assert "ignore_roi_if_entry_signal" in modified_code3
assert "force_entry_enable" in modified_code3


def test_strategy_updater_df_columns(default_conf, caplog) -> None:
instance_strategy_updater = StrategyUpdater()
modified_code = instance_strategy_updater.update_code("""
dataframe.loc[reduce(lambda x, y: x & y, conditions), ["buy", "buy_tag"]] = (1, "buy_signal_1")
dataframe.loc[reduce(lambda x, y: x & y, conditions), 'sell'] = 1
""")

assert "enter_long" in modified_code
assert "exit_long" in modified_code
assert "enter_tag" in modified_code


def test_strategy_updater_method_params(default_conf, caplog) -> None:
instance_strategy_updater = StrategyUpdater()
modified_code = instance_strategy_updater.update_code("""
def confirm_trade_exit(sell_reason: str):
nr_orders = trade.nr_of_successful_buys
pass
""")
assert "exit_reason" in modified_code
assert "nr_orders = trade.nr_of_successful_entries" in modified_code


def test_strategy_updater_dicts(default_conf, caplog) -> None:
instance_strategy_updater = StrategyUpdater()
modified_code = instance_strategy_updater.update_code("""
order_time_in_force = {
'buy': 'gtc',
'sell': 'ioc'
}
order_types = {
'buy': 'limit',
'sell': 'market',
'stoploss': 'market',
'stoploss_on_exchange': False
}
unfilledtimeout = {
'buy': 1,
'sell': 2
}
""")

assert "'entry': 'gtc'" in modified_code
assert "'exit': 'ioc'" in modified_code
assert "'entry': 'limit'" in modified_code
assert "'exit': 'market'" in modified_code
assert "'entry': 1" in modified_code
assert "'exit': 2" in modified_code


def test_strategy_updater_comparisons(default_conf, caplog) -> None:
instance_strategy_updater = StrategyUpdater()
modified_code = instance_strategy_updater.update_code("""
def confirm_trade_exit(sell_reason):
if (sell_reason == 'stop_loss'):
pass
""")
assert "exit_reason" in modified_code
assert "exit_reason == 'stop_loss'" in modified_code


def test_strategy_updater_strings(default_conf, caplog) -> None:
instance_strategy_updater = StrategyUpdater()

modified_code = instance_strategy_updater.update_code("""
sell_reason == 'sell_signal'
sell_reason == 'force_sell'
sell_reason == 'emergency_sell'
""")

# those tests currently don't work, next in line.
assert "exit_signal" in modified_code
assert "exit_reason" in modified_code
assert "force_exit" in modified_code
assert "emergency_exit" in modified_code


def test_strategy_updater_comments(default_conf, caplog) -> None:
instance_strategy_updater = StrategyUpdater()
modified_code = instance_strategy_updater.update_code("""
# This is the 1st comment
import talib.abstract as ta
# This is the 2nd comment
import freqtrade.vendor.qtpylib.indicators as qtpylib


class someStrategy(IStrategy):
INTERFACE_VERSION = 2
# This is the 3rd comment
# This attribute will be overridden if the config file contains "minimal_roi"
minimal_roi = {
"0": 0.50
}

# This is the 4th comment
stoploss = -0.1
""")

assert "This is the 1st comment" in modified_code
assert "This is the 2nd comment" in modified_code
assert "This is the 3rd comment" in modified_code
assert "INTERFACE_VERSION = 3" in modified_code
# currently still missing:
# Webhook terminology, Telegram notification settings, Strategy/Config settings
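The new test file exercises the updater both through the CLI entry point (start_strategy_update, driven by the strategy-updater subcommand arguments shown above) and directly through StrategyUpdater.update_code(). A minimal direct-use sketch, assuming it runs inside a freqtrade checkout on Python 3.9+; the MyOldStrategy snippet is hypothetical, for illustration only:

from freqtrade.strategy.strategyupdater import StrategyUpdater

legacy_code = """
class MyOldStrategy(IStrategy):
    INTERFACE_VERSION = 2

    def populate_buy_trend(self, dataframe, metadata):
        dataframe.loc[:, 'buy'] = 1
        return dataframe
"""

# Per the tests above, the updater renames v2-era callbacks, columns and
# constants to their v3 equivalents, e.g. populate_buy_trend ->
# populate_entry_trend, 'buy' -> 'enter_long', INTERFACE_VERSION 2 -> 3.
updated = StrategyUpdater().update_code(legacy_code)
print(updated)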
BIN  tests/testdata/XRP_ETH-trades.feather  (vendored, new file)
Binary file not shown.