Merge branch 'develop' into feat/short

commit ddfbe55e7c

.github/dependabot.yml (8 changes)
@@ -5,9 +5,17 @@ updates:
     schedule:
       interval: daily
     open-pull-requests-limit: 10

   - package-ecosystem: pip
     directory: "/"
     schedule:
       interval: weekly
     open-pull-requests-limit: 10
     target-branch: develop
+
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    open-pull-requests-limit: 10
+    target-branch: develop
.github/workflows/ci.yml (102 changes)
@@ -3,7 +3,6 @@ name: Freqtrade CI
 on:
   push:
     branches:
-      - master
       - stable
       - develop
     tags:
@@ -20,7 +19,7 @@ jobs:
     strategy:
       matrix:
         os: [ ubuntu-18.04, ubuntu-20.04 ]
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ["3.7", "3.8", "3.9", "3.10"]

     steps:
     - uses: actions/checkout@v2
@@ -39,7 +38,7 @@ jobs:

     - name: pip cache (linux)
       uses: actions/cache@v2
-      if: startsWith(matrix.os, 'ubuntu')
+      if: runner.os == 'Linux'
       with:
         path: ~/.cache/pip
         key: test-${{ matrix.os }}-${{ matrix.python-version }}-pip
@@ -50,8 +49,9 @@ jobs:
        cd build_helpers && ./install_ta-lib.sh ${HOME}/dependencies/; cd ..

     - name: Installation - *nix
+      if: runner.os == 'Linux'
       run: |
-        python -m pip install --upgrade pip
+        python -m pip install --upgrade pip wheel
         export LD_LIBRARY_PATH=${HOME}/dependencies/lib:$LD_LIBRARY_PATH
         export TA_LIBRARY_PATH=${HOME}/dependencies/lib
         export TA_INCLUDE_PATH=${HOME}/dependencies/include
@@ -69,7 +69,7 @@ jobs:
       if: matrix.python-version == '3.9'

     - name: Coveralls
-      if: (startsWith(matrix.os, 'ubuntu-20') && matrix.python-version == '3.8')
+      if: (runner.os == 'Linux' && matrix.python-version == '3.8')
       env:
         # Coveralls token. Not used as secret due to github not providing secrets to forked repositories
         COVERALLS_REPO_TOKEN: 6D1m0xupS3FgutfuGao8keFf9Hc0FpIXu
@@ -101,23 +101,20 @@ jobs:
       run: |
         mypy freqtrade scripts

-    - name: Slack Notification
-      uses: lazy-actions/slatify@v3.0.0
+    - name: Discord notification
+      uses: rjstone/discord-webhook-notify@v1
       if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
       with:
-        type: ${{ job.status }}
-        job_name: '*Freqtrade CI ${{ matrix.os }}*'
-        mention: 'here'
-        mention_if: 'failure'
-        channel: '#notifications'
-        url: ${{ secrets.SLACK_WEBHOOK }}
+        severity: error
+        details: Freqtrade CI failed on ${{ matrix.os }}
+        webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}

   build_macos:
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
         os: [ macos-latest ]
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ["3.7", "3.8", "3.9", "3.10"]

     steps:
     - uses: actions/checkout@v2
@@ -136,7 +133,7 @@ jobs:

     - name: pip cache (macOS)
       uses: actions/cache@v2
-      if: startsWith(matrix.os, 'macOS')
+      if: runner.os == 'macOS'
       with:
         path: ~/Library/Caches/pip
         key: test-${{ matrix.os }}-${{ matrix.python-version }}-pip
@@ -147,10 +144,11 @@ jobs:
        cd build_helpers && ./install_ta-lib.sh ${HOME}/dependencies/; cd ..

     - name: Installation - macOS
+      if: runner.os == 'macOS'
       run: |
         brew update
         brew install hdf5 c-blosc
-        python -m pip install --upgrade pip
+        python -m pip install --upgrade pip wheel
         export LD_LIBRARY_PATH=${HOME}/dependencies/lib:$LD_LIBRARY_PATH
         export TA_LIBRARY_PATH=${HOME}/dependencies/lib
         export TA_INCLUDE_PATH=${HOME}/dependencies/include
@@ -162,7 +160,7 @@ jobs:
        pytest --random-order --cov=freqtrade --cov-config=.coveragerc

     - name: Coveralls
-      if: (startsWith(matrix.os, 'ubuntu-20') && matrix.python-version == '3.8')
+      if: (runner.os == 'Linux' && matrix.python-version == '3.8')
       env:
         # Coveralls token. Not used as secret due to github not providing secrets to forked repositories
         COVERALLS_REPO_TOKEN: 6D1m0xupS3FgutfuGao8keFf9Hc0FpIXu
@@ -194,17 +192,13 @@ jobs:
       run: |
         mypy freqtrade scripts

-    - name: Slack Notification
-      uses: lazy-actions/slatify@v3.0.0
+    - name: Discord notification
+      uses: rjstone/discord-webhook-notify@v1
       if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
       with:
-        type: ${{ job.status }}
-        job_name: '*Freqtrade CI ${{ matrix.os }}*'
-        mention: 'here'
-        mention_if: 'failure'
-        channel: '#notifications'
-        url: ${{ secrets.SLACK_WEBHOOK }}
+        severity: error
+        details: Test Succeeded!
+        webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}


   build_windows:

@@ -212,7 +206,7 @@ jobs:
     strategy:
       matrix:
         os: [ windows-latest ]
-        python-version: [3.7, 3.8]
+        python-version: ["3.7", "3.8", "3.9", "3.10"]

     steps:
     - uses: actions/checkout@v2
@@ -224,7 +218,6 @@ jobs:

     - name: Pip cache (Windows)
       uses: actions/cache@preview
-      if: startsWith(runner.os, 'Windows')
       with:
         path: ~\AppData\Local\pip\Cache
         key: ${{ matrix.os }}-${{ matrix.python-version }}-pip
@@ -257,16 +250,13 @@ jobs:
       run: |
         mypy freqtrade scripts

-    - name: Slack Notification
-      uses: lazy-actions/slatify@v3.0.0
+    - name: Discord notification
+      uses: rjstone/discord-webhook-notify@v1
       if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
       with:
-        type: ${{ job.status }}
-        job_name: '*Freqtrade CI windows*'
-        mention: 'here'
-        mention_if: 'failure'
-        channel: '#notifications'
-        url: ${{ secrets.SLACK_WEBHOOK }}
+        severity: error
+        details: Test Failed
+        webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}

   docs_check:
     runs-on: ubuntu-20.04
@@ -288,14 +278,13 @@ jobs:
         pip install mkdocs
         mkdocs build

-    - name: Slack Notification
-      uses: lazy-actions/slatify@v3.0.0
+    - name: Discord notification
+      uses: rjstone/discord-webhook-notify@v1
       if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
       with:
-        type: ${{ job.status }}
-        job_name: '*Freqtrade Docs*'
-        channel: '#notifications'
-        url: ${{ secrets.SLACK_WEBHOOK }}
+        severity: error
+        details: Freqtrade doc test failed!
+        webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}

   cleanup-prior-runs:
     runs-on: ubuntu-20.04
@@ -306,7 +295,7 @@ jobs:
       env:
         GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"

-  # Notify on slack only once - when CI completes (and after deploy) in case it's successfull
+  # Notify only once - when CI completes (and after deploy) in case it's successfull
   notify-complete:
     needs: [ build_linux, build_macos, build_windows, docs_check ]
     runs-on: ubuntu-20.04
@@ -320,14 +309,13 @@ jobs:
       env:
         GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

-    - name: Slack Notification
-      uses: lazy-actions/slatify@v3.0.0
+    - name: Discord notification
+      uses: rjstone/discord-webhook-notify@v1
       if: always() && steps.check.outputs.has-permission && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
       with:
-        type: ${{ job.status }}
-        job_name: '*Freqtrade CI*'
-        channel: '#notifications'
-        url: ${{ secrets.SLACK_WEBHOOK }}
+        severity: info
+        details: Test Completed!
+        webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}

   deploy:
     needs: [ build_linux, build_macos, build_windows, docs_check ]
@@ -385,7 +373,7 @@ jobs:

     - name: Set up Docker Buildx
       id: buildx
-      uses: crazy-max/ghaction-docker-buildx@v1
+      uses: crazy-max/ghaction-docker-buildx@v3.3.1
       with:
         buildx-version: latest
         qemu-version: latest
@@ -400,17 +388,13 @@ jobs:
       run: |
         build_helpers/publish_docker_multi.sh

-    - name: Slack Notification
-      uses: lazy-actions/slatify@v3.0.0
+    - name: Discord notification
+      uses: rjstone/discord-webhook-notify@v1
       if: always() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
       with:
-        type: ${{ job.status }}
-        job_name: '*Freqtrade CI Deploy*'
-        mention: 'here'
-        mention_if: 'failure'
-        channel: '#notifications'
-        url: ${{ secrets.SLACK_WEBHOOK }}
+        severity: info
+        details: Deploy Succeeded!
+        webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}


   deploy_arm:
.github/workflows/docker_update_readme.yml (2 changes)
@@ -10,7 +10,7 @@ jobs:
     steps:
       - uses: actions/checkout@v1
       - name: Docker Hub Description
-        uses: peter-evans/dockerhub-description@v2.1.0
+        uses: peter-evans/dockerhub-description@v2.4.3
        env:
          DOCKERHUB_USERNAME: ${{ secrets.DOCKER_USERNAME }}
          DOCKERHUB_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
.travis.yml (55 changes, file removed)
@@ -1,55 +0,0 @@
-os:
-  - linux
-dist: bionic
-language: python
-python:
-  - 3.8
-services:
-  - docker
-env:
-  global:
-    - IMAGE_NAME=freqtradeorg/freqtrade
-install:
-  - cd build_helpers && ./install_ta-lib.sh ${HOME}/dependencies; cd ..
-  - export LD_LIBRARY_PATH=${HOME}/dependencies/lib:$LD_LIBRARY_PATH
-  - export TA_LIBRARY_PATH=${HOME}/dependencies/lib
-  - export TA_INCLUDE_PATH=${HOME}/dependencies/include
-  - pip install -r requirements-dev.txt
-  - pip install -e .
-jobs:
-
-  include:
-    - stage: tests
-      script:
-        - pytest --random-order --cov=freqtrade --cov-config=.coveragerc
-        # Allow failure for coveralls
-        # - coveralls || true
-      name: pytest
-    - script:
-        - cp config_examples/config_bittrex.example.json config.json
-        - freqtrade create-userdir --userdir user_data
-        - freqtrade backtesting --datadir tests/testdata --strategy SampleStrategy
-      name: backtest
-    - script:
-        - cp config_examples/config_bittrex.example.json config.json
-        - freqtrade create-userdir --userdir user_data
-        - freqtrade hyperopt --datadir tests/testdata -e 5 --strategy SampleStrategy --hyperopt-loss SharpeHyperOptLossDaily
-      name: hyperopt
-    - script: flake8
-      name: flake8
-    - script:
-        # Test Documentation boxes -
-        # !!! <TYPE>: is not allowed!
-        # !!! <TYPE> "title" - Title needs to be quoted!
-        - grep -Er '^!{3}\s\S+:|^!{3}\s\S+\s[^"]' docs/*; test $? -ne 0
-      name: doc syntax
-    - script: mypy freqtrade scripts
-      name: mypy
-
-notifications:
-  slack:
-    secure: bKLXmOrx8e2aPZl7W8DA5BdPAXWGpI5UzST33oc1G/thegXcDVmHBTJrBs4sZak6bgAclQQrdZIsRd2eFYzHLalJEaw6pk7hoAw8SvLnZO0ZurWboz7qg2+aZZXfK4eKl/VUe4sM9M4e/qxjkK+yWG7Marg69c4v1ypF7ezUi1fPYILYw8u0paaiX0N5UX8XNlXy+PBlga2MxDjUY70MuajSZhPsY2pDUvYnMY1D/7XN3cFW0g+3O8zXjF0IF4q1Z/1ASQe+eYjKwPQacE+O8KDD+ZJYoTOFBAPllrtpO1jnOPFjNGf3JIbVMZw4bFjIL0mSQaiSUaUErbU3sFZ5Or79rF93XZ81V7uEZ55vD8KMfR2CB1cQJcZcj0v50BxLo0InkFqa0Y8Nra3sbpV4fV5Oe8pDmomPJrNFJnX6ULQhQ1gTCe0M5beKgVms5SITEpt4/Y0CmLUr6iHDT0CUiyMIRWAXdIgbGh1jfaWOMksybeRevlgDsIsNBjXmYI1Sw2ZZR2Eo2u4R6zyfyjOMLwYJ3vgq9IrACv2w5nmf0+oguMWHf6iWi2hiOqhlAN1W74+3HsYQcqnuM3LGOmuCnPprV1oGBqkPXjIFGpy21gNx4vHfO1noLUyJnMnlu2L7SSuN1CdLsnjJ1hVjpJjPfqB4nn8g12x87TqM1bOm+3Q=
-cache:
-  pip: True
-  directories:
-    - $HOME/dependencies
@@ -1,4 +1,4 @@
-FROM python:3.9.9-slim-bullseye as base
+FROM python:3.10.0-slim-bullseye as base

 # Setup env
 ENV LANG C.UTF-8
@@ -197,7 +197,7 @@ To run this bot we recommend you a cloud instance with a minimum of:

 ### Software requirements

-- [Python 3.7.x](http://docs.python-guide.org/en/latest/starting/installation/)
+- [Python >= 3.7](http://docs.python-guide.org/en/latest/starting/installation/)
 - [pip](https://pip.pypa.io/en/stable/installing/)
 - [git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
 - [TA-Lib](https://mrjbq7.github.io/ta-lib/install.html)
Binary files changed (contents not shown):

  BIN  build_helpers/TA_Lib-0.4.22-cp310-cp310-win_amd64.whl  (new file)
  BIN  build_helpers/TA_Lib-0.4.22-cp37-cp37m-win_amd64.whl  (new file)
  BIN  build_helpers/TA_Lib-0.4.22-cp38-cp38-win_amd64.whl  (new file)
  BIN  build_helpers/TA_Lib-0.4.22-cp39-cp39-win_amd64.whl  (new file)
  Three further binary files changed (paths not listed in this view).
@@ -1,19 +1,21 @@
 # Downloads don't work automatically, since the URL is regenerated via javascript.
 # Downloaded from https://www.lfd.uci.edu/~gohlke/pythonlibs/#ta-lib

-python -m pip install --upgrade pip
+python -m pip install --upgrade pip wheel

 $pyv = python -c "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')"

 if ($pyv -eq '3.7') {
-    pip install build_helpers\TA_Lib-0.4.21-cp37-cp37m-win_amd64.whl
+    pip install build_helpers\TA_Lib-0.4.22-cp37-cp37m-win_amd64.whl
 }
 if ($pyv -eq '3.8') {
-    pip install build_helpers\TA_Lib-0.4.21-cp38-cp38-win_amd64.whl
+    pip install build_helpers\TA_Lib-0.4.22-cp38-cp38-win_amd64.whl
 }
 if ($pyv -eq '3.9') {
-    pip install build_helpers\TA_Lib-0.4.21-cp39-cp39-win_amd64.whl
+    pip install build_helpers\TA_Lib-0.4.22-cp39-cp39-win_amd64.whl
+}
+if ($pyv -eq '3.10') {
+    pip install build_helpers\TA_Lib-0.4.22-cp310-cp310-win_amd64.whl
 }

 pip install -r requirements-dev.txt
 pip install -e .
@@ -13,7 +13,7 @@ A sample of this can be found below, which is identical to the Default Hyperopt

 ``` python
 from datetime import datetime
-from typing import Dict
+from typing import Any, Dict

 from pandas import DataFrame

@@ -176,12 +176,15 @@ Log messages are send to `syslog` with the `user` facility. So you can see them

 On many systems `syslog` (`rsyslog`) fetches data from `journald` (and vice versa), so both `--logfile syslog` or `--logfile journald` can be used and the messages be viewed with both `journalctl` and a syslog viewer utility. You can combine this in any way which suites you better.

 For `rsyslog` the messages from the bot can be redirected into a separate dedicated log file. To achieve this, add

 ```
 if $programname startswith "freqtrade" then -/var/log/freqtrade.log
 ```

 to one of the rsyslog configuration files, for example at the end of the `/etc/rsyslog.d/50-default.conf`.

 For `syslog` (`rsyslog`), the reduction mode can be switched on. This will reduce the number of repeating messages. For instance, multiple bot Heartbeat messages will be reduced to a single message when nothing else happens with the bot. To achieve this, set in `/etc/rsyslog.conf`:

 ```
 # Filter duplicated messages
 $RepeatedMsgReduction on
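For readers unfamiliar with the mechanism: routing a Python program's log records to `syslog` with the `user` facility (which the rsyslog filter above then matches on the program name) goes through the standard-library `SysLogHandler`. The sketch below is a generic illustration only, not freqtrade's actual logging setup:

```python
import logging
from logging.handlers import SysLogHandler

# Send log records to the local syslog socket with the `user` facility,
# where rsyslog rules such as the $programname filter above can route them.
logger = logging.getLogger("freqtrade")
handler = SysLogHandler(address="/dev/log", facility=SysLogHandler.LOG_USER)
handler.setFormatter(logging.Formatter("freqtrade: %(levelname)s - %(message)s"))
logger.addHandler(handler)

logger.warning("Heartbeat message routed via syslog")
```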
|
@ -493,8 +493,8 @@ Since backtesting lacks some detailed information about what happens within a ca
|
|||||||
- ROI applies before trailing-stop, ensuring profits are "top-capped" at ROI if both ROI and trailing stop applies
|
- ROI applies before trailing-stop, ensuring profits are "top-capped" at ROI if both ROI and trailing stop applies
|
||||||
- Sell-reason does not explain if a trade was positive or negative, just what triggered the sell (this can look odd if negative ROI values are used)
|
- Sell-reason does not explain if a trade was positive or negative, just what triggered the sell (this can look odd if negative ROI values are used)
|
||||||
- Evaluation sequence (if multiple signals happen on the same candle)
|
- Evaluation sequence (if multiple signals happen on the same candle)
|
||||||
- ROI (if not stoploss)
|
|
||||||
- Sell-signal
|
- Sell-signal
|
||||||
|
- ROI (if not stoploss)
|
||||||
- Stoploss
|
- Stoploss
|
||||||
|
|
||||||
Taking these assumptions, backtesting tries to mirror real trading as closely as possible. However, backtesting will **never** replace running a strategy in dry-run mode.
|
Taking these assumptions, backtesting tries to mirror real trading as closely as possible. However, backtesting will **never** replace running a strategy in dry-run mode.
|
||||||
@@ -137,6 +137,7 @@ Mandatory parameters are marked as **Required**, which means that they are requi
 | `exchange.markets_refresh_interval` | The interval in minutes in which markets are reloaded. <br>*Defaults to `60` minutes.* <br> **Datatype:** Positive Integer
 | `exchange.skip_pair_validation` | Skip pairlist validation on startup.<br>*Defaults to `false`<br> **Datatype:** Boolean
 | `exchange.skip_open_order_update` | Skips open order updates on startup should the exchange cause problems. Only relevant in live conditions.<br>*Defaults to `false`<br> **Datatype:** Boolean
+| `exchange.unknown_fee_rate` | Fallback value to use when calculating trading fees. This can be useful for exchanges which have fees in non-tradable currencies. The value provided here will be multiplied with the "fee cost".<br>*Defaults to `None`<br> **Datatype:** float
 | `exchange.log_responses` | Log relevant exchange responses. For debug mode only - use with care.<br>*Defaults to `false`<br> **Datatype:** Boolean
 | `edge.*` | Please refer to [edge configuration document](edge.md) for detailed explanation.
 | `experimental.block_bad_exchanges` | Block exchanges known to not work with freqtrade. Leave on default unless you want to test if that exchange works now. <br>*Defaults to `true`.* <br> **Datatype:** Boolean
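The fee fallback introduced by the new `exchange.unknown_fee_rate` entry is easiest to see with a small worked example. The sketch below only illustrates the arithmetic (multiply the fee cost by the configured rate, divide by the order cost, as in the exchange change later in this diff); the function name and the literal values are made up for the example:

```python
from typing import Optional


def fee_to_quote(fee_cost: float, order_cost: float,
                 unknown_fee_rate: Optional[float]) -> Optional[float]:
    """Convert a fee paid in a non-tradable currency into a rate on the order cost."""
    if not unknown_fee_rate:
        # Without a configured fallback rate, no fee rate can be derived.
        return None
    # The configured rate acts as the exchange rate between the fee currency
    # and the stake currency: fee (in stake currency) divided by total order cost.
    return round((fee_cost * unknown_fee_rate) / order_cost, 8)


# Example: 0.5 POINT fee, POINT assumed worth 2.0 USDT, on a 100 USDT order.
print(fee_to_quote(0.5, 100.0, 2.0))  # 0.01 -> a 1% effective fee rate
```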
@@ -324,9 +324,8 @@ jupyter nbconvert --ClearOutputPreprocessor.enabled=True --to markdown freqtrade
 This documents some decisions taken for the CI Pipeline.

 * CI runs on all OS variants, Linux (ubuntu), macOS and Windows.
-* Docker images are build for the branches `stable` and `develop`.
+* Docker images are build for the branches `stable` and `develop`, and are built as multiarch builds, supporting multiple platforms via the same tag.
 * Docker images containing Plot dependencies are also available as `stable_plot` and `develop_plot`.
-* Raspberry PI Docker images are postfixed with `_pi` - so tags will be `:stable_pi` and `develop_pi`.
 * Docker images contain a file, `/freqtrade/freqtrade_commit` containing the commit this image is based of.
 * Full docker image rebuilds are run once a week via schedule.
 * Deployments run on ubuntu.
@@ -199,6 +199,11 @@ OKEX requires a passphrase for each api key, you will therefore need to add this
 !!! Warning
     OKEX only provides 100 candles per api call. Therefore, the strategy will only have a pretty low amount of data available in backtesting mode.

+## Gate.io
+
+Gate.io allows the use of `POINT` to pay for fees. As this is not a tradable currency (no regular market available), automatic fee calculations will fail (and default to a fee of 0).
+The configuration parameter `exchange.unknown_fee_rate` can be used to specify the exchange rate between Point and the stake currency. Obviously, changing the stake-currency will also require changes to this value.
+
 ## All exchanges

 Should you experience constant errors with Nonce (like `InvalidNonce`), it is best to regenerate the API keys. Resetting Nonce is difficult and it's usually easier to regenerate the API keys.
@@ -196,7 +196,7 @@ Trade count is used as a tie breaker.

 You can use the `minutes` parameter to only consider performance of the past X minutes (rolling window).
 Not defining this parameter (or setting it to 0) will use all-time performance.

-The optional `min_profit` parameter defines the minimum profit a pair must have to be considered.
+The optional `min_profit` (as ratio -> a setting of `0.01` corresponds to 1%) parameter defines the minimum profit a pair must have to be considered.
 Pairs below this level will be filtered out.
 Using this parameter without `minutes` is highly discouraged, as it can lead to an empty pairlist without a way to recover.

@@ -206,7 +206,7 @@ Using this parameter without `minutes` is highly discouraged, as it can lead to
     {
         "method": "PerformanceFilter",
         "minutes": 1440, // rolling 24h
-        "min_profit": 0.01
+        "min_profit": 0.01 // minimal profit 1%
     }
 ],
 ```
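To make the interaction of `min_profit` with the `profit_ratio` sorting (changed later in this diff for `PerformanceFilter`) concrete, here is a rough pandas sketch of the idea; the toy data and the exact comparison are illustrative, not freqtrade's code:

```python
import pandas as pd

# Hypothetical whitelist plus past performance per pair.
list_df = pd.DataFrame({'pair': ['BTC/USDT', 'ETH/USDT', 'XRP/USDT']})
performance = pd.DataFrame({
    'pair': ['BTC/USDT', 'ETH/USDT'],
    'count': [10, 4],
    'profit_ratio': [0.02, -0.01],
})

min_profit = 0.01  # expressed as a ratio, i.e. 1%

# Sort by trade count and pair name as tie breakers, then by profit ratio descending,
# mirroring the PerformanceFilter change in this diff (profit -> profit_ratio).
sorted_df = (list_df.merge(performance, on='pair', how='left')
             .fillna(0)
             .sort_values(by=['count', 'pair'], ascending=True)
             .sort_values(by=['profit_ratio'], ascending=False))

# Pairs below the configured minimum profit are dropped from the list.
filtered = sorted_df[sorted_df['profit_ratio'] >= min_profit]['pair'].tolist()
print(filtered)  # ['BTC/USDT']
```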
@@ -420,16 +420,3 @@ open /Library/Developer/CommandLineTools/Packages/macOS_SDK_headers_for_macOS_10
 ```

 If this file is inexistent, then you're probably on a different version of MacOS, so you may need to consult the internet for specific resolution details.
-
-### MacOS installation error with python 3.9
-
-When using python 3.9 on macOS, it's currently necessary to install some os-level modules to allow dependencies to compile.
-The errors you'll see happen during installation and are related to the installation of `tables` or `blosc`.
-
-You can install the necessary libraries with the following command:
-
-```bash
-brew install hdf5 c-blosc
-```
-
-After this, please run the installation (script) again.
@@ -1,4 +1,4 @@
 mkdocs==1.2.3
-mkdocs-material==8.0.4
+mkdocs-material==8.1.3
 mdx_truly_sane_lists==1.2
 pymdown-extensions==9.1
@@ -112,6 +112,7 @@ Possible parameters are:
 * `open_date`
 * `stake_amount`
 * `stake_currency`
+* `base_currency`
 * `fiat_currency`
 * `order_type`
 * `current_rate`
@@ -132,6 +133,7 @@ Possible parameters are:
 * `open_date`
 * `stake_amount`
 * `stake_currency`
+* `base_currency`
 * `fiat_currency`
 * `order_type`
 * `current_rate`
@@ -152,6 +154,7 @@ Possible parameters are:
 * `open_date`
 * `stake_amount`
 * `stake_currency`
+* `base_currency`
 * `fiat_currency`
 * `order_type`
 * `current_rate`
@@ -173,6 +176,7 @@ Possible parameters are:
 * `profit_amount`
 * `profit_ratio`
 * `stake_currency`
+* `base_currency`
 * `fiat_currency`
 * `sell_reason`
 * `order_type`
@@ -197,6 +201,7 @@ Possible parameters are:
 * `profit_amount`
 * `profit_ratio`
 * `stake_currency`
+* `base_currency`
 * `fiat_currency`
 * `sell_reason`
 * `order_type`
@@ -221,6 +226,7 @@ Possible parameters are:
 * `profit_amount`
 * `profit_ratio`
 * `stake_currency`
+* `base_currency`
 * `fiat_currency`
 * `sell_reason`
 * `order_type`
@@ -23,9 +23,9 @@ git clone https://github.com/freqtrade/freqtrade.git

 Install ta-lib according to the [ta-lib documentation](https://github.com/mrjbq7/ta-lib#windows).

-As compiling from source on windows has heavy dependencies (requires a partial visual studio installation), there is also a repository of unofficial pre-compiled windows Wheels [here](https://www.lfd.uci.edu/~gohlke/pythonlibs/#ta-lib), which need to be downloaded and installed using `pip install TA_Lib-0.4.21-cp38-cp38-win_amd64.whl` (make sure to use the version matching your python version).
+As compiling from source on windows has heavy dependencies (requires a partial visual studio installation), there is also a repository of unofficial pre-compiled windows Wheels [here](https://www.lfd.uci.edu/~gohlke/pythonlibs/#ta-lib), which need to be downloaded and installed using `pip install TA_Lib‑0.4.22‑cp38‑cp38‑win_amd64.whl` (make sure to use the version matching your python version).

-Freqtrade provides these dependencies for the latest 2 Python versions (3.7 and 3.8) and for 64bit Windows.
+Freqtrade provides these dependencies for the latest 3 Python versions (3.7, 3.8, 3.9 and 3.10) and for 64bit Windows.
 Other versions must be downloaded from the above link.

 ``` powershell
@@ -1,6 +1,6 @@
 from datetime import datetime, timezone

-from cachetools.ttl import TTLCache
+from cachetools import TTLCache


 class PeriodicCache(TTLCache):
@@ -401,6 +401,7 @@ CONF_SCHEMA = {
                 },
                 'uniqueItems': True
             },
+            'unknown_fee_rate': {'type': 'number'},
             'outdated_offset': {'type': 'integer', 'minimum': 1},
             'markets_refresh_interval': {'type': 'integer'},
             'ccxt_config': {'type': 'object'},
@@ -328,6 +328,7 @@ def combine_dataframes_with_mean(data: Dict[str, pd.DataFrame],
     :param column: Column in the original dataframes to use
     :return: DataFrame with the column renamed to the dict key, and a column
         named mean, containing the mean of all pairs.
+    :raise: ValueError if no data is provided.
     """
     df_comb = pd.concat([data[pair].set_index('date').rename(
         {column: pair}, axis=1)[pair] for pair in data], axis=1)
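The new `:raise:` note documents behaviour that comes straight from pandas: `pd.concat` refuses an empty sequence, which is exactly what the list comprehension produces when the `data` dict is empty. A two-line demonstration:

```python
import pandas as pd

# With no dataframes to combine, pandas raises the ValueError the docstring now documents.
try:
    pd.concat([], axis=1)
except ValueError as err:
    print(f"ValueError: {err}")  # "No objects to concatenate"
```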
@@ -254,7 +254,7 @@ class IDataHandler(ABC):
         enddate = pairdf.iloc[-1]['date']

         if timerange_startup:
-            self._validate_pairdata(pair, pairdf, timerange_startup)
+            self._validate_pairdata(pair, pairdf, timeframe, timerange_startup)
             pairdf = trim_dataframe(pairdf, timerange_startup)
         if self._check_empty_df(pairdf, pair, timeframe, warn_no_data):
             return pairdf
@@ -281,7 +281,7 @@ class IDataHandler(ABC):
             return True
         return False

-    def _validate_pairdata(self, pair, pairdata: DataFrame, timerange: TimeRange):
+    def _validate_pairdata(self, pair, pairdata: DataFrame, timeframe: str, timerange: TimeRange):
         """
         Validates pairdata for missing data at start end end and logs warnings.
         :param pairdata: Dataframe to validate
@@ -291,12 +291,12 @@ class IDataHandler(ABC):
         if timerange.starttype == 'date':
             start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
             if pairdata.iloc[0]['date'] > start:
-                logger.warning(f"Missing data at start for pair {pair}, "
+                logger.warning(f"Missing data at start for pair {pair} at {timeframe}, "
                                f"data starts at {pairdata.iloc[0]['date']:%Y-%m-%d %H:%M:%S}")
         if timerange.stoptype == 'date':
             stop = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
             if pairdata.iloc[-1]['date'] < stop:
-                logger.warning(f"Missing data at end for pair {pair}, "
+                logger.warning(f"Missing data at end for pair {pair} at {timeframe}, "
                                f"data ends at {pairdata.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}")

@@ -5,6 +5,7 @@ from freqtrade.exchange.exchange import Exchange
 # isort: on
 from freqtrade.exchange.bibox import Bibox
 from freqtrade.exchange.binance import Binance
+from freqtrade.exchange.bitpanda import Bitpanda
 from freqtrade.exchange.bittrex import Bittrex
 from freqtrade.exchange.bybit import Bybit
 from freqtrade.exchange.coinbasepro import Coinbasepro
freqtrade/exchange/bitpanda.py (new file, 37 lines)
@@ -0,0 +1,37 @@
+""" Bitpanda exchange subclass """
+import logging
+from datetime import datetime, timezone
+from typing import Dict, List, Optional
+
+from freqtrade.exchange import Exchange
+
+
+logger = logging.getLogger(__name__)
+
+
+class Bitpanda(Exchange):
+    """
+    Bitpanda exchange class. Contains adjustments needed for Freqtrade to work
+    with this exchange.
+    """
+
+    def get_trades_for_order(self, order_id: str, pair: str, since: datetime,
+                             params: Optional[Dict] = None) -> List:
+        """
+        Fetch Orders using the "fetch_my_trades" endpoint and filter them by order-id.
+        The "since" argument passed in is coming from the database and is in UTC,
+        as timezone-native datetime object.
+        From the python documentation:
+            > Naive datetime instances are assumed to represent local time
+        Therefore, calling "since.timestamp()" will get the UTC timestamp, after applying the
+        transformation from local timezone to UTC.
+        This works for timezones UTC+ since then the result will contain trades from a few hours
+        instead of from the last 5 seconds, however fails for UTC- timezones,
+        since we're then asking for trades with a "since" argument in the future.
+
+        :param order_id order_id: Order-id as given when creating the order
+        :param pair: Pair the order is for
+        :param since: datetime object of the order creation time. Assumes object is in UTC.
+        """
+        params = {'to': int(datetime.now(timezone.utc).timestamp() * 1000)}
+        return super().get_trades_for_order(order_id, pair, since, params)
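The timezone pitfall the docstring above describes can be reproduced with plain standard-library Python, independent of any exchange code:

```python
from datetime import datetime, timezone

# A naive datetime is interpreted as local time by .timestamp(); an aware one is not.
naive = datetime(2021, 12, 1, 12, 0, 0)
aware = naive.replace(tzinfo=timezone.utc)

print(naive.timestamp())  # shifted by the local UTC offset of the machine running this
print(aware.timestamp())  # 1638360000.0 - the true UTC timestamp
```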
@@ -4,9 +4,20 @@ import time
 from functools import wraps

 from freqtrade.exceptions import DDosProtection, RetryableOrderError, TemporaryError
+from freqtrade.mixins import LoggingMixin


 logger = logging.getLogger(__name__)
+__logging_mixin = None
+
+
+def _get_logging_mixin():
+    # Logging-mixin to cache kucoin responses
+    # Only to be used in retrier
+    global __logging_mixin
+    if not __logging_mixin:
+        __logging_mixin = LoggingMixin(logger)
+    return __logging_mixin


 # Maximum default retry count.
@@ -77,28 +88,33 @@ def calculate_backoff(retrycount, max_retries):
 def retrier_async(f):
     async def wrapper(*args, **kwargs):
         count = kwargs.pop('count', API_RETRY_COUNT)
+        kucoin = args[0].name == "Kucoin"  # Check if the exchange is KuCoin.
         try:
             return await f(*args, **kwargs)
         except TemporaryError as ex:
-            logger.warning('%s() returned exception: "%s"', f.__name__, ex)
+            msg = f'{f.__name__}() returned exception: "{ex}". '
             if count > 0:
-                logger.warning('retrying %s() still for %s times', f.__name__, count)
+                msg += f'Retrying still for {count} times.'
                 count -= 1
-                kwargs.update({'count': count})
+                kwargs['count'] = count
                 if isinstance(ex, DDosProtection):
-                    if "kucoin" in str(ex) and "429000" in str(ex):
+                    if kucoin and "429000" in str(ex):
                         # Temporary fix for 429000 error on kucoin
                         # see https://github.com/freqtrade/freqtrade/issues/5700 for details.
-                        logger.warning(
+                        _get_logging_mixin().log_once(
                             f"Kucoin 429 error, avoid triggering DDosProtection backoff delay. "
-                            f"{count} tries left before giving up")
+                            f"{count} tries left before giving up", logmethod=logger.warning)
+                        # Reset msg to avoid logging too many times.
+                        msg = ''
                     else:
                         backoff_delay = calculate_backoff(count + 1, API_RETRY_COUNT)
                         logger.info(f"Applying DDosProtection backoff delay: {backoff_delay}")
                         await asyncio.sleep(backoff_delay)
+                if msg:
+                    logger.warning(msg)
                 return await wrapper(*args, **kwargs)
             else:
-                logger.warning('Giving up retrying: %s()', f.__name__)
+                logger.warning(msg + 'Giving up.')
                 raise ex
     return wrapper

@@ -111,9 +127,9 @@ def retrier(_func=None, retries=API_RETRY_COUNT):
             try:
                 return f(*args, **kwargs)
             except (TemporaryError, RetryableOrderError) as ex:
-                logger.warning('%s() returned exception: "%s"', f.__name__, ex)
+                msg = f'{f.__name__}() returned exception: "{ex}". '
                 if count > 0:
-                    logger.warning('retrying %s() still for %s times', f.__name__, count)
+                    logger.warning(msg + f'Retrying still for {count} times.')
                     count -= 1
                     kwargs.update({'count': count})
                     if isinstance(ex, (DDosProtection, RetryableOrderError)):
@@ -123,7 +139,7 @@ def retrier(_func=None, retries=API_RETRY_COUNT):
                         time.sleep(backoff_delay)
                         return wrapper(*args, **kwargs)
                 else:
-                    logger.warning('Giving up retrying: %s()', f.__name__)
+                    logger.warning(msg + 'Giving up.')
                     raise ex
         return wrapper
     # Support both @retrier and @retrier(retries=2) syntax
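For context, `log_once` (from freqtrade's `LoggingMixin`) is used above so that a retried Kucoin 429000 response does not repeat the same warning on every retry. A simplified, generic version of that idea, assuming a TTL-based message cache, might look like this; it is a sketch, not the actual mixin:

```python
import logging
from cachetools import TTLCache

logger = logging.getLogger(__name__)


class LogOnce:
    """Remember recently emitted messages and drop repeats for a while."""

    def __init__(self, ttl: int = 600):
        self._seen = TTLCache(maxsize=1024, ttl=ttl)

    def log_once(self, message: str, logmethod=logger.warning) -> None:
        # Only emit the message if it has not been seen within the TTL window.
        if message not in self._seen:
            self._seen[message] = True
            logmethod(message)


log = LogOnce()
log.log_once("Kucoin 429 error, backing off")   # logged
log.log_once("Kucoin 429 error, backing off")   # suppressed for the next 10 minutes
```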
@@ -89,6 +89,8 @@ class Exchange:
         self._api_async: ccxt_async.Exchange = None
         self._markets: Dict = {}
         self._leverage_brackets: Dict = {}
+        self.loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(self.loop)

         self._config.update(config)

@@ -188,8 +190,10 @@ class Exchange:

     def close(self):
         logger.debug("Exchange object destroyed, closing async loop")
-        if self._api_async and inspect.iscoroutinefunction(self._api_async.close):
-            asyncio.get_event_loop().run_until_complete(self._api_async.close())
+        if (self._api_async and inspect.iscoroutinefunction(self._api_async.close)
+                and self._api_async.session):
+            logger.info("Closing async ccxt session.")
+            self.loop.run_until_complete(self._api_async.close())

     def _init_ccxt(self, exchange_config: Dict[str, Any], ccxt_module: CcxtModuleType = ccxt,
                    ccxt_kwargs: Dict = {}) -> ccxt.Exchange:
@@ -379,7 +383,7 @@ class Exchange:
     def _load_async_markets(self, reload: bool = False) -> None:
         try:
             if self._api_async:
-                asyncio.get_event_loop().run_until_complete(
+                self.loop.run_until_complete(
                     self._api_async.load_markets(reload=reload))

         except (asyncio.TimeoutError, ccxt.BaseError) as e:
@@ -1194,7 +1198,8 @@ class Exchange:
     # Fee handling

     @retrier
-    def get_trades_for_order(self, order_id: str, pair: str, since: datetime) -> List:
+    def get_trades_for_order(self, order_id: str, pair: str, since: datetime,
+                             params: Optional[Dict] = None) -> List:
         """
         Fetch Orders using the "fetch_my_trades" endpoint and filter them by order-id.
         The "since" argument passed in is coming from the database and is in UTC,
@@ -1218,8 +1223,10 @@ class Exchange:
         try:
             # Allow 5s offset to catch slight time offsets (discovered in #1185)
             # since needs to be int in milliseconds
+            _params = params if params else {}
             my_trades = self._api.fetch_my_trades(
-                pair, int((since.replace(tzinfo=timezone.utc).timestamp() - 5) * 1000))
+                pair, int((since.replace(tzinfo=timezone.utc).timestamp() - 5) * 1000),
+                params=_params)
             matched_trades = [trade for trade in my_trades if trade['order'] == order_id]

             self._log_exchange_response('get_trades_for_order', matched_trades)
@@ -1297,9 +1304,11 @@ class Exchange:
             tick = self.fetch_ticker(comb)

             fee_to_quote_rate = safe_value_fallback2(tick, tick, 'last', 'ask')
-            return round((order['fee']['cost'] * fee_to_quote_rate) / order['cost'], 8)
         except ExchangeError:
-            return None
+            fee_to_quote_rate = self._config['exchange'].get('unknown_fee_rate', None)
+            if not fee_to_quote_rate:
+                return None
+        return round((order['fee']['cost'] * fee_to_quote_rate) / order['cost'], 8)

     def extract_cost_curr_rate(self, order: Dict) -> Tuple[float, str, Optional[float]]:
         """
@@ -1327,7 +1336,7 @@ class Exchange:
         :param candle_type: '', mark, index, premiumIndex, or funding_rate
         :return: List with candle (OHLCV) data
         """
-        pair, _, _, data = asyncio.get_event_loop().run_until_complete(
+        pair, _, _, data = self.loop.run_until_complete(
             self._async_get_historic_ohlcv(pair=pair, timeframe=timeframe,
                                            since_ms=since_ms, is_new_pair=is_new_pair,
                                            candle_type=candle_type))
@@ -1436,8 +1445,10 @@ class Exchange:
         results_df = {}
         # Chunk requests into batches of 100 to avoid overwelming ccxt Throttling
         for input_coro in chunks(input_coroutines, 100):
-            results = asyncio.get_event_loop().run_until_complete(
-                asyncio.gather(*input_coro, return_exceptions=True))
+            async def gather_stuff():
+                return await asyncio.gather(*input_coro, return_exceptions=True)
+
+            results = self.loop.run_until_complete(gather_stuff())

             for res in results:
                 if isinstance(res, Exception):
@@ -1692,7 +1703,7 @@ class Exchange:
         if not self.exchange_has("fetchTrades"):
             raise OperationalException("This exchange does not support downloading Trades.")

-        return asyncio.get_event_loop().run_until_complete(
+        return self.loop.run_until_complete(
             self._async_get_trade_history(pair=pair, since=since,
                                           until=until, from_id=from_id))

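A recurring change in this file is that the exchange now creates and owns a single asyncio event loop (`self.loop`) instead of calling `asyncio.get_event_loop()` at each call site. A stripped-down sketch of that pattern, with no ccxt or freqtrade specifics, looks like this:

```python
import asyncio


class AsyncClientWrapper:
    """Own a single event loop and reuse it for every blocking call."""

    def __init__(self) -> None:
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

    def run(self, coro):
        # All synchronous entry points funnel through the same loop,
        # so sessions created on it can later be closed on the same loop.
        return self.loop.run_until_complete(coro)

    def close(self) -> None:
        self.loop.close()


async def fetch_number() -> int:
    await asyncio.sleep(0)
    return 42


client = AsyncClientWrapper()
print(client.run(fetch_number()))  # 42
client.close()
```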
@@ -150,6 +150,7 @@ class FreqtradeBot(LoggingMixin):

         self.rpc.cleanup()
         cleanup_db()
+        self.exchange.close()

     def startup(self) -> None:
         """
@@ -841,7 +842,7 @@ class FreqtradeBot(LoggingMixin):
                 trades_closed += 1

             except DependencyException as exception:
-                logger.warning('Unable to exit trade %s: %s', trade.pair, exception)
+                logger.warning(f'Unable to exit trade {trade.pair}: {exception}')

         # Updating wallets if any trade occurred
         if trades_closed:
@@ -1099,9 +1100,13 @@ class FreqtradeBot(LoggingMixin):
         if max_timeouts > 0 and canceled_count >= max_timeouts:
             logger.warning(f'Emergencyselling trade {trade}, as the sell order '
                            f'timed out {max_timeouts} times.')
-            self.execute_trade_exit(
-                trade, order.get('price'),
-                sell_reason=SellCheckTuple(sell_type=SellType.EMERGENCY_SELL))
+            try:
+                self.execute_trade_exit(
+                    trade, order.get('price'),
+                    sell_reason=SellCheckTuple(sell_type=SellType.EMERGENCY_SELL))
+            except DependencyException as exception:
+                logger.warning(
+                    f'Unable to emergency sell trade {trade.pair}: {exception}')

     def cancel_all_open_orders(self) -> None:
         """
@@ -258,6 +258,9 @@ class Backtesting:
         Helper function to convert a processed dataframes into lists for performance reasons.

         Used by backtest() - so keep this optimized for performance.
+
+        :param processed: a processed dictionary with format {pair, data}, which gets cleared to
+            optimize memory usage!
         """
         # Every change to this headers list must evaluate further usages of the resulting tuple
         # and eventually change the constants for indexes at the top
@@ -267,7 +270,8 @@ class Backtesting:
         self.progress.init_step(BacktestState.CONVERT, len(processed))

         # Create dict with data
-        for pair, pair_data in processed.items():
+        for pair in processed.keys():
+            pair_data = processed[pair]
             self.check_abort()
             self.progress.increment()

@@ -299,6 +303,9 @@ class Backtesting:
             # Convert from Pandas to list for performance reasons
             # (Looping Pandas is slow.)
             data[pair] = df_analyzed[headers].values.tolist()
+
+            # Do not hold on to old data to reduce memory usage
+            processed[pair] = pair_data = None
         return data

     def _get_close_rate(self, sell_row: Tuple, trade: LocalTrade, sell: SellCheckTuple,
@@ -577,7 +584,8 @@ class Backtesting:
         Of course try to not have ugly code. By some accessor are sometime slower than functions.
         Avoid extensive logging in this method and functions it calls.

-        :param processed: a processed dictionary with format {pair, data}
+        :param processed: a processed dictionary with format {pair, data}, which gets cleared to
+            optimize memory usage!
         :param start_date: backtesting timerange start datetime
         :param end_date: backtesting timerange end datetime
         :param max_open_trades: maximum number of concurrent trades, <= 0 means unlimited
||||||
|
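Note: the Backtesting hunks above drop each pair's data as soon as it has been converted to a list, so callers that still need the input afterwards must pass in a copy (the test changes later in this diff switch to deepcopy(processed) for exactly that reason). A minimal, self-contained sketch of the idea; fake_backtest and the sample dict are illustrative stand-ins, not freqtrade code:

    from copy import deepcopy

    def fake_backtest(processed: dict) -> None:
        # Mimics the new behaviour: each entry is dropped once it has been consumed,
        # so large per-pair objects can be garbage-collected early.
        for pair in processed.keys():
            processed[pair] = None

    data = {'ETH/BTC': [1, 2, 3]}
    fake_backtest(deepcopy(data))   # pass a copy if the original is needed afterwards
    assert data['ETH/BTC'] == [1, 2, 3]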
@@ -422,6 +422,7 @@ class Hyperopt:
         self.backtesting.exchange.close()
         self.backtesting.exchange._api = None  # type: ignore
         self.backtesting.exchange._api_async = None  # type: ignore
+        self.backtesting.exchange.loop = None  # type: ignore
         # self.backtesting.exchange = None  # type: ignore
         self.backtesting.pairlists = None  # type: ignore

@@ -461,7 +461,12 @@ def generate_candlestick_graph(pair: str, data: pd.DataFrame, trades: pd.DataFra
 def generate_profit_graph(pairs: str, data: Dict[str, pd.DataFrame],
                           trades: pd.DataFrame, timeframe: str, stake_currency: str) -> go.Figure:
     # Combine close-values for all pairs, rename columns to "pair"
-    df_comb = combine_dataframes_with_mean(data, "close")
+    try:
+        df_comb = combine_dataframes_with_mean(data, "close")
+    except ValueError:
+        raise OperationalException(
+            "No data found. Please make sure that data is available for "
+            "the timerange and pairs selected.")

     # Trim trades to available OHLCV data
     trades = extract_trades_of_period(df_comb, trades, date_index=True)
@@ -68,14 +68,14 @@ class PerformanceFilter(IPairList):
         # - then pair name alphametically
         sorted_df = list_df.merge(performance, on='pair', how='left')\
             .fillna(0).sort_values(by=['count', 'pair'], ascending=True)\
-            .sort_values(by=['profit'], ascending=False)
+            .sort_values(by=['profit_ratio'], ascending=False)
         if self._min_profit is not None:
-            removed = sorted_df[sorted_df['profit'] < self._min_profit]
+            removed = sorted_df[sorted_df['profit_ratio'] < self._min_profit]
             for _, row in removed.iterrows():
                 self.log_once(
-                    f"Removing pair {row['pair']} since {row['profit']} is "
+                    f"Removing pair {row['pair']} since {row['profit_ratio']} is "
                     f"below {self._min_profit}", logger.info)
-            sorted_df = sorted_df[sorted_df['profit'] >= self._min_profit]
+            sorted_df = sorted_df[sorted_df['profit_ratio'] >= self._min_profit]

         pairlist = sorted_df['pair'].tolist()

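Note: PerformanceFilter now sorts and filters on profit_ratio, which is a ratio rather than a percentage (the updated test parametrization further down uses 0.05 where it previously used 5). A small stand-alone pandas sketch of the same sort chain, with made-up sample values:

    import pandas as pd

    performance = pd.DataFrame([
        {'pair': 'TKN/BTC', 'profit_ratio': 0.05, 'count': 3},
        {'pair': 'ETH/BTC', 'profit_ratio': 0.04, 'count': 2},
    ])
    list_df = pd.DataFrame({'pair': ['ETH/BTC', 'TKN/BTC', 'LTC/BTC']})

    # Same ordering as the filter: ties broken by count and pair name, best profit_ratio first.
    sorted_df = list_df.merge(performance, on='pair', how='left')\
        .fillna(0).sort_values(by=['count', 'pair'], ascending=True)\
        .sort_values(by=['profit_ratio'], ascending=False)
    print(sorted_df['pair'].tolist())   # ['TKN/BTC', 'ETH/BTC', 'LTC/BTC']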
@@ -8,7 +8,7 @@ from typing import Any, Dict, List, Optional

 import arrow
 import numpy as np
-from cachetools.ttl import TTLCache
+from cachetools import TTLCache
 from pandas import DataFrame

 from freqtrade.constants import ListPairsWithTimeframes
@@ -8,7 +8,7 @@ from functools import partial
 from typing import Any, Dict, List

 import arrow
-from cachetools.ttl import TTLCache
+from cachetools import TTLCache

 from freqtrade.constants import ListPairsWithTimeframes
 from freqtrade.exceptions import OperationalException
@@ -6,7 +6,7 @@ from copy import deepcopy
 from typing import Any, Dict, List, Optional

 import arrow
-from cachetools.ttl import TTLCache
+from cachetools import TTLCache
 from pandas import DataFrame

 from freqtrade.constants import ListPairsWithTimeframes
@@ -2,7 +2,7 @@
 PairList manager class
 """
 import logging
-from copy import deepcopy
+from functools import partial
 from typing import Dict, List

 from cachetools import TTLCache, cached
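Note: these modules now import TTLCache from the cachetools package root instead of the cachetools.ttl submodule; to my understanding the submodule path is a private detail that newer cachetools releases no longer expose, while the top-level import works on both old and new versions. A minimal sketch of such a cache (the maxsize/ttl values are illustrative only):

    from cachetools import TTLCache

    # Entries expire after `ttl` seconds; `maxsize` bounds the number of cached items.
    ticker_cache = TTLCache(maxsize=100, ttl=1800)
    ticker_cache['ETH/BTC'] = {'last': 0.081}
    print(ticker_cache.get('ETH/BTC'))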
@@ -10,6 +10,7 @@ from cachetools import TTLCache, cached
 from freqtrade.constants import ListPairsWithTimeframes
 from freqtrade.enums import CandleType
 from freqtrade.exceptions import OperationalException
+from freqtrade.mixins import LoggingMixin
 from freqtrade.plugins.pairlist.IPairList import IPairList
 from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
 from freqtrade.resolvers import PairListResolver
@@ -18,7 +19,7 @@ from freqtrade.resolvers import PairListResolver
 logger = logging.getLogger(__name__)


-class PairListManager():
+class PairListManager(LoggingMixin):

     def __init__(self, exchange, config: dict) -> None:
         self._exchange = exchange
@@ -42,6 +43,9 @@ class PairListManager():
         if not self._pairlist_handlers:
             raise OperationalException("No Pairlist Handlers defined")

+        refresh_period = config.get('pairlist_refresh_period', 3600)
+        LoggingMixin.__init__(self, logger, refresh_period)
+
     @property
     def whitelist(self) -> List[str]:
         """The current whitelist"""
@@ -109,9 +113,10 @@ class PairListManager():
         except ValueError as err:
             logger.error(f"Pair blacklist contains an invalid Wildcard: {err}")
             return []
-        for pair in deepcopy(pairlist):
+        log_once = partial(self.log_once, logmethod=logmethod)
+        for pair in pairlist.copy():
             if pair in blacklist:
-                logmethod(f"Pair {pair} in your blacklist. Removing it from whitelist...")
+                log_once(f"Pair {pair} in your blacklist. Removing it from whitelist...")
                 pairlist.remove(pair)
         return pairlist

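Note: verify_blacklist now funnels its message through log_once with the caller-supplied logmethod pre-bound via functools.partial, so repeated refreshes within pairlist_refresh_period only produce the message once (the new test_remove_logs_for_pairs_already_in_blacklist later in this diff asserts exactly that). A small stand-alone sketch of the pre-binding pattern; log_once below is a simplified stand-in, not the LoggingMixin implementation:

    import logging
    from functools import partial

    logger = logging.getLogger(__name__)

    def log_once(message, logmethod):
        # Stand-in for LoggingMixin.log_once, which additionally de-duplicates
        # messages for the configured refresh period.
        logmethod(message)

    log_warning_once = partial(log_once, logmethod=logger.warning)
    log_warning_once("Pair BLK/BTC in your blacklist. Removing it from whitelist...")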
@@ -33,6 +33,9 @@ async def api_start_backtest(bt_settings: BacktestRequest, background_tasks: Bac
         if settings[setting] is not None:
             btconfig[setting] = settings[setting]

+    # Force dry-run for backtesting
+    btconfig['dry_run'] = True
+
     # Start backtesting
     # Initialize backtesting object
     def run_backtest():
@@ -162,7 +162,7 @@ class ShowConfig(BaseModel):
     trailing_stop_positive_offset: Optional[float]
     trailing_only_offset_is_reached: Optional[bool]
     unfilledtimeout: UnfilledTimeout
-    order_types: OrderTypes
+    order_types: Optional[OrderTypes]
     use_custom_stoploss: Optional[bool]
     timeframe: Optional[str]
     timeframe_ms: int
@@ -3,7 +3,7 @@ from copy import deepcopy
 from pathlib import Path
 from typing import List, Optional

-from fastapi import APIRouter, Depends
+from fastapi import APIRouter, Depends, Query
 from fastapi.exceptions import HTTPException

 from freqtrade import __version__
@@ -31,7 +31,8 @@ logger = logging.getLogger(__name__)
 # Pre-1.1, no version was provided
 # Version increments should happen in "small" steps (1.1, 1.12, ...) unless big changes happen.
 # 1.11: forcebuy and forcesell accept ordertype
-API_VERSION = 1.11
+# 1.12: add blacklist delete endpoint
+API_VERSION = 1.12

 # Public API, requires no auth.
 router_public = APIRouter()
@@ -158,6 +159,13 @@ def blacklist_post(payload: BlacklistPayload, rpc: RPC = Depends(get_rpc)):
     return rpc._rpc_blacklist(payload.blacklist)


+@router.delete('/blacklist', response_model=BlacklistResponse, tags=['info', 'pairlist'])
+def blacklist_delete(pairs_to_delete: List[str] = Query([]), rpc: RPC = Depends(get_rpc)):
+    """Provide a list of pairs to delete from the blacklist"""
+
+    return rpc._rpc_blacklist_delete(pairs_to_delete)
+
+
 @router.get('/whitelist', response_model=WhitelistResponse, tags=['info', 'pairlist'])
 def whitelist(rpc: RPC = Depends(get_rpc)):
     return rpc._rpc_whitelist()
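Note: together with _rpc_blacklist_delete below, the new route removes blacklist entries via repeated pairs_to_delete query parameters, as exercised by test_api_blacklist at the end of this diff. A hedged usage sketch with requests; the server address and credentials are placeholders, not taken from this diff:

    import requests

    BASE_URI = "http://127.0.0.1:8080/api/v1"   # placeholder server address
    auth = ("freqtrader", "password")           # placeholder credentials

    resp = requests.delete(
        f"{BASE_URI}/blacklist",
        params=[("pairs_to_delete", "DOGE/BTC"), ("pairs_to_delete", "HOT/BTC")],
        auth=auth,
    )
    print(resp.json())   # expected keys: blacklist, blacklist_expanded, length, method, errors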
@@ -47,7 +47,7 @@ class UvicornServer(uvicorn.Server):
         else:
             asyncio.set_event_loop(uvloop.new_event_loop())
         try:
-            loop = asyncio.get_event_loop()
+            loop = asyncio.get_running_loop()
         except RuntimeError:
             # When running in a thread, we'll not have an eventloop yet.
             loop = asyncio.new_event_loop()
@@ -7,7 +7,7 @@ import datetime
 import logging
 from typing import Dict, List

-from cachetools.ttl import TTLCache
+from cachetools import TTLCache
 from pycoingecko import CoinGeckoAPI
 from requests.exceptions import RequestException

@@ -863,6 +863,20 @@ class RPC:
         }
         return res

+    def _rpc_blacklist_delete(self, delete: List[str]) -> Dict:
+        """ Removes pairs from currently active blacklist """
+        errors = {}
+        for pair in delete:
+            if pair in self._freqtrade.pairlists.blacklist:
+                self._freqtrade.pairlists.blacklist.remove(pair)
+            else:
+                errors[pair] = {
+                    'error_msg': f"Pair {pair} is not in the current blacklist."
+                }
+        resp = self._rpc_blacklist()
+        resp['errors'] = errors
+        return resp
+
     def _rpc_blacklist(self, add: List[str] = None) -> Dict:
         """ Returns the currently active blacklist"""
         errors = {}
@@ -60,6 +60,10 @@ class RPCManager:
         }
         """
         logger.info('Sending rpc message: %s', msg)
+        if 'pair' in msg:
+            msg.update({
+                'base_currency': self._rpc._freqtrade.exchange.get_pair_base_currency(msg['pair'])
+            })
         for mod in self.registered_modules:
             logger.debug('Forwarding message to rpc.%s', mod.name)
             try:
@@ -111,9 +111,9 @@ class Telegram(RPCHandler):
                       r'/daily$', r'/daily \d+$', r'/profit$', r'/profit \d+',
                       r'/stats$', r'/count$', r'/locks$', r'/balance$',
                       r'/stopbuy$', r'/reload_config$', r'/show_config$',
-                      r'/logs$', r'/whitelist$', r'/blacklist$', r'/edge$',
+                      r'/logs$', r'/whitelist$', r'/blacklist$', r'/bl_delete$',
                       r'/weekly$', r'/weekly \d+$', r'/monthly$', r'/monthly \d+$',
-                      r'/forcebuy$', r'/help$', r'/version$']
+                      r'/forcebuy$', r'/edge$', r'/help$', r'/version$']
         # Create keys for generation
         valid_keys_print = [k.replace('$', '') for k in valid_keys]

@@ -170,6 +170,7 @@ class Telegram(RPCHandler):
             CommandHandler('stopbuy', self._stopbuy),
             CommandHandler('whitelist', self._whitelist),
             CommandHandler('blacklist', self._blacklist),
+            CommandHandler(['blacklist_delete', 'bl_delete'], self._blacklist_delete),
             CommandHandler('logs', self._logs),
             CommandHandler('edge', self._edge),
             CommandHandler('help', self._help),
@@ -199,8 +200,8 @@ class Telegram(RPCHandler):

         self._updater.start_polling(
             bootstrap_retries=-1,
-            timeout=30,
-            read_latency=60,
+            timeout=20,
+            read_latency=60,  # Assumed transmission latency
             drop_pending_updates=True,
         )
         logger.info(
@@ -213,6 +214,7 @@ class Telegram(RPCHandler):
         Stops all running telegram threads.
         :return: None
         """
+        # This can take up to `timeout` from the call to `start_polling`.
         self._updater.stop()

     def _format_buy_msg(self, msg: Dict[str, Any]) -> str:
@@ -1178,22 +1180,28 @@ class Telegram(RPCHandler):
         Handler for /blacklist
         Shows the currently active blacklist
         """
-        try:
+        self.send_blacklist_msg(self._rpc._rpc_blacklist(context.args))

-            blacklist = self._rpc._rpc_blacklist(context.args)
+    def send_blacklist_msg(self, blacklist: Dict):
         errmsgs = []
         for pair, error in blacklist['errors'].items():
             errmsgs.append(f"Error adding `{pair}` to blacklist: `{error['error_msg']}`")
         if errmsgs:
             self._send_msg('\n'.join(errmsgs))

         message = f"Blacklist contains {blacklist['length']} pairs\n"
         message += f"`{', '.join(blacklist['blacklist'])}`"

         logger.debug(message)
         self._send_msg(message)
-        except RPCException as e:
-            self._send_msg(str(e))
+
+    @authorized_only
+    def _blacklist_delete(self, update: Update, context: CallbackContext) -> None:
+        """
+        Handler for /bl_delete
+        Deletes pair(s) from current blacklist
+        """
+        self.send_blacklist_msg(self._rpc._rpc_blacklist_delete(context.args or []))

     @authorized_only
     def _logs(self, update: Update, context: CallbackContext) -> None:
@@ -1274,6 +1282,8 @@ class Telegram(RPCHandler):
             "*/whitelist:* `Show current whitelist` \n"
             "*/blacklist [pair]:* `Show current blacklist, or adds one or more pairs "
             "to the blacklist.` \n"
+            "*/blacklist_delete [pairs]| /bl_delete [pairs]:* "
+            "`Delete pair / pattern from blacklist. Will reset on reload_conf.` \n"
             "*/reload_config:* `Reload configuration file` \n"
             "*/unlock <pair|id>:* `Unlock this Pair (or this lock id if it's numeric)`\n"

@@ -811,23 +811,20 @@ class IStrategy(ABC, HyperStrategyMixin):
                 custom_reason = custom_reason[:CUSTOM_SELL_MAX_LENGTH]
             else:
                 custom_reason = None
-        # TODO: return here if exit-signal should be favored over ROI
+        if sell_signal in (SellType.CUSTOM_SELL, SellType.SELL_SIGNAL):
+            logger.debug(f"{trade.pair} - Sell signal received. "
+                         f"sell_type=SellType.{sell_signal.name}" +
+                         (f", custom_reason={custom_reason}" if custom_reason else ""))
+            return SellCheckTuple(sell_type=sell_signal, sell_reason=custom_reason)

-        # Start evaluations
         # Sequence:
-        # ROI (if not stoploss)
         # Exit-signal
+        # ROI (if not stoploss)
         # Stoploss
         if roi_reached and stoplossflag.sell_type != SellType.STOP_LOSS:
             logger.debug(f"{trade.pair} - Required profit reached. sell_type=SellType.ROI")
             return SellCheckTuple(sell_type=SellType.ROI)

-        if sell_signal != SellType.NONE:
-            logger.debug(f"{trade.pair} - Sell signal received. "
-                         f"sell_type=SellType.{sell_signal.name}" +
-                         (f", custom_reason={custom_reason}" if custom_reason else ""))
-            return SellCheckTuple(sell_type=sell_signal, sell_reason=custom_reason)
-
         if stoplossflag.sell_flag:

             logger.debug(f"{trade.pair} - Stoploss hit. sell_type={stoplossflag.sell_type}")
@@ -260,8 +260,8 @@ class Wallets:
             if self._log:
                 logger.info(
                     f"Adjusted stake amount for pair {pair} is more than 30% bigger than "
-                    f"the desired stake ({stake_amount} * 1.3 > {max_stake_amount}), "
-                    f"ignoring trade."
+                    f"the desired stake amount of ({stake_amount:.8f} * 1.3 = "
+                    f"{stake_amount * 1.3:.8f}) < {min_stake_amount}), ignoring trade."
                 )
                 return 0
             stake_amount = min_stake_amount
@@ -85,9 +85,12 @@ class Worker:

         # Log state transition
         if state != old_state:
-            self.freqtrade.notify_status(f'{state.name.lower()}')

+            if old_state != State.RELOAD_CONFIG:
+                self.freqtrade.notify_status(f'{state.name.lower()}')
+
-            logger.info(f"Changing state to: {state.name}")
+            logger.info(
+                f"Changing state{f' from {old_state.name}' if old_state else ''} to: {state.name}")
             if state == State.RUNNING:
                 self.freqtrade.startup()

@@ -23,6 +23,7 @@ exclude = '''
 line_length = 100
 multi_line_output=0
 lines_after_imports=2
+skip_glob = ["**/.env*", "**/env/*", "**/.venv/*", "**/docs/*"]

 [build-system]
 requires = ["setuptools >= 46.4.0", "wheel"]
@@ -6,7 +6,7 @@
 coveralls==3.3.1
 flake8==4.0.1
 flake8-tidy-imports==4.5.0
-mypy==0.910
+mypy==0.930
 pytest==6.2.5
 pytest-asyncio==0.16.0
 pytest-cov==3.0.0
@@ -14,7 +14,7 @@ pytest-mock==3.6.1
 pytest-random-order==1.0.4
 isort==5.10.1
 # For datetime mocking
-time-machine==2.4.1
+time-machine==2.5.0

 # Convert jupyter notebooks to markdown documents
 nbconvert==6.3.0
@@ -22,8 +22,8 @@ nbconvert==6.3.0
 # mypy types
 types-cachetools==4.2.6
 types-filelock==3.2.1
-types-requests==2.26.1
+types-requests==2.26.2
 types-tabulate==0.8.3

 # Extensions to datetime library
-types-python-dateutil==2.8.3
+types-python-dateutil==2.8.4
@@ -3,9 +3,9 @@

 # Required for hyperopt
 scipy==1.7.3
-scikit-learn==1.0.1
+scikit-learn==1.0.2
 scikit-optimize==0.9.0
-filelock==3.4.0
+filelock==3.4.2
 joblib==1.1.0
 psutil==5.8.0
 progressbar2==3.55.0
@@ -1,5 +1,5 @@
 # Include all requirements to run the bot.
 -r requirements.txt

-plotly==5.4.0
+plotly==5.5.0

@@ -1,24 +1,25 @@
-numpy==1.21.4
-pandas==1.3.4
+numpy==1.21.5; python_version <= '3.7'
+numpy==1.22.0; python_version > '3.7'
+pandas==1.3.5
 pandas-ta==0.3.14b

-ccxt==1.63.1
+ccxt==1.65.25
 # Pin cryptography for now due to rust build errors with piwheels
-cryptography==36.0.0
+cryptography==36.0.1
 aiohttp==3.8.1
-SQLAlchemy==1.4.27
+SQLAlchemy==1.4.29
-python-telegram-bot==13.8.1
+python-telegram-bot==13.9
 arrow==1.2.1
 cachetools==4.2.2
 requests==2.26.0
 urllib3==1.26.7
-jsonschema==4.2.1
+jsonschema==4.3.2
-TA-Lib==0.4.21
+TA-Lib==0.4.22
 technical==1.3.0
 tabulate==0.8.9
 pycoingecko==2.2.0
 jinja2==3.0.3
-tables==3.6.1
+tables==3.7.0
 blosc==1.10.6

 # find first, C search in arrays
@@ -31,8 +32,8 @@ python-rapidjson==1.5
 sdnotify==0.3.2

 # API Server
-fastapi==0.70.0
-uvicorn==0.15.0
+fastapi==0.70.1
+uvicorn==0.16.0
 pyjwt==2.3.0
 aiofiles==0.8.0
 psutil==5.8.0
@@ -41,7 +42,7 @@ psutil==5.8.0
 colorama==0.4.4
 # Building config files interactively
 questionary==1.10.0
-prompt-toolkit==3.0.23
+prompt-toolkit==3.0.24
 # Extensions to datetime library
 python-dateutil==2.8.2

@@ -17,6 +17,7 @@ classifiers =
     Programming Language :: Python :: 3.7
     Programming Language :: Python :: 3.8
     Programming Language :: Python :: 3.9
+    Programming Language :: Python :: 3.10
     Operating System :: MacOS
     Operating System :: Unix
     Topic :: Office/Business :: Financial :: Investment
8 setup.sh

@@ -25,7 +25,7 @@ function check_installed_python() {
         exit 2
     fi

-    for v in 9 8 7
+    for v in 9 10 8 7
     do
         PYTHON="python3.${v}"
         which $PYTHON
@@ -36,7 +36,7 @@ function check_installed_python() {
         fi
     done

-    echo "No usable python found. Please make sure to have python3.7 or newer installed"
+    echo "No usable python found. Please make sure to have python3.7 or newer installed."
     exit 1
 }

@@ -205,7 +205,7 @@ function config() {
 }

 function install() {

     echo_block "Installing mandatory dependencies"

     if [ "$(uname -s)" == "Darwin" ]; then
@@ -219,7 +219,7 @@ function install() {
         install_redhat
     else
         echo "This script does not support your OS."
-        echo "If you have Python version 3.7 - 3.9, pip, virtualenv, ta-lib you can continue."
+        echo "If you have Python version 3.7 - 3.10, pip, virtualenv, ta-lib you can continue."
         echo "Wait 10 seconds to continue the next install steps or use ctrl+c to interrupt this shell."
         sleep 10
     fi
@@ -4,7 +4,6 @@ import logging
 import re
 from copy import deepcopy
 from datetime import datetime, timedelta
-from functools import reduce
 from pathlib import Path
 from typing import Optional, Tuple
 from unittest.mock import MagicMock, Mock, PropertyMock
@@ -54,17 +53,23 @@ def pytest_configure(config):


 def log_has(line, logs):
-    # caplog mocker returns log as a tuple: ('freqtrade.something', logging.WARNING, 'foobar')
-    # and we want to match line against foobar in the tuple
-    return reduce(lambda a, b: a or b,
-                  filter(lambda x: x[2] == line, logs.record_tuples),
-                  False)
+    """Check if line is found on some caplog's message."""
+    return any(line == message for message in logs.messages)


 def log_has_re(line, logs):
-    return reduce(lambda a, b: a or b,
-                  filter(lambda x: re.match(line, x[2]), logs.record_tuples),
-                  False)
+    """Check if line matches some caplog's message."""
+    return any(re.match(line, message) for message in logs.messages)


+def num_log_has(line, logs):
+    """Check how many times line is found in caplog's messages."""
+    return sum(line == message for message in logs.messages)
+
+
+def num_log_has_re(line, logs):
+    """Check how many times line matches caplog's messages."""
+    return sum(bool(re.match(line, message)) for message in logs.messages)
+
+
 def get_args(args):
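Note: the rewritten helpers compare against caplog.messages (the formatted strings collected by pytest's caplog fixture) instead of reducing over record_tuples, and the new num_log_has/num_log_has_re counters back the log_once assertions in the kucoin and pairlist tests below. A minimal usage sketch; the logger name and test body are illustrative only:

    import logging

    from tests.conftest import log_has, log_has_re, num_log_has

    logger = logging.getLogger("example")

    def test_logging_helpers(caplog):
        caplog.set_level(logging.INFO)
        for _ in range(3):
            logger.info("something happened")

        assert log_has("something happened", caplog)
        assert log_has_re(r"something .*", caplog)
        assert num_log_has("something happened", caplog) == 3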
@@ -235,6 +235,13 @@ def test_combine_dataframes_with_mean(testdatadir):
     assert "mean" in df.columns


+def test_combine_dataframes_with_mean_no_data(testdatadir):
+    pairs = ["ETH/BTC", "ADA/BTC"]
+    data = load_data(datadir=testdatadir, pairs=pairs, timeframe='6m')
+    with pytest.raises(ValueError, match=r"No objects to concatenate"):
+        combine_dataframes_with_mean(data)
+
+
 def test_create_cum_profit(testdatadir):
     filename = testdatadir / "backtest-result_test.json"
     bt_data = load_backtest_data(filename)
@@ -356,7 +356,7 @@ def test_load_partial_missing(testdatadir, caplog) -> None:
     assert td != len(data['UNITTEST/BTC'])
     start_real = data['UNITTEST/BTC'].iloc[0, 0]
     assert log_has(f'Missing data at start for pair '
-                   f'UNITTEST/BTC, data starts at {start_real.strftime("%Y-%m-%d %H:%M:%S")}',
+                   f'UNITTEST/BTC at 5m, data starts at {start_real.strftime("%Y-%m-%d %H:%M:%S")}',
                    caplog)
     # Make sure we start fresh - test missing data at end
     caplog.clear()
@@ -371,7 +371,7 @@ def test_load_partial_missing(testdatadir, caplog) -> None:
     # Shift endtime with +5 - as last candle is dropped (partial candle)
     end_real = arrow.get(data['UNITTEST/BTC'].iloc[-1, 0]).shift(minutes=5)
     assert log_has(f'Missing data at end for pair '
-                   f'UNITTEST/BTC, data ends at {end_real.strftime("%Y-%m-%d %H:%M:%S")}',
+                   f'UNITTEST/BTC at 5m, data ends at {end_real.strftime("%Y-%m-%d %H:%M:%S")}',
                    caplog)

47 tests/exchange/test_bitpanda.py (new file)

@@ -0,0 +1,47 @@
+from datetime import datetime
+from unittest.mock import MagicMock
+
+from tests.conftest import get_patched_exchange
+
+
+def test_get_trades_for_order(default_conf, mocker):
+    exchange_name = 'bitpanda'
+    order_id = 'ABCD-ABCD'
+    since = datetime(2018, 5, 5, 0, 0, 0)
+    default_conf["dry_run"] = False
+    mocker.patch('freqtrade.exchange.Exchange.exchange_has', return_value=True)
+    api_mock = MagicMock()
+
+    api_mock.fetch_my_trades = MagicMock(return_value=[{'id': 'TTR67E-3PFBD-76IISV',
+                                                        'order': 'ABCD-ABCD',
+                                                        'info': {'pair': 'XLTCZBTC',
+                                                                 'time': 1519860024.4388,
+                                                                 'type': 'buy',
+                                                                 'ordertype': 'limit',
+                                                                 'price': '20.00000',
+                                                                 'cost': '38.62000',
+                                                                 'fee': '0.06179',
+                                                                 'vol': '5',
+                                                                 'id': 'ABCD-ABCD'},
+                                                        'timestamp': 1519860024438,
+                                                        'datetime': '2018-02-28T23:20:24.438Z',
+                                                        'symbol': 'LTC/BTC',
+                                                        'type': 'limit',
+                                                        'side': 'buy',
+                                                        'price': 165.0,
+                                                        'amount': 0.2340606,
+                                                        'fee': {'cost': 0.06179, 'currency': 'BTC'}
+                                                        }])
+    exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
+
+    orders = exchange.get_trades_for_order(order_id, 'LTC/BTC', since)
+    assert len(orders) == 1
+    assert orders[0]['price'] == 165
+    assert api_mock.fetch_my_trades.call_count == 1
+    # since argument should be
+    assert isinstance(api_mock.fetch_my_trades.call_args[0][1], int)
+    assert api_mock.fetch_my_trades.call_args[0][0] == 'LTC/BTC'
+    # Same test twice, hardcoded number and doing the same calculation
+    assert api_mock.fetch_my_trades.call_args[0][1] == 1525478395000
+    # bitpanda requires "to" argument.
+    assert 'to' in api_mock.fetch_my_trades.call_args[1]['params']
@@ -21,7 +21,7 @@ from freqtrade.exchange.exchange import (market_is_active, timeframe_to_minutes,
                                          timeframe_to_next_date, timeframe_to_prev_date,
                                          timeframe_to_seconds)
 from freqtrade.resolvers.exchange_resolver import ExchangeResolver
-from tests.conftest import get_mock_coro, get_patched_exchange, log_has, log_has_re
+from tests.conftest import get_mock_coro, get_patched_exchange, log_has, log_has_re, num_log_has_re


 # Make sure to always keep one exchange here which is NOT subclassed!!
@@ -1824,6 +1824,44 @@ async def test__async_get_candle_history(default_conf, mocker, caplog, exchange_
                                          (arrow.utcnow().int_timestamp - 2000) * 1000)


+@pytest.mark.asyncio
+async def test__async_kucoin_get_candle_history(default_conf, mocker, caplog):
+    caplog.set_level(logging.INFO)
+    api_mock = MagicMock()
+    api_mock.fetch_ohlcv = MagicMock(side_effect=ccxt.DDoSProtection(
+        "kucoin GET https://openapi-v2.kucoin.com/api/v1/market/candles?"
+        "symbol=ETH-BTC&type=5min&startAt=1640268735&endAt=1640418735"
+        "429 Too Many Requests" '{"code":"429000","msg":"Too Many Requests"}'))
+    exchange = get_patched_exchange(mocker, default_conf, api_mock, id="kucoin")
+
+    msg = "Kucoin 429 error, avoid triggering DDosProtection backoff delay"
+    assert not num_log_has_re(msg, caplog)
+
+    for _ in range(3):
+        with pytest.raises(DDosProtection, match=r'429 Too Many Requests'):
+            await exchange._async_get_candle_history(
+                "ETH/BTC", "5m", (arrow.utcnow().int_timestamp - 2000) * 1000, count=3)
+    assert num_log_has_re(msg, caplog) == 3
+
+    caplog.clear()
+    # Test regular non-kucoin message
+    api_mock.fetch_ohlcv = MagicMock(side_effect=ccxt.DDoSProtection(
+        "kucoin GET https://openapi-v2.kucoin.com/api/v1/market/candles?"
+        "symbol=ETH-BTC&type=5min&startAt=1640268735&endAt=1640418735"
+        "429 Too Many Requests" '{"code":"2222222","msg":"Too Many Requests"}'))
+
+    msg = r'_async_get_candle_history\(\) returned exception: .*'
+    msg2 = r'Applying DDosProtection backoff delay: .*'
+    with patch('freqtrade.exchange.common.asyncio.sleep', get_mock_coro(None)):
+        for _ in range(3):
+            with pytest.raises(DDosProtection, match=r'429 Too Many Requests'):
+                await exchange._async_get_candle_history(
+                    "ETH/BTC", "5m", (arrow.utcnow().int_timestamp - 2000) * 1000, count=3)
+    # Expect the "returned exception" message 12 times (4 retries * 3 (loop))
+    assert num_log_has_re(msg, caplog) == 12
+    assert num_log_has_re(msg2, caplog) == 9
+
+
 @pytest.mark.asyncio
 async def test__async_get_candle_history_empty(default_conf, mocker, caplog):
     """ Test empty exchange result """
@@ -3088,39 +3126,49 @@ def test_extract_cost_curr_rate(mocker, default_conf, order, expected) -> None:
     assert ex.extract_cost_curr_rate(order) == expected


-@pytest.mark.parametrize("order,expected", [
+@pytest.mark.parametrize("order,unknown_fee_rate,expected", [
     # Using base-currency
     ({'symbol': 'ETH/BTC', 'amount': 0.04, 'cost': 0.05,
-      'fee': {'currency': 'ETH', 'cost': 0.004, 'rate': None}}, 0.1),
+      'fee': {'currency': 'ETH', 'cost': 0.004, 'rate': None}}, None, 0.1),
     ({'symbol': 'ETH/BTC', 'amount': 0.05, 'cost': 0.05,
-      'fee': {'currency': 'ETH', 'cost': 0.004, 'rate': None}}, 0.08),
+      'fee': {'currency': 'ETH', 'cost': 0.004, 'rate': None}}, None, 0.08),
     # Using quote currency
     ({'symbol': 'ETH/BTC', 'amount': 0.04, 'cost': 0.05,
-      'fee': {'currency': 'BTC', 'cost': 0.005}}, 0.1),
+      'fee': {'currency': 'BTC', 'cost': 0.005}}, None, 0.1),
     ({'symbol': 'ETH/BTC', 'amount': 0.04, 'cost': 0.05,
-      'fee': {'currency': 'BTC', 'cost': 0.002, 'rate': None}}, 0.04),
+      'fee': {'currency': 'BTC', 'cost': 0.002, 'rate': None}}, None, 0.04),
     # Using foreign currency
     ({'symbol': 'ETH/BTC', 'amount': 0.04, 'cost': 0.05,
-      'fee': {'currency': 'NEO', 'cost': 0.0012}}, 0.001944),
+      'fee': {'currency': 'NEO', 'cost': 0.0012}}, None, 0.001944),
     ({'symbol': 'ETH/BTC', 'amount': 2.21, 'cost': 0.02992561,
-      'fee': {'currency': 'NEO', 'cost': 0.00027452}}, 0.00074305),
+      'fee': {'currency': 'NEO', 'cost': 0.00027452}}, None, 0.00074305),
     # Rate included in return - return as is
     ({'symbol': 'ETH/BTC', 'amount': 0.04, 'cost': 0.05,
-      'fee': {'currency': 'USDT', 'cost': 0.34, 'rate': 0.01}}, 0.01),
+      'fee': {'currency': 'USDT', 'cost': 0.34, 'rate': 0.01}}, None, 0.01),
     ({'symbol': 'ETH/BTC', 'amount': 0.04, 'cost': 0.05,
-      'fee': {'currency': 'USDT', 'cost': 0.34, 'rate': 0.005}}, 0.005),
+      'fee': {'currency': 'USDT', 'cost': 0.34, 'rate': 0.005}}, None, 0.005),
     # 0.1% filled - no costs (kraken - #3431)
     ({'symbol': 'ETH/BTC', 'amount': 0.04, 'cost': 0.0,
-      'fee': {'currency': 'BTC', 'cost': 0.0, 'rate': None}}, None),
+      'fee': {'currency': 'BTC', 'cost': 0.0, 'rate': None}}, None, None),
     ({'symbol': 'ETH/BTC', 'amount': 0.04, 'cost': 0.0,
-      'fee': {'currency': 'ETH', 'cost': 0.0, 'rate': None}}, 0.0),
+      'fee': {'currency': 'ETH', 'cost': 0.0, 'rate': None}}, None, 0.0),
     ({'symbol': 'ETH/BTC', 'amount': 0.04, 'cost': 0.0,
-      'fee': {'currency': 'NEO', 'cost': 0.0, 'rate': None}}, None),
+      'fee': {'currency': 'NEO', 'cost': 0.0, 'rate': None}}, None, None),
+    # Invalid pair combination - POINT/BTC is not a pair
+    ({'symbol': 'POINT/BTC', 'amount': 0.04, 'cost': 0.5,
+      'fee': {'currency': 'POINT', 'cost': 2.0, 'rate': None}}, None, None),
+    ({'symbol': 'POINT/BTC', 'amount': 0.04, 'cost': 0.5,
+      'fee': {'currency': 'POINT', 'cost': 2.0, 'rate': None}}, 1, 4.0),
+    ({'symbol': 'POINT/BTC', 'amount': 0.04, 'cost': 0.5,
+      'fee': {'currency': 'POINT', 'cost': 2.0, 'rate': None}}, 2, 8.0),
 ])
-def test_calculate_fee_rate(mocker, default_conf, order, expected) -> None:
+def test_calculate_fee_rate(mocker, default_conf, order, expected, unknown_fee_rate) -> None:
     mocker.patch('freqtrade.exchange.Exchange.fetch_ticker', return_value={'last': 0.081})
+    if unknown_fee_rate:
+        default_conf['exchange']['unknown_fee_rate'] = unknown_fee_rate
+
     ex = get_patched_exchange(mocker, default_conf)

     assert ex.calculate_fee_rate(order) == expected


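Note: the new POINT/BTC cases exercise an exchange.unknown_fee_rate setting; the parametrization implies that when the fee currency cannot be priced against the trade, the fee rate falls back to fee_cost * unknown_fee_rate / order cost, and stays None while the option is unset. A quick arithmetic check of the expectations above:

    fee_cost, order_cost = 2.0, 0.5   # values from the POINT/BTC test orders

    for unknown_fee_rate, expected in [(1, 4.0), (2, 8.0)]:
        assert fee_cost * unknown_fee_rate / order_cost == expected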
@@ -426,8 +426,6 @@ tc26 = BTContainer(data=[

 # Test 27: Sell with signal sell in candle 3 (ROI at signal candle)
 # Stoploss at 10% (irrelevant), ROI at 5% (will trigger) - Wins over Sell-signal
-# TODO: figure out if sell-signal should win over ROI
-# Sell-signal wins over stoploss
 tc27 = BTContainer(data=[
     # D O H L C V B S
     [0, 5000, 5025, 4975, 4987, 6172, 1, 0],
@@ -436,8 +434,8 @@ tc27 = BTContainer(data=[
     [3, 5010, 5012, 4986, 5010, 6172, 0, 1],  # sell-signal
     [4, 5010, 5251, 4855, 4995, 6172, 0, 0],  # Triggers ROI, sell-signal acted on
     [5, 4995, 4995, 4950, 4950, 6172, 0, 0]],
-    stop_loss=-0.10, roi={"0": 0.05}, profit_perc=0.05, use_sell_signal=True,
-    trades=[BTrade(sell_reason=SellType.ROI, open_tick=1, close_tick=4)]
+    stop_loss=-0.10, roi={"0": 0.05}, profit_perc=0.002, use_sell_signal=True,
+    trades=[BTrade(sell_reason=SellType.SELL_SIGNAL, open_tick=1, close_tick=4)]
 )

 # Test 28: trailing_stop should raise so candle 3 causes a stoploss
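Note: with the sell signal now taking precedence over ROI, tc27 exits at candle 4's open (5010) instead of at the 5% ROI level, which appears to be where the new profit_perc of 0.002 comes from (fees aside):

    open_rate, close_rate = 5000, 5010   # candle 1 open and candle 4 open from tc27
    assert round((close_rate - open_rate) / open_rate, 3) == 0.002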
@@ -1,6 +1,7 @@
 # pragma pylint: disable=missing-docstring, W0212, line-too-long, C0103, unused-argument

 import random
+from copy import deepcopy
 from datetime import datetime, timedelta, timezone
 from pathlib import Path
 from unittest.mock import MagicMock, PropertyMock
@@ -666,7 +667,7 @@ def test_backtest_one(default_conf, fee, mocker, testdatadir) -> None:
     processed = backtesting.strategy.advise_all_indicators(data)
     min_date, max_date = get_timerange(processed)
     result = backtesting.backtest(
-        processed=processed,
+        processed=deepcopy(processed),
         start_date=min_date,
         end_date=max_date,
         max_open_trades=10,
@@ -908,7 +909,7 @@ def test_backtest_multi_pair(default_conf, fee, mocker, tres, pair, testdatadir)
     processed = backtesting.strategy.advise_all_indicators(data)
     min_date, max_date = get_timerange(processed)
     backtest_conf = {
-        'processed': processed,
+        'processed': deepcopy(processed),
         'start_date': min_date,
         'end_date': max_date,
         'max_open_trades': 3,
@@ -931,7 +932,7 @@ def test_backtest_multi_pair(default_conf, fee, mocker, tres, pair, testdatadir)
     ) == len(data['NXT/BTC']) - 1 - backtesting.strategy.startup_candle_count

     backtest_conf = {
-        'processed': processed,
+        'processed': deepcopy(processed),
         'start_date': min_date,
         'end_date': max_date,
         'max_open_trades': 1,
@@ -172,6 +172,7 @@ def test_start_no_hyperopt_allowed(mocker, hyperopt_conf, caplog) -> None:


 def test_start_no_data(mocker, hyperopt_conf) -> None:
+    hyperopt_conf['user_data_dir'] = Path("tests")
     patched_configuration_load_config_file(mocker, hyperopt_conf)
     mocker.patch('freqtrade.data.history.load_pair_history', MagicMock(return_value=pd.DataFrame))
     mocker.patch(
@@ -192,6 +193,12 @@ def test_start_no_data(mocker, hyperopt_conf) -> None:
     with pytest.raises(OperationalException, match='No data found. Terminating.'):
         start_hyperopt(pargs)

+    # Cleanup since that failed hyperopt start leaves a lockfile.
+    try:
+        Path(Hyperopt.get_lock_filename(hyperopt_conf)).unlink()
+    except Exception:
+        pass
+

 def test_start_filelock(mocker, hyperopt_conf, caplog) -> None:
     hyperopt_mock = MagicMock(side_effect=Timeout(Hyperopt.get_lock_filename(hyperopt_conf)))
@@ -1,5 +1,6 @@
 # pragma pylint: disable=missing-docstring,C0103,protected-access

+import logging
 import time
 from unittest.mock import MagicMock, PropertyMock

@@ -14,7 +15,7 @@ from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
 from freqtrade.plugins.pairlistmanager import PairListManager
 from freqtrade.resolvers import PairListResolver
 from tests.conftest import (create_mock_trades, get_patched_exchange, get_patched_freqtradebot,
-                            log_has, log_has_re)
+                            log_has, log_has_re, num_log_has)


 @pytest.fixture(scope="function")
@@ -217,6 +218,34 @@ def test_invalid_blacklist(mocker, markets, static_pl_conf, caplog):
     log_has_re(r"Pair blacklist contains an invalid Wildcard.*", caplog)


+def test_remove_logs_for_pairs_already_in_blacklist(mocker, markets, static_pl_conf, caplog):
+    logger = logging.getLogger(__name__)
+    freqtrade = get_patched_freqtradebot(mocker, static_pl_conf)
+    mocker.patch.multiple(
+        'freqtrade.exchange.Exchange',
+        exchange_has=MagicMock(return_value=True),
+        markets=PropertyMock(return_value=markets),
+    )
+    freqtrade.pairlists.refresh_pairlist()
+    whitelist = ['ETH/BTC', 'TKN/BTC']
+    caplog.clear()
+    caplog.set_level(logging.INFO)
+
+    # Ensure all except those in whitelist are removed.
+    assert set(whitelist) == set(freqtrade.pairlists.whitelist)
+    assert static_pl_conf['exchange']['pair_blacklist'] == freqtrade.pairlists.blacklist
+    # Ensure that log message wasn't generated.
+    assert not log_has('Pair BLK/BTC in your blacklist. Removing it from whitelist...', caplog)
+
+    for _ in range(3):
+        new_whitelist = freqtrade.pairlists.verify_blacklist(
+            whitelist + ['BLK/BTC'], logger.warning)
+        # Ensure that the pair is removed from the white list, and properly logged.
+        assert set(whitelist) == set(new_whitelist)
+    assert num_log_has('Pair BLK/BTC in your blacklist. Removing it from whitelist...',
+                       caplog) == 1
+
+
 def test_refresh_pairlist_dynamic(mocker, shitcoinmarkets, tickers, whitelist_conf):

     mocker.patch.multiple(
@@ -1106,33 +1135,34 @@ def test_pairlistmanager_no_pairlist(mocker, whitelist_conf):
     # Happy path: Descending order, all values filled
     ([{"method": "StaticPairList"}, {"method": "PerformanceFilter"}],
      ['ETH/BTC', 'TKN/BTC'],
-     [{'pair': 'TKN/BTC', 'profit': 5, 'count': 3}, {'pair': 'ETH/BTC', 'profit': 4, 'count': 2}],
+     [{'pair': 'TKN/BTC', 'profit_ratio': 0.05, 'count': 3},
+      {'pair': 'ETH/BTC', 'profit_ratio': 0.04, 'count': 2}],
      ['TKN/BTC', 'ETH/BTC']),
     # Performance data outside allow list ignored
     ([{"method": "StaticPairList"}, {"method": "PerformanceFilter"}],
      ['ETH/BTC', 'TKN/BTC'],
-     [{'pair': 'OTHER/BTC', 'profit': 5, 'count': 3},
-      {'pair': 'ETH/BTC', 'profit': 4, 'count': 2}],
+     [{'pair': 'OTHER/BTC', 'profit_ratio': 0.05, 'count': 3},
+      {'pair': 'ETH/BTC', 'profit_ratio': 0.04, 'count': 2}],
      ['ETH/BTC', 'TKN/BTC']),
     # Partial performance data missing and sorted between positive and negative profit
     ([{"method": "StaticPairList"}, {"method": "PerformanceFilter"}],
      ['ETH/BTC', 'TKN/BTC', 'LTC/BTC'],
-     [{'pair': 'ETH/BTC', 'profit': -5, 'count': 100},
-      {'pair': 'TKN/BTC', 'profit': 4, 'count': 2}],
+     [{'pair': 'ETH/BTC', 'profit_ratio': -0.05, 'count': 100},
+      {'pair': 'TKN/BTC', 'profit_ratio': 0.04, 'count': 2}],
      ['TKN/BTC', 'LTC/BTC', 'ETH/BTC']),
     # Tie in performance data broken by count (ascending)
     ([{"method": "StaticPairList"}, {"method": "PerformanceFilter"}],
      ['ETH/BTC', 'TKN/BTC', 'LTC/BTC'],
-     [{'pair': 'LTC/BTC', 'profit': -5.01, 'count': 101},
-      {'pair': 'TKN/BTC', 'profit': -5.01, 'count': 2},
-      {'pair': 'ETH/BTC', 'profit': -5.01, 'count': 100}],
+     [{'pair': 'LTC/BTC', 'profit_ratio': -0.0501, 'count': 101},
+      {'pair': 'TKN/BTC', 'profit_ratio': -0.0501, 'count': 2},
+      {'pair': 'ETH/BTC', 'profit_ratio': -0.0501, 'count': 100}],
      ['TKN/BTC', 'ETH/BTC', 'LTC/BTC']),
     # Tie in performance and count, broken by alphabetical sort
     ([{"method": "StaticPairList"}, {"method": "PerformanceFilter"}],
      ['ETH/BTC', 'TKN/BTC', 'LTC/BTC'],
-     [{'pair': 'LTC/BTC', 'profit': -5.01, 'count': 1},
-      {'pair': 'TKN/BTC', 'profit': -5.01, 'count': 1},
-      {'pair': 'ETH/BTC', 'profit': -5.01, 'count': 1}],
+     [{'pair': 'LTC/BTC', 'profit_ratio': -0.0501, 'count': 1},
+      {'pair': 'TKN/BTC', 'profit_ratio': -0.0501, 'count': 1},
+      {'pair': 'ETH/BTC', 'profit_ratio': -0.0501, 'count': 1}],
      ['ETH/BTC', 'LTC/BTC', 'TKN/BTC']),
 ])
 def test_performance_filter(mocker, whitelist_conf, pairlists, pair_allowlist, overall_performance,
@@ -440,7 +440,7 @@ def test_rpc_trade_statistics(default_conf, ticker, ticker_sell_up, fee,
     assert stats['trade_count'] == 2
     assert stats['first_trade_date'] == 'just now'
     assert stats['latest_trade_date'] == 'just now'
-    assert stats['avg_duration'] in ('0:00:00', '0:00:01')
+    assert stats['avg_duration'] in ('0:00:00', '0:00:01', '0:00:02')
     assert stats['best_pair'] == 'ETH/BTC'
     assert prec_satoshi(stats['best_rate'], 6.2)

@@ -451,7 +451,7 @@ def test_rpc_trade_statistics(default_conf, ticker, ticker_sell_up, fee,
     assert stats['trade_count'] == 2
     assert stats['first_trade_date'] == 'just now'
     assert stats['latest_trade_date'] == 'just now'
-    assert stats['avg_duration'] in ('0:00:00', '0:00:01')
+    assert stats['avg_duration'] in ('0:00:00', '0:00:01', '0:00:02')
     assert stats['best_pair'] == 'ETH/BTC'
     assert prec_satoshi(stats['best_rate'], 6.2)
     assert isnan(stats['profit_all_coin'])
@@ -1241,6 +1241,16 @@ def test_rpc_blacklist(mocker, default_conf) -> None:
     assert 'errors' in ret
     assert isinstance(ret['errors'], dict)
+
+    ret = rpc._rpc_blacklist_delete(["DOGE/BTC", 'HOT/BTC'])
+
+    assert 'StaticPairList' in ret['method']
+    assert len(ret['blacklist']) == 2
+    assert ret['blacklist'] == default_conf['exchange']['pair_blacklist']
+    assert ret['blacklist'] == ['ETH/BTC', 'XRP/.*']
+    assert ret['blacklist_expanded'] == ['ETH/BTC', 'XRP/BTC', 'XRP/USDT']
+    assert 'errors' in ret
+    assert isinstance(ret['errors'], dict)


 def test_rpc_edge_disabled(mocker, default_conf) -> None:
     mocker.patch('freqtrade.rpc.telegram.Telegram', MagicMock())
@@ -1016,6 +1016,38 @@ def test_api_blacklist(botclient, mocker):
                          "errors": {},
                          }
+
+    rc = client_delete(client, f"{BASE_URI}/blacklist?pairs_to_delete=DOGE/BTC")
+    assert_response(rc)
+    assert rc.json() == {"blacklist": ["HOT/BTC", "ETH/BTC", "XRP/.*"],
+                         "blacklist_expanded": ["ETH/BTC", "XRP/BTC", "XRP/USDT"],
+                         "length": 3,
+                         "method": ["StaticPairList"],
+                         "errors": {},
+                         }
+
+    rc = client_delete(client, f"{BASE_URI}/blacklist?pairs_to_delete=NOTHING/BTC")
+    assert_response(rc)
+    assert rc.json() == {"blacklist": ["HOT/BTC", "ETH/BTC", "XRP/.*"],
+                         "blacklist_expanded": ["ETH/BTC", "XRP/BTC", "XRP/USDT"],
+                         "length": 3,
+                         "method": ["StaticPairList"],
+                         "errors": {
+                             "NOTHING/BTC": {
+                                 "error_msg": "Pair NOTHING/BTC is not in the current blacklist."
+                             }
+                         },
+                         }
+    rc = client_delete(
+        client,
+        f"{BASE_URI}/blacklist?pairs_to_delete=HOT/BTC&pairs_to_delete=ETH/BTC")
+    assert_response(rc)
+    assert rc.json() == {"blacklist": ["XRP/.*"],
+                         "blacklist_expanded": ["XRP/BTC", "XRP/USDT"],
+                         "length": 1,
+                         "method": ["StaticPairList"],
+                         "errors": {},
+                         }


 def test_api_whitelist(botclient):
     ftbot, client = botclient
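Note (illustrative, not part of the change): the new assertions above drive blacklist deletion through the REST API by sending DELETE requests with one or more pairs_to_delete query parameters. A sketch of the same call made from an external client; the host, port, /api/v1 prefix and credentials are assumptions about a typical local setup, not taken from this diff.

import requests

resp = requests.delete(
    "http://127.0.0.1:8080/api/v1/blacklist",
    params={"pairs_to_delete": ["HOT/BTC", "ETH/BTC"]},  # list -> repeated query parameter
    auth=("freqtrader", "SuperSecretPassword"),          # HTTP basic auth, if configured
)
data = resp.json()
print(data["blacklist"])           # remaining blacklist entries, e.g. ["XRP/.*"]
print(data["blacklist_expanded"])  # wildcard entries expanded against known markets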
@@ -98,7 +98,7 @@ def test_telegram_init(default_conf, mocker, caplog) -> None:
                    "['stats'], ['daily'], ['weekly'], ['monthly'], "
                    "['count'], ['locks'], ['unlock', 'delete_locks'], "
                    "['reload_config', 'reload_conf'], ['show_config', 'show_conf'], "
-                   "['stopbuy'], ['whitelist'], ['blacklist'], "
+                   "['stopbuy'], ['whitelist'], ['blacklist'], ['blacklist_delete', 'bl_delete'], "
                    "['logs'], ['edge'], ['help'], ['version']"
                    "]")

@@ -587,7 +587,7 @@ def test_monthly_handle(default_conf, update, ticker, limit_buy_order, fee,
     assert 'Monthly Profit over the last 2 months</b>:' in msg_mock.call_args_list[0][0][0]
     assert 'Month ' in msg_mock.call_args_list[0][0][0]
     today = datetime.utcnow().date()
-    current_month = f"{today.year}-{today.month} "
+    current_month = f"{today.year}-{today.month:02} "
     assert current_month in msg_mock.call_args_list[0][0][0]
     assert str(' 0.00006217 BTC') in msg_mock.call_args_list[0][0][0]
     assert str(' 0.933 USD') in msg_mock.call_args_list[0][0][0]
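Note (illustrative, not part of the change): the only edit in this hunk is the `:02` format spec, which zero-pads the month so the assertion keeps matching the bot's monthly report for single-digit months.

from datetime import date

today = date(2021, 9, 1)                                # hypothetical date for illustration
assert f"{today.year}-{today.month:02} " == "2021-09 "  # new, zero-padded format
assert f"{today.year}-{today.month} " == "2021-9 "      # old format that no longer matches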
@@ -958,6 +958,7 @@ def test_telegram_forcesell_handle(default_conf, update, ticker, fee,
         'profit_amount': 6.314e-05,
         'profit_ratio': 0.0629778,
         'stake_currency': 'BTC',
+        'base_currency': 'ETH',
         'fiat_currency': 'USD',
         'buy_tag': ANY,
         'enter_tag': ANY,
@@ -1025,6 +1026,7 @@ def test_telegram_forcesell_down_handle(default_conf, update, ticker, fee,
         'profit_amount': -5.497e-05,
         'profit_ratio': -0.05482878,
         'stake_currency': 'BTC',
+        'base_currency': 'ETH',
         'fiat_currency': 'USD',
         'buy_tag': ANY,
         'enter_tag': ANY,
@@ -1082,6 +1084,7 @@ def test_forcesell_all_handle(default_conf, update, ticker, fee, mocker) -> None
         'profit_amount': -4.09e-06,
         'profit_ratio': -0.00408133,
         'stake_currency': 'BTC',
+        'base_currency': 'ETH',
         'fiat_currency': 'USD',
         'buy_tag': ANY,
         'enter_tag': ANY,
@@ -1483,6 +1486,13 @@ def test_blacklist_static(default_conf, update, mocker) -> None:
            in msg_mock.call_args_list[0][0][0])
     assert freqtradebot.pairlists.blacklist == ["DOGE/BTC", "HOT/BTC", "ETH/BTC", "XRP/.*"]
+
+    msg_mock.reset_mock()
+    context.args = ["DOGE/BTC"]
+    telegram._blacklist_delete(update=update, context=context)
+    assert msg_mock.call_count == 1
+    assert ("Blacklist contains 3 pairs\n`HOT/BTC, ETH/BTC, XRP/.*`"
+            in msg_mock.call_args_list[0][0][0])


 def test_telegram_logs(default_conf, update, mocker) -> None:
     mocker.patch.multiple(
@@ -2091,7 +2091,7 @@ def test_handle_trade_roi(default_conf_usdt, ticker_usdt, limit_order_open, fee,
     # executing
     # if ROI is reached we must sell
     caplog.clear()
-    patch_get_signal(freqtrade, enter_long=False, exit_long=not is_short, exit_short=is_short)
+    patch_get_signal(freqtrade)
     assert freqtrade.handle_trade(trade)
     assert log_has("ETH/USDT - Required profit reached. sell_type=SellType.ROI",
                    caplog)
@@ -2416,10 +2416,20 @@ def test_check_handle_timedout_sell_usercustom(
     assert open_trade_usdt.is_open is True
     assert freqtrade.strategy.check_sell_timeout.call_count == 1

-    # 2nd canceled trade ...
+    # 2nd canceled trade - Fail execute sell
     caplog.clear()
     open_trade_usdt.open_order_id = 'order_id_2'
     mocker.patch('freqtrade.persistence.Trade.get_exit_order_count', return_value=1)
+    mocker.patch('freqtrade.freqtradebot.FreqtradeBot.execute_trade_exit',
+                 side_effect=DependencyException)
+    freqtrade.check_handle_timedout()
+    assert log_has_re('Unable to emergency sell .*', caplog)
+
+    et_mock = mocker.patch('freqtrade.freqtradebot.FreqtradeBot.execute_trade_exit')
+    caplog.clear()
+
+    # 2nd canceled trade ...
+    open_trade_usdt.open_order_id = 'order_id_2'
     freqtrade.check_handle_timedout()
     assert log_has_re('Emergencyselling trade.*', caplog)
     assert et_mock.call_count == 1
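Note (illustrative, not part of the change): the added lines above patch execute_trade_exit with side_effect=DependencyException so that the emergency-sell attempt fails and the "Unable to emergency sell" log line is produced. A minimal, self-contained sketch of that mocking pattern, using plain unittest.mock instead of the mocker fixture and a stand-in exception class.

from unittest.mock import MagicMock


class DependencyException(Exception):
    """Stand-in for freqtrade.exceptions.DependencyException."""


# side_effect makes every call to the mock raise instead of returning a value.
execute_trade_exit = MagicMock(side_effect=DependencyException)

try:
    execute_trade_exit()
except DependencyException:
    print("exit failed -> the bot is expected to log 'Unable to emergency sell ...'")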
@@ -3602,9 +3612,9 @@ def test_ignore_roi_if_buy_signal(default_conf_usdt, limit_order, limit_order_op

     # Test if buy-signal is absent (should sell due to roi = true)
     if is_short:
-        patch_get_signal(freqtrade, enter_long=False, exit_short=True)
+        patch_get_signal(freqtrade, enter_long=False, exit_short=False)
     else:
-        patch_get_signal(freqtrade, enter_long=False, exit_long=True)
+        patch_get_signal(freqtrade, enter_long=False, exit_long=False)
     assert freqtrade.handle_trade(trade) is True
     assert trade.sell_reason == SellType.ROI.value

@@ -3808,12 +3818,11 @@ def test_disable_ignore_roi_if_buy_signal(default_conf_usdt, limit_order, limit_
     trade.is_short = is_short
     trade.update(limit_order[enter_side(is_short)])
     # Sell due to min_roi_reached
-    patch_get_signal(freqtrade, enter_long=not is_short, exit_long=not is_short,
-                     enter_short=is_short, exit_short=is_short)
+    patch_get_signal(freqtrade, enter_long=not is_short, enter_short=is_short, exit_short=is_short)
     assert freqtrade.handle_trade(trade) is True

     # Test if buy-signal is absent
-    patch_get_signal(freqtrade, enter_long=False, exit_long=not is_short, exit_short=is_short)
+    patch_get_signal(freqtrade)
     assert freqtrade.handle_trade(trade) is True
     assert trade.sell_reason == SellType.ROI.value

@@ -185,16 +185,18 @@ def test_render_template_fallback(mocker):
     assert 'if self.dp' in val


-def test_parse_db_uri_for_logging() -> None:
-    postgresql_conn_uri = "postgresql+psycopg2://scott123:scott123@host/dbname"
-    mariadb_conn_uri = "mariadb+mariadbconnector://app_user:Password123!@127.0.0.1:3306/company"
-    mysql_conn_uri = "mysql+pymysql://user:pass@some_mariadb/dbname?charset=utf8mb4"
-    sqlite_conn_uri = "sqlite:////freqtrade/user_data/tradesv3.sqlite"
-    censored_pwd = "*****"
+@pytest.mark.parametrize('conn_url,expected', [
+    ("postgresql+psycopg2://scott123:scott123@host:1245/dbname",
+     "postgresql+psycopg2://scott123:*****@host:1245/dbname"),
+    ("postgresql+psycopg2://scott123:scott123@host.name.com/dbname",
+     "postgresql+psycopg2://scott123:*****@host.name.com/dbname"),
+    ("mariadb+mariadbconnector://app_user:Password123!@127.0.0.1:3306/company",
+     "mariadb+mariadbconnector://app_user:*****@127.0.0.1:3306/company"),
+    ("mysql+pymysql://user:pass@some_mariadb/dbname?charset=utf8mb4",
+     "mysql+pymysql://user:*****@some_mariadb/dbname?charset=utf8mb4"),
+    ("sqlite:////freqtrade/user_data/tradesv3.sqlite",
+     "sqlite:////freqtrade/user_data/tradesv3.sqlite"),
+])
+def test_parse_db_uri_for_logging(conn_url, expected) -> None:

-    def get_pwd(x): return x.split(':')[2].split('@')[0]
-
-    assert get_pwd(parse_db_uri_for_logging(postgresql_conn_uri)) == censored_pwd
-    assert get_pwd(parse_db_uri_for_logging(mariadb_conn_uri)) == censored_pwd
-    assert get_pwd(parse_db_uri_for_logging(mysql_conn_uri)) == censored_pwd
-    assert sqlite_conn_uri == parse_db_uri_for_logging(sqlite_conn_uri)
+    assert parse_db_uri_for_logging(conn_url) == expected
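Note (illustrative, not part of the change): the rewritten test feeds full connection URLs through parse_db_uri_for_logging and expects only the password portion to be replaced with *****, while URLs without credentials (sqlite) pass through unchanged. One way such masking can be implemented is sketched below with a hypothetical helper name; this is not freqtrade's actual implementation.

from urllib.parse import urlsplit, urlunsplit


def censor_db_password(uri: str) -> str:
    """Replace the password in a SQLAlchemy-style URL with ***** (sketch only)."""
    parts = urlsplit(uri)
    if parts.password is None:
        return uri  # e.g. sqlite:////path/to/db.sqlite has no credentials
    netloc = f"{parts.username}:*****@{parts.hostname}"
    if parts.port:
        netloc += f":{parts.port}"
    return urlunsplit(parts._replace(netloc=netloc))


assert censor_db_password(
    "mysql+pymysql://user:pass@some_mariadb/dbname?charset=utf8mb4"
) == "mysql+pymysql://user:*****@some_mariadb/dbname?charset=utf8mb4"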
@@ -43,7 +43,7 @@ def test_worker_stopped(mocker, default_conf, caplog) -> None:
     worker.freqtrade.state = State.STOPPED
     state = worker._worker(old_state=State.RUNNING)
     assert state is State.STOPPED
-    assert log_has('Changing state to: STOPPED', caplog)
+    assert log_has('Changing state from RUNNING to: STOPPED', caplog)
     assert mock_throttle.call_count == 1
