Compare commits: use-parque...develop (337 commits)
.github/workflows/ci.yml

````diff
@@ -425,7 +425,7 @@ jobs:
         python setup.py sdist bdist_wheel

     - name: Publish to PyPI (Test)
-      uses: pypa/gh-action-pypi-publish@v1.8.1
+      uses: pypa/gh-action-pypi-publish@v1.8.5
      if: (github.event_name == 'release')
      with:
        user: __token__
@@ -433,7 +433,7 @@ jobs:
        repository_url: https://test.pypi.org/legacy/

    - name: Publish to PyPI
-      uses: pypa/gh-action-pypi-publish@v1.8.1
+      uses: pypa/gh-action-pypi-publish@v1.8.5
      if: (github.event_name == 'release')
      with:
        user: __token__
````
.pre-commit-config.yaml

````diff
@@ -13,12 +13,12 @@ repos:
       - id: mypy
         exclude: build_helpers
         additional_dependencies:
-          - types-cachetools==5.3.0.4
+          - types-cachetools==5.3.0.5
           - types-filelock==3.2.7
-          - types-requests==2.28.11.15
-          - types-tabulate==0.9.0.1
-          - types-python-dateutil==2.8.19.10
-          - SQLAlchemy==2.0.7
+          - types-requests==2.28.11.17
+          - types-tabulate==0.9.0.2
+          - types-python-dateutil==2.8.19.12
+          - SQLAlchemy==2.0.9
         # stages: [push]

   - repo: https://github.com/pycqa/isort
````
Dockerfile

````diff
@@ -1,4 +1,4 @@
-FROM python:3.10.10-slim-bullseye as base
+FROM python:3.10.11-slim-bullseye as base

 # Setup env
 ENV LANG C.UTF-8
````
build_helpers/publish_docker_arm64.sh

````diff
@@ -12,6 +12,7 @@ TAG=$(echo "${BRANCH_NAME}" | sed -e "s/\//_/g")
 TAG_PLOT=${TAG}_plot
 TAG_FREQAI=${TAG}_freqai
 TAG_FREQAI_RL=${TAG_FREQAI}rl
+TAG_FREQAI_TORCH=${TAG_FREQAI}torch
 TAG_PI="${TAG}_pi"

 TAG_ARM=${TAG}_arm
@@ -42,9 +43,9 @@ if [ $? -ne 0 ]; then
     return 1
 fi

-docker build --cache-from freqtrade:${TAG_ARM} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_PLOT_ARM} -f docker/Dockerfile.plot .
-docker build --cache-from freqtrade:${TAG_ARM} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_FREQAI_ARM} -f docker/Dockerfile.freqai .
-docker build --cache-from freqtrade:${TAG_ARM} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_FREQAI_RL_ARM} -f docker/Dockerfile.freqai_rl .
+docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_PLOT_ARM} -f docker/Dockerfile.plot .
+docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_FREQAI_ARM} -f docker/Dockerfile.freqai .
+docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG_FREQAI_ARM} -t freqtrade:${TAG_FREQAI_RL_ARM} -f docker/Dockerfile.freqai_rl .

 # Tag image for upload and next build step
 docker tag freqtrade:$TAG_ARM ${CACHE_IMAGE}:$TAG_ARM
@@ -84,6 +85,10 @@ docker manifest push -p ${IMAGE_NAME}:${TAG_FREQAI}
 docker manifest create ${IMAGE_NAME}:${TAG_FREQAI_RL} ${CACHE_IMAGE}:${TAG_FREQAI_RL} ${CACHE_IMAGE}:${TAG_FREQAI_RL_ARM}
 docker manifest push -p ${IMAGE_NAME}:${TAG_FREQAI_RL}

+# Create special Torch tag - which is identical to the RL tag.
+docker manifest create ${IMAGE_NAME}:${TAG_FREQAI_TORCH} ${CACHE_IMAGE}:${TAG_FREQAI_RL} ${CACHE_IMAGE}:${TAG_FREQAI_RL_ARM}
+docker manifest push -p ${IMAGE_NAME}:${TAG_FREQAI_TORCH}
+
 # copy images to ghcr.io

 alias crane="docker run --rm -i -v $(pwd)/.crane:/home/nonroot/.docker/ gcr.io/go-containerregistry/crane"
@@ -93,6 +98,7 @@ chmod a+rwx .crane
 echo "${GHCR_TOKEN}" | crane auth login ghcr.io -u "${GHCR_USERNAME}" --password-stdin

 crane copy ${IMAGE_NAME}:${TAG_FREQAI_RL} ${GHCR_IMAGE_NAME}:${TAG_FREQAI_RL}
+crane copy ${IMAGE_NAME}:${TAG_FREQAI_RL} ${GHCR_IMAGE_NAME}:${TAG_FREQAI_TORCH}
 crane copy ${IMAGE_NAME}:${TAG_FREQAI} ${GHCR_IMAGE_NAME}:${TAG_FREQAI}
 crane copy ${IMAGE_NAME}:${TAG_PLOT} ${GHCR_IMAGE_NAME}:${TAG_PLOT}
 crane copy ${IMAGE_NAME}:${TAG} ${GHCR_IMAGE_NAME}:${TAG}
````
build_helpers/publish_docker_multi.sh

````diff
@@ -58,9 +58,9 @@ fi
 # Tag image for upload and next build step
 docker tag freqtrade:$TAG ${CACHE_IMAGE}:$TAG

-docker build --cache-from freqtrade:${TAG} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG} -t freqtrade:${TAG_PLOT} -f docker/Dockerfile.plot .
-docker build --cache-from freqtrade:${TAG} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG} -t freqtrade:${TAG_FREQAI} -f docker/Dockerfile.freqai .
-docker build --cache-from freqtrade:${TAG_FREQAI} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG_FREQAI} -t freqtrade:${TAG_FREQAI_RL} -f docker/Dockerfile.freqai_rl .
+docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG} -t freqtrade:${TAG_PLOT} -f docker/Dockerfile.plot .
+docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG} -t freqtrade:${TAG_FREQAI} -f docker/Dockerfile.freqai .
+docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG_FREQAI} -t freqtrade:${TAG_FREQAI_RL} -f docker/Dockerfile.freqai_rl .

 docker tag freqtrade:$TAG_PLOT ${CACHE_IMAGE}:$TAG_PLOT
 docker tag freqtrade:$TAG_FREQAI ${CACHE_IMAGE}:$TAG_FREQAI
````
docs/assets/freqai_pytorch-diagram.png (new binary file, 18 KiB; binary not shown)
docs/backtesting.md

````diff
@@ -274,19 +274,20 @@ A backtesting result will look like that:
 | XRP/BTC | 35 | 0.66 | 22.96 | 0.00114897 | 11.48 | 3:49:00 | 12 0 23 34.3 |
 | ZEC/BTC | 22 | -0.46 | -10.18 | -0.00050971 | -5.09 | 2:22:00 | 7 0 15 31.8 |
 | TOTAL | 429 | 0.36 | 152.41 | 0.00762792 | 76.20 | 4:12:00 | 186 0 243 43.4 |
-========================================================= EXIT REASON STATS ==========================================================
-| Exit Reason        |   Exits |   Wins |   Draws |   Losses |
-|:-------------------|--------:|------:|-------:|--------:|
-| trailing_stop_loss |     205 |    150 |      0 |      55 |
-| stop_loss          |     166 |      0 |      0 |     166 |
-| exit_signal        |      56 |     36 |      0 |      20 |
-| force_exit         |       2 |      0 |      0 |       2 |
 ====================================================== LEFT OPEN TRADES REPORT ======================================================
 | Pair | Entries | Avg Profit % | Cum Profit % | Tot Profit BTC | Tot Profit % | Avg Duration | Win Draw Loss Win% |
 |:---------|---------:|---------------:|---------------:|-----------------:|---------------:|:---------------|--------------------:|
 | ADA/BTC | 1 | 0.89 | 0.89 | 0.00004434 | 0.44 | 6:00:00 | 1 0 0 100 |
 | LTC/BTC | 1 | 0.68 | 0.68 | 0.00003421 | 0.34 | 2:00:00 | 1 0 0 100 |
 | TOTAL | 2 | 0.78 | 1.57 | 0.00007855 | 0.78 | 4:00:00 | 2 0 0 100 |
+==================== EXIT REASON STATS ====================
+| Exit Reason        |   Exits |   Wins |   Draws |   Losses |
+|:-------------------|--------:|------:|-------:|--------:|
+| trailing_stop_loss |     205 |    150 |      0 |      55 |
+| stop_loss          |     166 |      0 |      0 |     166 |
+| exit_signal        |      56 |     36 |      0 |      20 |
+| force_exit         |       2 |      0 |      0 |       2 |

 ================== SUMMARY METRICS ==================
 | Metric                      | Value               |
 |-----------------------------+---------------------|
````
docs/bot-basics.md

````diff
@@ -60,10 +60,10 @@ This loop will be repeated again and again until the bot is stopped.

 * Load historic data for configured pairlist.
 * Calls `bot_start()` once.
-* Calls `bot_loop_start()` once.
 * Calculate indicators (calls `populate_indicators()` once per pair).
 * Calculate entry / exit signals (calls `populate_entry_trend()` and `populate_exit_trend()` once per pair).
 * Loops per candle simulating entry and exit points.
+  * Calls `bot_loop_start()` strategy callback.
   * Check for Order timeouts, either via the `unfilledtimeout` configuration, or via `check_entry_timeout()` / `check_exit_timeout()` strategy callbacks.
   * Calls `adjust_entry_price()` strategy callback for open entry orders.
   * Check for trade entry signals (`enter_long` / `enter_short` columns).
````
docs/freqai-configuration.md

````diff
@@ -236,3 +236,161 @@ If you want to predict multiple targets you must specify all labels in the same
     df['&s-up_or_down'] = np.where( df["close"].shift(-100) > df["close"], 'up', 'down')
     df['&s-up_or_down'] = np.where( df["close"].shift(-100) == df["close"], 'same', df['&s-up_or_down'])
     ```
+
+## PyTorch Module
+
+### Quick start
+
+The easiest way to quickly run a PyTorch model is with the following command (for a regression task):
+
+```bash
+freqtrade trade --config config_examples/config_freqai.example.json --strategy FreqaiExampleStrategy --freqaimodel PyTorchMLPRegressor --strategy-path freqtrade/templates
+```
+
+!!! note "Installation/docker"
+    The PyTorch module requires large packages such as `torch`, which should be explicitly requested during `./setup.sh -i` by answering "y" to the question "Do you also want dependencies for freqai-rl or PyTorch (~700mb additional space required) [y/N]?".
+    Users who prefer docker should ensure they use the docker image appended with `_freqaitorch`.
+
+### Structure
+
+#### Model
+
+You can construct your own Neural Network architecture in PyTorch by simply defining your `nn.Module` class inside your custom [`IFreqaiModel` file](#using-different-prediction-models) and then using that class in your `def train()` function. Here is an example of a logistic regression model implemented with PyTorch (it should be used with the `nn.BCELoss` criterion) for classification tasks.
+
+```python
+class LogisticRegression(nn.Module):
+    def __init__(self, input_size: int):
+        super().__init__()
+        # Define your layers
+        self.linear = nn.Linear(input_size, 1)
+        self.activation = nn.Sigmoid()
+
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        # Define the forward pass
+        out = self.linear(x)
+        out = self.activation(out)
+        return out
+
+
+class MyCoolPyTorchClassifier(BasePyTorchClassifier):
+    """
+    This is a custom IFreqaiModel showing how a user might set up their own
+    custom Neural Network architecture for their training.
+    """
+
+    @property
+    def data_convertor(self) -> PyTorchDataConvertor:
+        return DefaultPyTorchDataConvertor(target_tensor_type=torch.float)
+
+    def __init__(self, **kwargs) -> None:
+        super().__init__(**kwargs)
+        config = self.freqai_info.get("model_training_parameters", {})
+        self.learning_rate: float = config.get("learning_rate", 3e-4)
+        self.model_kwargs: Dict[str, Any] = config.get("model_kwargs", {})
+        self.trainer_kwargs: Dict[str, Any] = config.get("trainer_kwargs", {})
+
+    def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
+        """
+        User sets up the training and test data to fit their desired model here
+        :param data_dictionary: the dictionary holding all data for train, test,
+            labels, weights
+        :param dk: The datakitchen object for the current coin/model
+        """
+        class_names = self.get_class_names()
+        self.convert_label_column_to_int(data_dictionary, dk, class_names)
+        n_features = data_dictionary["train_features"].shape[-1]
+        model = LogisticRegression(
+            input_size=n_features
+        )
+        model.to(self.device)
+        optimizer = torch.optim.AdamW(model.parameters(), lr=self.learning_rate)
+        criterion = torch.nn.BCELoss()
+        init_model = self.get_init_model(dk.pair)
+        trainer = PyTorchModelTrainer(
+            model=model,
+            optimizer=optimizer,
+            criterion=criterion,
+            model_meta_data={"class_names": class_names},
+            device=self.device,
+            init_model=init_model,
+            data_convertor=self.data_convertor,
+            **self.trainer_kwargs,
+        )
+        trainer.fit(data_dictionary, self.splits)
+        return trainer
+```
+
+#### Trainer
+
+The `PyTorchModelTrainer` performs the idiomatic PyTorch train loop:
+define the model, loss function, and optimizer, and move them to the appropriate device (GPU or CPU). Inside the loop, we iterate through the batches in the dataloader, move the data to the device, compute the prediction and loss, backpropagate, and update the model parameters using the optimizer.
+
+In addition, the trainer is responsible for the following:
+
+- saving and loading the model
+- converting the data from `pandas.DataFrame` to `torch.Tensor`.
+
+#### Integration with Freqai module
+
+Like all freqai models, PyTorch models inherit `IFreqaiModel`. `IFreqaiModel` declares three abstract methods: `train`, `fit`, and `predict`. We implement these methods in three levels of hierarchy.
+From top to bottom:
+
+1. `BasePyTorchModel` - Implements the `train` method. All `BasePyTorch*` classes inherit it. It is responsible for general data preparation (e.g., data normalization) and for calling the `fit` method. It sets the `device` attribute used by child classes and the `model_type` attribute used by the parent class.
+2. `BasePyTorch*` - Implements the `predict` method. Here, the `*` represents a group of algorithms, such as classifiers or regressors. It is responsible for data preprocessing, predicting, and postprocessing if needed.
+3. `PyTorch*Classifier` / `PyTorch*Regressor` - Implements the `fit` method. It is responsible for the main training flow, where we initialize the trainer and model objects.
+
+![image](assets/freqai_pytorch-diagram.png)
+
+#### Full example
+
+Building a PyTorch regressor using an MLP (multilayer perceptron) model, the MSELoss criterion, and the AdamW optimizer.
+
+```python
+class PyTorchMLPRegressor(BasePyTorchRegressor):
+    def __init__(self, **kwargs) -> None:
+        super().__init__(**kwargs)
+        config = self.freqai_info.get("model_training_parameters", {})
+        self.learning_rate: float = config.get("learning_rate", 3e-4)
+        self.model_kwargs: Dict[str, Any] = config.get("model_kwargs", {})
+        self.trainer_kwargs: Dict[str, Any] = config.get("trainer_kwargs", {})
+
+    def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
+        n_features = data_dictionary["train_features"].shape[-1]
+        model = PyTorchMLPModel(
+            input_dim=n_features,
+            output_dim=1,
+            **self.model_kwargs
+        )
+        model.to(self.device)
+        optimizer = torch.optim.AdamW(model.parameters(), lr=self.learning_rate)
+        criterion = torch.nn.MSELoss()
+        init_model = self.get_init_model(dk.pair)
+        trainer = PyTorchModelTrainer(
+            model=model,
+            optimizer=optimizer,
+            criterion=criterion,
+            device=self.device,
+            init_model=init_model,
+            target_tensor_type=torch.float,
+            **self.trainer_kwargs,
+        )
+        trainer.fit(data_dictionary)
+        return trainer
+```
+
+Here we create a `PyTorchMLPRegressor` class that implements the `fit` method. The `fit` method specifies the training building blocks: model, optimizer, criterion, and trainer. We inherit both `BasePyTorchRegressor` and `BasePyTorchModel`, where the former implements the `predict` method that is suitable for our regression task, and the latter implements the `train` method.
+
+??? Note "Setting Class Names for Classifiers"
+    When using classifiers, the user must declare the class names (or targets) by overriding the `IFreqaiModel.class_names` attribute. This is achieved by setting `self.freqai.class_names` in the FreqAI strategy inside the `set_freqai_targets` method.
+
+    For example, if you are using a binary classifier to predict price movements as up or down, you can set the class names as follows:
+    ```python
+    def set_freqai_targets(self, dataframe: DataFrame, metadata: Dict, **kwargs):
+        self.freqai.class_names = ["down", "up"]
+        dataframe['&s-up_or_down'] = np.where(dataframe["close"].shift(-100) >
+                                              dataframe["close"], 'up', 'down')
+
+        return dataframe
+    ```
+    To see a full example, you can refer to the [classifier test strategy class](https://github.com/freqtrade/freqtrade/blob/develop/tests/strategy/strats/freqai_test_classifier.py).
````
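To make the "idiomatic PyTorch train loop" described in the Trainer section above concrete, here is a minimal, self-contained sketch in plain PyTorch. It is not the actual `PyTorchModelTrainer` code; the toy data, batch size, and epoch count are arbitrary.

```python
import torch
from torch import nn
from torch.utils.data import DataLoader, TensorDataset

device = "cuda" if torch.cuda.is_available() else "cpu"

# Toy regression data: 100 samples with 4 features each.
features, targets = torch.randn(100, 4), torch.randn(100, 1)
loader = DataLoader(TensorDataset(features, targets), batch_size=16, shuffle=True)

# Define the model, loss function, and optimizer, and move the model to the device.
model = nn.Linear(4, 1).to(device)
criterion = nn.MSELoss()
optimizer = torch.optim.AdamW(model.parameters(), lr=3e-4)

for epoch in range(5):
    for xb, yb in loader:
        xb, yb = xb.to(device), yb.to(device)  # move the batch to the device
        loss = criterion(model(xb), yb)        # forward pass and loss
        optimizer.zero_grad()
        loss.backward()                        # backpropagate
        optimizer.step()                       # update the parameters
```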
docs/freqai-feature-engineering.md

````diff
@@ -6,8 +6,8 @@ Low level feature engineering is performed in the user strategy within a set of

 | Function | Description |
 |---------------|-------------|
-| `feature_engineering__expand_all()` | This optional function will automatically expand the defined features on the config defined `indicator_periods_candles`, `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`.
-| `feature_engineering__expand_basic()` | This optional function will automatically expand the defined features on the config defined `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`. Note: this function does *not* expand across `include_periods_candles`.
+| `feature_engineering_expand_all()` | This optional function will automatically expand the defined features on the config defined `indicator_periods_candles`, `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`.
+| `feature_engineering_expand_basic()` | This optional function will automatically expand the defined features on the config defined `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`. Note: this function does *not* expand across `include_periods_candles`.
 | `feature_engineering_standard()` | This optional function will be called once with the dataframe of the base timeframe. This is the final function to be called, which means that the dataframe entering this function will contain all the features and columns from the base asset created by the other `feature_engineering_expand` functions. This function is a good place to do custom exotic feature extractions (e.g. tsfresh). This function is also a good place for any feature that should not be auto-expanded upon (e.g., day of the week).
 | `set_freqai_targets()` | Required function to set the targets for the model. All targets must be prepended with `&` to be recognized by the FreqAI internals.

@@ -182,11 +182,11 @@ In total, the number of features the user of the presented example strat has created
 $= 3 * 3 * 3 * 2 * 2 = 108$.


-###  Gain finer control over `feature_engineering_*` functions with `metadata`
+### Gain finer control over `feature_engineering_*` functions with `metadata`

 All `feature_engineering_*` and `set_freqai_targets()` functions are passed a `metadata` dictionary which contains information about the `pair`, `tf` (timeframe), and `period` that FreqAI is automating for feature building. As such, a user can use `metadata` inside `feature_engineering_*` functions as criteria for blocking/reserving features for certain timeframes, periods, pairs etc.

-```py
+```python
 def feature_engineering_expand_all(self, dataframe, period, metadata, **kwargs):
     if metadata["tf"] == "1h":
         dataframe["%-roc-period"] = ta.ROC(dataframe, timeperiod=period)
````
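As a small illustration of the `&` target-prefix rule from the function table in this file's diff, here is a hedged sketch of a target definition; the label name and the 8-candle horizon are invented for the example.

```python
def set_freqai_targets(self, dataframe, metadata, **kwargs):
    # Labels must be prefixed with "&" so FreqAI recognizes them as targets.
    # Target: mean of the next 8 closes, relative to the current close.
    dataframe["&-future_mean_close"] = (
        dataframe["close"].shift(-8).rolling(8).mean() / dataframe["close"] - 1
    )
    return dataframe
```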
docs/freqai-parameter-table.md

````diff
@@ -46,7 +46,7 @@ Mandatory parameters are marked as **Required** and have to be set in one of the
 | `outlier_protection_percentage` | Enable to prevent outlier detection methods from discarding too much data. If more than `outlier_protection_percentage` % of points are detected as outliers by the SVM or DBSCAN, FreqAI will log a warning message and ignore outlier detection, i.e., the original dataset will be kept intact. If the outlier protection is triggered, no predictions will be made based on the training dataset. <br> **Datatype:** Float. <br> Default: `30`.
 | `reverse_train_test_order` | Split the feature dataset (see below) and use the latest data split for training and test on historical split of the data. This allows the model to be trained up to the most recent data point, while avoiding overfitting. However, you should be careful to understand the unorthodox nature of this parameter before employing it. <br> **Datatype:** Boolean. <br> Default: `False` (no reversal).
 | `shuffle_after_split` | Split the data into train and test sets, and then shuffle both sets individually. <br> **Datatype:** Boolean. <br> Default: `False`.
-| `buffer_train_data_candles` | Cut `buffer_train_data_candles` off the beginning and end of the training data *after* the indicators were populated. The main example use is when predicting maxima and minima, the argrelextrema function cannot know the maxima/minima at the edges of the timerange. To improve model accuracy, it is best to compute argrelextrema on the full timerange and then use this function to cut off the edges (buffer) by the kernel. In another case, if the targets are set to a shifted price movement, this buffer is unnecessary because the shifted candles at the end of the timerange will be NaN and FreqAI will automatically cut those off of the training dataset.<br> **Datatype:** Boolean. <br> Default: `False`.
+| `buffer_train_data_candles` | Cut `buffer_train_data_candles` off the beginning and end of the training data *after* the indicators were populated. The main example use is when predicting maxima and minima, the argrelextrema function cannot know the maxima/minima at the edges of the timerange. To improve model accuracy, it is best to compute argrelextrema on the full timerange and then use this function to cut off the edges (buffer) by the kernel. In another case, if the targets are set to a shifted price movement, this buffer is unnecessary because the shifted candles at the end of the timerange will be NaN and FreqAI will automatically cut those off of the training dataset.<br> **Datatype:** Integer. <br> Default: `0`.

 ### Data split parameters

@@ -86,6 +86,27 @@ Mandatory parameters are marked as **Required** and have to be set in one of the
 | `randomize_starting_position` | Randomize the starting point of each episode to avoid overfitting. <br> **Datatype:** bool. <br> Default: `False`.
 | `drop_ohlc_from_features` | Do not include the normalized ohlc data in the feature set passed to the agent during training (ohlc will still be used for driving the environment in all cases) <br> **Datatype:** Boolean. <br> **Default:** `False`

+### PyTorch parameters
+
+#### general
+
+| Parameter | Description |
+|------------|-------------|
+| | **Model training parameters within the `freqai.model_training_parameters` sub dictionary**
+| `learning_rate` | Learning rate to be passed to the optimizer. <br> **Datatype:** float. <br> Default: `3e-4`.
+| `model_kwargs` | Parameters to be passed to the model class. <br> **Datatype:** dict. <br> Default: `{}`.
+| `trainer_kwargs` | Parameters to be passed to the trainer class. <br> **Datatype:** dict. <br> Default: `{}`.
+
+#### trainer_kwargs
+
+| Parameter | Description |
+|------------|-------------|
+| | **Model training parameters within the `freqai.model_training_parameters.trainer_kwargs` sub dictionary**
+| `max_iters` | The number of training iterations to run. An iteration here refers to the number of times `self.optimizer.step()` is called; it is used to calculate `n_epochs`. <br> **Datatype:** int. <br> Default: `100`.
+| `batch_size` | The size of the batches to use during training. <br> **Datatype:** int. <br> Default: `64`.
+| `max_n_eval_batches` | The maximum number of batches to use for evaluation. <br> **Datatype:** int, optional. <br> Default: `None`.
+
+
 ### Additional parameters

 | Parameter | Description |
````
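For orientation, a sketch of where the PyTorch parameters documented above would sit in the configuration. The key names come from the tables; the values and the `model_kwargs` contents are illustrative only.

```python
# Hypothetical "freqai" section of a config, shown as a Python dict.
freqai_section = {
    "model_training_parameters": {
        "learning_rate": 3e-4,
        "model_kwargs": {"hidden_dim": 512, "dropout_percent": 0.2},
        "trainer_kwargs": {"max_iters": 100, "batch_size": 64, "max_n_eval_batches": 3},
    }
}
```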
docs/freqai-reinforcement-learning.md

````diff
@@ -55,7 +55,7 @@ where `ReinforcementLearner` will use the templated `ReinforcementLearner` from
         dataframe["&-action"] = 0
     ```

-Most of the function remains the same as for typical Regressors, however, the function above shows how the strategy must pass the raw price data to the agent so that it has access to raw OHLCV in the training environment:
+Most of the function remains the same as for typical Regressors, however, the function below shows how the strategy must pass the raw price data to the agent so that it has access to raw OHLCV in the training environment:

 ```python
 def feature_engineering_standard(self, dataframe, **kwargs):
@@ -180,7 +180,7 @@ As you begin to modify the strategy and the prediction model, you will quickly realize

         # you can use feature values from dataframe
         # Assumes the shifted RSI indicator has been generated in the strategy.
-        rsi_now = self.raw_features[f"%-rsi-period-10_shift-1_{pair}_"
+        rsi_now = self.raw_features[f"%-rsi-period_10_shift-1_{pair}_"
                                     f"{self.config['timeframe']}"].iloc[self._current_tick]

         # reward agent for entering trades
````
docs/freqai-running.md

````diff
@@ -128,6 +128,9 @@ The FreqAI specific parameter `label_period_candles` defines the offset (number

 You can choose to adopt a continual learning scheme by setting `"continual_learning": true` in the config. By enabling `continual_learning`, after training an initial model from scratch, subsequent trainings will start from the final model state of the preceding training. This gives the new model a "memory" of the previous state. By default, this is set to `False` which means that all new models are trained from scratch, without input from previous models.

+???+ danger "Continual learning enforces a constant parameter space"
+    Since `continual_learning` means that the model parameter space *cannot* change between trainings, `principal_component_analysis` is automatically disabled when `continual_learning` is enabled. Hint: PCA changes the parameter space and the number of features, learn more about PCA [here](freqai-feature-engineering.md#data-dimensionality-reduction-with-principal-component-analysis).
+
 ## Hyperopt

 You can hyperopt using the same command as for [typical Freqtrade hyperopt](hyperopt.md):
````
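For reference, a minimal sketch of enabling the option discussed above; only `continual_learning` is the documented key here, and the surrounding structure is abbreviated.

```python
# Hypothetical excerpt of a FreqAI configuration, shown as a Python dict.
config = {
    "freqai": {
        "continual_learning": True,  # subsequent trainings start from the previous model state
    }
}
```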
docs/plugins.md

````diff
@@ -149,7 +149,7 @@ The below example assumes a timeframe of 1 hour:
 * Locks each pair after selling for an additional 5 candles (`CooldownPeriod`), giving other pairs a chance to get filled.
 * Stops trading for 4 hours (`4 * 1h candles`) if the last 2 days (`48 * 1h candles`) had 20 trades, which caused a max-drawdown of more than 20%. (`MaxDrawdown`).
 * Stops trading if more than 4 stoploss occur for all pairs within a 1 day (`24 * 1h candles`) limit (`StoplossGuard`).
-* Locks all pairs that had 4 Trades within the last 6 hours (`6 * 1h candles`) with a combined profit ratio of below 0.02 (<2%) (`LowProfitPairs`).
+* Locks all pairs that had 2 Trades within the last 6 hours (`6 * 1h candles`) with a combined profit ratio of below 0.02 (<2%) (`LowProfitPairs`).
 * Locks all pairs for 2 candles that had a profit of below 0.01 (<1%) within the last 24h (`24 * 1h candles`), a minimum of 4 trades.

 ``` python
````
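The fenced code block that follows this bullet list in the source document is not shown in this diff view. For orientation, a hedged sketch of a protections configuration matching the bullets above, using parameter names from the freqtrade protections docs; the exact values are illustrative.

```python
@property
def protections(self):
    return [
        {"method": "CooldownPeriod", "stop_duration_candles": 5},
        {
            "method": "MaxDrawdown",
            "lookback_period_candles": 48,
            "trade_limit": 20,
            "stop_duration_candles": 4,
            "max_allowed_drawdown": 0.2,
        },
        {
            "method": "StoplossGuard",
            "lookback_period_candles": 24,
            "trade_limit": 4,
            "stop_duration_candles": 2,
            "only_per_pair": False,
        },
        {
            "method": "LowProfitPairs",
            "lookback_period_candles": 6,
            "trade_limit": 2,
            "stop_duration_candles": 60,
            "required_profit": 0.02,
        },
        {
            "method": "LowProfitPairs",
            "lookback_period_candles": 24,
            "trade_limit": 4,
            "stop_duration_candles": 2,
            "required_profit": 0.01,
        },
    ]
```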
docs/producer-consumer.md

````diff
@@ -42,14 +42,14 @@ Enable subscribing to an instance by adding the `external_message_consumer` section
 | `producers` | **Required.** List of producers <br> **Datatype:** Array.
 | `producers.name` | **Required.** Name of this producer. This name must be used in calls to `get_producer_pairs()` and `get_producer_df()` if more than one producer is used.<br> **Datatype:** string
 | `producers.host` | **Required.** The hostname or IP address from your producer.<br> **Datatype:** string
-| `producers.port` | **Required.** The port matching the above host.<br> **Datatype:** string
+| `producers.port` | **Required.** The port matching the above host.<br>*Defaults to `8080`.*<br> **Datatype:** Integer
 | `producers.secure` | **Optional.** Use ssl in websockets connection. Default False.<br> **Datatype:** string
 | `producers.ws_token` | **Required.** `ws_token` as configured on the producer.<br> **Datatype:** string
 | | **Optional settings**
 | `wait_timeout` | Timeout until we ping again if no message is received. <br>*Defaults to `300`.*<br> **Datatype:** Integer - in seconds.
-| `wait_timeout` | Ping timeout <br>*Defaults to `10`.*<br> **Datatype:** Integer - in seconds.
+| `ping_timeout` | Ping timeout <br>*Defaults to `10`.*<br> **Datatype:** Integer - in seconds.
 | `sleep_time` | Sleep time before retrying to connect.<br>*Defaults to `10`.*<br> **Datatype:** Integer - in seconds.
-| `remove_entry_exit_signals` | Remove signal columns from the dataframe (set them to 0) on dataframe receipt.<br>*Defaults to `10`.*<br> **Datatype:** Integer - in seconds.
+| `remove_entry_exit_signals` | Remove signal columns from the dataframe (set them to 0) on dataframe receipt.<br>*Defaults to `False`.*<br> **Datatype:** Boolean.
 | `message_size_limit` | Size limit per message<br>*Defaults to `8`.*<br> **Datatype:** Integer - Megabytes.

 Instead of (or as well as) calculating indicators in `populate_indicators()` the follower instance listens on the connection to a producer instance's messages (or multiple producer instances in advanced configurations) and requests the producer's most recently analyzed dataframes for each pair in the active whitelist.
````
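For orientation, a minimal sketch of the `external_message_consumer` section this table describes, rendered as a Python dict for brevity; in practice it lives in the JSON configuration, and the host and token shown are placeholders.

```python
external_message_consumer = {
    "enabled": True,
    "producers": [
        {
            "name": "default",
            "host": "127.0.0.1",
            "port": 8080,                    # integer, per the corrected datatype above
            "secure": False,
            "ws_token": "secret_ws_t0ken.",  # placeholder; must match the producer
        }
    ],
    "wait_timeout": 300,                 # seconds until we ping again
    "ping_timeout": 10,                  # seconds (the renamed row above)
    "sleep_time": 10,                    # seconds before reconnecting
    "remove_entry_exit_signals": False,
    "message_size_limit": 8,             # megabytes
}
```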
docs/requirements-docs.txt

````diff
@@ -1,6 +1,6 @@
 markdown==3.3.7
 mkdocs==1.4.2
-mkdocs-material==9.1.3
+mkdocs-material==9.1.6
 mdx_truly_sane_lists==1.3
-pymdown-extensions==9.10
+pymdown-extensions==9.11
 jinja2==3.1.2
````
docs/rest-api.md

````diff
@@ -9,9 +9,6 @@ This same command can also be used to update freqUI, should there be a new release

 Once the bot is started in trade / dry-run mode (with `freqtrade trade`) - the UI will be available under the configured port below (usually `http://127.0.0.1:8080`).

-!!! info "Alpha release"
-    FreqUI is still considered an alpha release - if you encounter bugs or inconsistencies please open a [FreqUI issue](https://github.com/freqtrade/frequi/issues/new/choose).
-
 !!! Note "developers"
     Developers should not use this method, but instead use the method described in the [freqUI repository](https://github.com/freqtrade/frequi) to get the source-code of freqUI.

````
docs/stoploss.md

````diff
@@ -23,10 +23,22 @@ These modes can be configured with these values:
     'stoploss_on_exchange_limit_ratio': 0.99
 ```

 !!! Note
-    Stoploss on exchange is only supported for Binance (stop-loss-limit), Huobi (stop-limit), Kraken (stop-loss-market, stop-loss-limit), Gate (stop-limit), and Kucoin (stop-limit and stop-market) as of now.
-    <ins>Do not set too low/tight stoploss value if using stop loss on exchange!</ins>
-    If set to low/tight then you have greater risk of missing fill on the order and stoploss will not work.
+    Stoploss on exchange is only supported for the following exchanges, and not all exchanges support both stop-limit and stop-market.
+    The Order-type will be ignored if only one mode is available.
+
+    | Exchange | stop-loss type |
+    |----------|-------------|
+    | Binance  | limit |
+    | Binance Futures  | market, limit |
+    | Huobi    | limit |
+    | kraken   | market, limit |
+    | Gate     | limit |
+    | Okx      | limit |
+    | Kucoin   | stop-limit, stop-market|
+
+!!! Note "Tight stoploss"
+    <ins>Do not set too low/tight stoploss value when using stop loss on exchange!</ins>
+    If set to low/tight you will have greater risk of missing fill on the order and stoploss will not work.

 ### stoploss_on_exchange and stoploss_on_exchange_limit_ratio

````
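For reference, a sketch of how these settings appear in a strategy's `order_types` dictionary. The key names come from the freqtrade stoploss docs; the values are illustrative.

```python
order_types = {
    "entry": "limit",
    "exit": "limit",
    "stoploss": "limit",                       # stop-loss order type on the exchange
    "stoploss_on_exchange": True,              # place the stoploss order on the exchange
    "stoploss_on_exchange_limit_ratio": 0.99,  # limit price = stop price * 0.99
}
```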
docs/strategy-callbacks.md

````diff
@@ -51,7 +51,8 @@ During hyperopt, this runs only once at startup.

 ## Bot loop start

-A simple callback which is called once at the start of every bot throttling iteration (roughly every 5 seconds, unless configured differently).
+A simple callback which is called once at the start of every bot throttling iteration in dry/live mode (roughly every 5
+seconds, unless configured differently) or once per candle in backtest/hyperopt mode.
 This can be used to perform calculations which are pair independent (apply to all pairs), loading of external data, etc.

 ``` python
@@ -61,11 +62,12 @@ class AwesomeStrategy(IStrategy):

     # ... populate_* methods

-    def bot_loop_start(self, **kwargs) -> None:
+    def bot_loop_start(self, current_time: datetime, **kwargs) -> None:
         """
         Called at the start of the bot iteration (one loop).
         Might be used to perform pair-independent tasks
         (e.g. gather some remote resource for comparison)
+        :param current_time: datetime object, containing the current datetime
         :param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
         """
         if self.config['runmode'].value in ('live', 'dry_run'):
````
docs/telegram-usage.md

````diff
@@ -279,6 +279,7 @@ Return a summary of your profit/loss and performance.
 > ∙ `33.095 EUR`
 >
 > **Total Trade Count:** `138`
+> **Bot started:** `2022-07-11 18:40:44`
 > **First Trade opened:** `3 days ago`
 > **Latest Trade opened:** `2 minutes ago`
 > **Avg. Duration:** `2:33:45`
@@ -292,6 +293,7 @@ The relative profit of `15.2 Σ%` is based on the starting capital - so in this
 Starting capital is either taken from the `available_capital` setting, or calculated by using current wallet size - profits.
 Profit Factor is calculated as gross profits / gross losses - and should serve as an overall metric for the strategy.
 Max drawdown corresponds to the backtesting metric `Absolute Drawdown (Account)` - calculated as `(Absolute Drawdown) / (DrawdownHigh + startingBalance)`.
+Bot started date will refer to the date the bot was first started. For older bots, this will default to the first trade's open date.

 ### /forceexit <trade_id>

````
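As a quick illustration of the profit-factor arithmetic described above, a self-contained sketch; the trade profits are invented and the variable names are ours, not freqtrade's.

```python
# Hypothetical closed-trade profits in stake currency.
profits = [12.0, -4.0, 7.5, -2.5]

gross_profit = sum(p for p in profits if p > 0)     # 19.5
gross_loss = abs(sum(p for p in profits if p < 0))  # 6.5
profit_factor = gross_profit / gross_loss if gross_loss else float("inf")
print(f"Profit factor: {profit_factor:.2f}")  # Profit factor: 3.00
```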
freqtrade/__init__.py

````diff
@@ -1,5 +1,5 @@
 """ Freqtrade bot """
-__version__ = '2023.3.dev'
+__version__ = '2023.4.dev'

 if 'dev' in __version__:
     from pathlib import Path
````
freqtrade/commands/data_commands.py

````diff
@@ -204,11 +204,14 @@ def start_list_data(args: Dict[str, Any]) -> None:
             pair, timeframe, candle_type,
             *dhc.ohlcv_data_min_max(pair, timeframe, candle_type)
         ) for pair, timeframe, candle_type in paircombs]

         print(tabulate([
             (pair, timeframe, candle_type,
              start.strftime(DATETIME_PRINT_FORMAT),
              end.strftime(DATETIME_PRINT_FORMAT))
-            for pair, timeframe, candle_type, start, end in paircombs1
+            for pair, timeframe, candle_type, start, end in sorted(
+                paircombs1,
+                key=lambda x: (x[0], timeframe_to_minutes(x[1]), x[2]))
         ],
             headers=("Pair", "Timeframe", "Type", 'From', 'To'),
             tablefmt='psql', stralign='right'))
````
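The switch to `timeframe_to_minutes` in the sort key avoids lexicographic ordering of timeframes, where "15m" would sort before "5m". A short sketch of the idea; `timeframe_to_minutes` is freqtrade's helper, while the sample tuples are made up.

```python
from freqtrade.exchange import timeframe_to_minutes

# Made-up (pair, timeframe, candle_type) combinations.
paircombs = [("ETH/BTC", "15m", "spot"), ("ETH/BTC", "5m", "spot"), ("BTC/USDT", "1h", "spot")]

# Sort by pair, then by numeric timeframe length, then by candle type.
ordered = sorted(paircombs, key=lambda x: (x[0], timeframe_to_minutes(x[1]), x[2]))
print(ordered)
# [('BTC/USDT', '1h', 'spot'), ('ETH/BTC', '5m', 'spot'), ('ETH/BTC', '15m', 'spot')]
```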
freqtrade/configuration/timerange.py

````diff
@@ -116,7 +116,7 @@ class TimeRange:
         :param text: value from --timerange
         :return: Start and End range period
         """
-        if text is None:
+        if not text:
             return TimeRange(None, None, 0, 0)
         syntax = [(r'^-(\d{8})$', (None, 'date')),
                   (r'^(\d{8})-$', ('date', None)),
````
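A brief usage sketch of the guard change: previously only `None` short-circuited, while an empty string fell through to the regex matching. Assuming `parse_timerange` keeps its documented behavior, both now take the early-return path.

```python
from freqtrade.configuration import TimeRange

tr = TimeRange.parse_timerange("20220101-20220201")
print(tr.starttype, tr.stoptype)  # date date

# An empty string now behaves like None and returns an "unbounded" TimeRange.
empty = TimeRange.parse_timerange("")
print(empty.starttype, empty.stoptype)  # None None
```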
freqtrade/constants.py

````diff
@@ -36,9 +36,10 @@ AVAILABLE_PAIRLISTS = ['StaticPairList', 'VolumePairList', 'ProducerPairList', '
                        'AgeFilter', 'OffsetFilter', 'PerformanceFilter',
                        'PrecisionFilter', 'PriceFilter', 'RangeStabilityFilter',
                        'ShuffleFilter', 'SpreadFilter', 'VolatilityFilter']
-AVAILABLE_PROTECTIONS = ['CooldownPeriod', 'LowProfitPairs', 'MaxDrawdown', 'StoplossGuard']
-AVAILABLE_DATAHANDLERS_TRADES = ['json', 'jsongz', 'hdf5']
-AVAILABLE_DATAHANDLERS = AVAILABLE_DATAHANDLERS_TRADES + ['feather', 'parquet']
+AVAILABLE_PROTECTIONS = ['CooldownPeriod',
+                         'LowProfitPairs', 'MaxDrawdown', 'StoplossGuard']
+AVAILABLE_DATAHANDLERS_TRADES = ['json', 'jsongz', 'hdf5', 'feather']
+AVAILABLE_DATAHANDLERS = AVAILABLE_DATAHANDLERS_TRADES + ['parquet']
 BACKTEST_BREAKDOWNS = ['day', 'week', 'month']
 BACKTEST_CACHE_AGE = ['none', 'day', 'week', 'month']
 BACKTEST_CACHE_DEFAULT = 'day'
@@ -63,6 +64,7 @@ USERPATH_FREQAIMODELS = 'freqaimodels'
 TELEGRAM_SETTING_OPTIONS = ['on', 'off', 'silent']
 WEBHOOK_FORMAT_OPTIONS = ['form', 'json', 'raw']
 FULL_DATAFRAME_THRESHOLD = 100
+CUSTOM_TAG_MAX_LENGTH = 255

 ENV_VAR_PREFIX = 'FREQTRADE__'

@@ -597,7 +599,7 @@ CONF_SCHEMA = {
                 "model_type": {"type": "string", "default": "PPO"},
                 "policy_type": {"type": "string", "default": "MlpPolicy"},
                 "net_arch": {"type": "array", "default": [128, 128]},
-                "randomize_startinng_position": {"type": "boolean", "default": False},
+                "randomize_starting_position": {"type": "boolean", "default": False},
                 "model_reward_parameters": {
                     "type": "object",
                     "properties": {
````
freqtrade/data/btanalysis.py

````diff
@@ -246,14 +246,8 @@ def _load_backtest_data_df_compatibility(df: pd.DataFrame) -> pd.DataFrame:
     """
     Compatibility support for older backtest data.
     """
-    df['open_date'] = pd.to_datetime(df['open_date'],
-                                     utc=True,
-                                     infer_datetime_format=True
-                                     )
-    df['close_date'] = pd.to_datetime(df['close_date'],
-                                      utc=True,
-                                      infer_datetime_format=True
-                                      )
+    df['open_date'] = pd.to_datetime(df['open_date'], utc=True)
+    df['close_date'] = pd.to_datetime(df['close_date'], utc=True)
     # Compatibility support for pre short Columns
     if 'is_short' not in df.columns:
         df['is_short'] = False
````
freqtrade/data/converter.py

````diff
@@ -34,7 +34,7 @@ def ohlcv_to_dataframe(ohlcv: list, timeframe: str, pair: str, *,
     cols = DEFAULT_DATAFRAME_COLUMNS
     df = DataFrame(ohlcv, columns=cols)

-    df['date'] = to_datetime(df['date'], unit='ms', utc=True, infer_datetime_format=True)
+    df['date'] = to_datetime(df['date'], unit='ms', utc=True)

     # Some exchanges return int values for Volume and even for OHLC.
     # Convert them since TA-LIB indicators used in the strategy assume floats
````
freqtrade/data/dataprovider.py

````diff
@@ -21,6 +21,7 @@ from freqtrade.exchange import Exchange, timeframe_to_seconds
 from freqtrade.exchange.types import OrderBook
 from freqtrade.misc import append_candles_to_dataframe
 from freqtrade.rpc import RPCManager
+from freqtrade.rpc.rpc_types import RPCAnalyzedDFMsg
 from freqtrade.util import PeriodicCache


@@ -118,8 +119,7 @@ class DataProvider:
         :param new_candle: This is a new candle
         """
         if self.__rpc:
-            self.__rpc.send_msg(
-                {
+            msg: RPCAnalyzedDFMsg = {
                     'type': RPCMessageType.ANALYZED_DF,
                     'data': {
                         'key': pair_key,
@@ -127,7 +127,7 @@ class DataProvider:
                         'la': datetime.now(timezone.utc)
                     }
                 }
-            )
+            self.__rpc.send_msg(msg)
             if new_candle:
                 self.__rpc.send_msg({
                     'type': RPCMessageType.NEW_CANDLE,
````
freqtrade/data/history/featherdatahandler.py

````diff
@@ -4,7 +4,7 @@ from typing import Optional
 from pandas import DataFrame, read_feather, to_datetime

 from freqtrade.configuration import TimeRange
-from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, TradeList
+from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS, TradeList
 from freqtrade.enums import CandleType

 from .idatahandler import IDataHandler
@@ -63,10 +63,7 @@ class FeatherDataHandler(IDataHandler):
         pairdata.columns = self._columns
         pairdata = pairdata.astype(dtype={'open': 'float', 'high': 'float',
                                           'low': 'float', 'close': 'float', 'volume': 'float'})
-        pairdata['date'] = to_datetime(pairdata['date'],
-                                       unit='ms',
-                                       utc=True,
-                                       infer_datetime_format=True)
+        pairdata['date'] = to_datetime(pairdata['date'], unit='ms', utc=True)
         return pairdata

     def ohlcv_append(
@@ -92,12 +89,11 @@ class FeatherDataHandler(IDataHandler):
         :param data: List of Lists containing trade data,
                      column sequence as in DEFAULT_TRADES_COLUMNS
         """
-        # filename = self._pair_trades_filename(self._datadir, pair)
+        filename = self._pair_trades_filename(self._datadir, pair)
+        self.create_dir_if_needed(filename)

-        raise NotImplementedError()
-        # array = pa.array(data)
-        # array
-        # feather.write_feather(data, filename)
+        tradesdata = DataFrame(data, columns=DEFAULT_TRADES_COLUMNS)
+        tradesdata.to_feather(filename, compression_level=9, compression='lz4')

     def trades_append(self, pair: str, data: TradeList):
         """
@@ -116,14 +112,13 @@ class FeatherDataHandler(IDataHandler):
         :param timerange: Timerange to load trades for - currently not implemented
         :return: List of trades
         """
-        raise NotImplementedError()
-        # filename = self._pair_trades_filename(self._datadir, pair)
-        # tradesdata = misc.file_load_json(filename)
+        filename = self._pair_trades_filename(self._datadir, pair)
+        if not filename.exists():
+            return []

-        # if not tradesdata:
-        #     return []
+        tradesdata = read_feather(filename)

-        # return tradesdata
+        return tradesdata.values.tolist()

     @classmethod
     def _get_file_extension(cls):
````
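For context, a standalone round-trip sketch of the pandas feather calls the handler now uses; this is plain pandas rather than the handler itself, and the column list and sample row are illustrative.

```python
import pandas as pd

# Column layout mirrors DEFAULT_TRADES_COLUMNS; the sample row is invented.
columns = ["timestamp", "id", "type", "side", "price", "amount", "cost"]
trades = pd.DataFrame(
    [[1680000000000, "1", None, "buy", 28000.0, 0.01, 280.0]],
    columns=columns,
)

# Write with lz4 compression, then read back, as trades_store()/trades_load() do.
trades.to_feather("trades.feather", compression_level=9, compression="lz4")
roundtrip = pd.read_feather("trades.feather")
print(roundtrip.values.tolist())  # the same rows back as a list of lists
```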
freqtrade/data/history/jsondatahandler.py

````diff
@@ -75,10 +75,7 @@ class JsonDataHandler(IDataHandler):
             return DataFrame(columns=self._columns)
         pairdata = pairdata.astype(dtype={'open': 'float', 'high': 'float',
                                           'low': 'float', 'close': 'float', 'volume': 'float'})
-        pairdata['date'] = to_datetime(pairdata['date'],
-                                       unit='ms',
-                                       utc=True,
-                                       infer_datetime_format=True)
+        pairdata['date'] = to_datetime(pairdata['date'], unit='ms', utc=True)
         return pairdata

     def ohlcv_append(
````
freqtrade/data/history/parquetdatahandler.py

````diff
@@ -62,10 +62,7 @@ class ParquetDataHandler(IDataHandler):
         pairdata.columns = self._columns
         pairdata = pairdata.astype(dtype={'open': 'float', 'high': 'float',
                                           'low': 'float', 'close': 'float', 'volume': 'float'})
-        pairdata['date'] = to_datetime(pairdata['date'],
-                                       unit='ms',
-                                       utc=True,
-                                       infer_datetime_format=True)
+        pairdata['date'] = to_datetime(pairdata['date'], unit='ms', utc=True)
         return pairdata

     def ohlcv_append(
````
freqtrade/exchange/__init__.py

````diff
@@ -8,15 +8,15 @@ from freqtrade.exchange.bitpanda import Bitpanda
 from freqtrade.exchange.bittrex import Bittrex
 from freqtrade.exchange.bybit import Bybit
 from freqtrade.exchange.coinbasepro import Coinbasepro
-from freqtrade.exchange.exchange_utils import (amount_to_contract_precision, amount_to_contracts,
-                                               amount_to_precision, available_exchanges,
-                                               ccxt_exchanges, contracts_to_amount,
-                                               date_minus_candles, is_exchange_known_ccxt,
-                                               market_is_active, price_to_precision,
-                                               timeframe_to_minutes, timeframe_to_msecs,
-                                               timeframe_to_next_date, timeframe_to_prev_date,
-                                               timeframe_to_seconds, validate_exchange,
-                                               validate_exchanges)
+from freqtrade.exchange.exchange_utils import (ROUND_DOWN, ROUND_UP, amount_to_contract_precision,
+                                               amount_to_contracts, amount_to_precision,
+                                               available_exchanges, ccxt_exchanges,
+                                               contracts_to_amount, date_minus_candles,
+                                               is_exchange_known_ccxt, market_is_active,
+                                               price_to_precision, timeframe_to_minutes,
+                                               timeframe_to_msecs, timeframe_to_next_date,
+                                               timeframe_to_prev_date, timeframe_to_seconds,
+                                               validate_exchange, validate_exchanges)
 from freqtrade.exchange.gate import Gate
 from freqtrade.exchange.hitbtc import Hitbtc
 from freqtrade.exchange.huobi import Huobi
````
freqtrade/exchange/binance.py

````diff
@@ -7,7 +7,6 @@ from typing import Dict, List, Optional, Tuple
-import arrow
 import ccxt

 from freqtrade.constants import BuySell
 from freqtrade.enums import CandleType, MarginMode, PriceType, TradingMode
 from freqtrade.exceptions import DDosProtection, OperationalException, TemporaryError
 from freqtrade.exchange import Exchange
@@ -49,26 +48,6 @@ class Binance(Exchange):
         (TradingMode.FUTURES, MarginMode.ISOLATED)
     ]

-    def _get_params(
-        self,
-        side: BuySell,
-        ordertype: str,
-        leverage: float,
-        reduceOnly: bool,
-        time_in_force: str = 'GTC',
-    ) -> Dict:
-        params = super()._get_params(side, ordertype, leverage, reduceOnly, time_in_force)
-        if (
-            time_in_force == 'PO'
-            and ordertype != 'market'
-            and self.trading_mode == TradingMode.SPOT
-            # Only spot can do post only orders
-        ):
-            params.pop('timeInForce')
-            params['postOnly'] = True
-
-        return params
-
     def get_tickers(self, symbols: Optional[List[str]] = None, cached: bool = False) -> Tickers:
         tickers = super().get_tickers(symbols=symbols, cached=cached)
         if self.trading_mode == TradingMode.FUTURES:
````
(File diff suppressed because it is too large.)
@ -30,13 +30,14 @@ from freqtrade.exceptions import (DDosProtection, ExchangeError, InsufficientFun
|
||||
RetryableOrderError, TemporaryError)
|
||||
from freqtrade.exchange.common import (API_FETCH_ORDER_RETRY_COUNT, remove_credentials, retrier,
|
||||
retrier_async)
|
||||
from freqtrade.exchange.exchange_utils import (CcxtModuleType, amount_to_contract_precision,
|
||||
amount_to_contracts, amount_to_precision,
|
||||
contracts_to_amount, date_minus_candles,
|
||||
is_exchange_known_ccxt, market_is_active,
|
||||
price_to_precision, timeframe_to_minutes,
|
||||
timeframe_to_msecs, timeframe_to_next_date,
|
||||
timeframe_to_prev_date, timeframe_to_seconds)
|
||||
from freqtrade.exchange.exchange_utils import (ROUND, ROUND_DOWN, ROUND_UP, CcxtModuleType,
|
||||
amount_to_contract_precision, amount_to_contracts,
|
||||
amount_to_precision, contracts_to_amount,
|
||||
date_minus_candles, is_exchange_known_ccxt,
|
||||
market_is_active, price_to_precision,
|
||||
timeframe_to_minutes, timeframe_to_msecs,
|
||||
timeframe_to_next_date, timeframe_to_prev_date,
|
||||
timeframe_to_seconds)
|
||||
from freqtrade.exchange.types import OHLCVResponse, OrderBook, Ticker, Tickers
|
||||
from freqtrade.misc import (chunks, deep_merge_dicts, file_dump_json, file_load_json,
|
||||
safe_value_fallback2)
|
||||
@ -59,6 +60,7 @@ class Exchange:
|
||||
# or by specifying them in the configuration.
|
||||
_ft_has_default: Dict = {
|
||||
"stoploss_on_exchange": False,
|
||||
"stop_price_param": "stopPrice",
|
||||
"order_time_in_force": ["GTC"],
|
||||
"ohlcv_params": {},
|
||||
"ohlcv_candle_limit": 500,
|
||||
@ -80,6 +82,8 @@ class Exchange:
|
||||
"fee_cost_in_contracts": False, # Fee cost needs contract conversion
|
||||
"needs_trading_fees": False, # use fetch_trading_fees to cache fees
|
||||
"order_props_in_contracts": ['amount', 'cost', 'filled', 'remaining'],
|
||||
# Override createMarketBuyOrderRequiresPrice where ccxt has it wrong
|
||||
"marketOrderRequiresPrice": False,
|
||||
}
|
||||
_ft_has: Dict = {}
|
||||
_ft_has_futures: Dict = {}
|
||||
@ -205,6 +209,8 @@ class Exchange:
|
||||
and self._api_async.session):
|
||||
logger.debug("Closing async ccxt session.")
|
||||
self.loop.run_until_complete(self._api_async.close())
|
||||
if self.loop and not self.loop.is_closed():
|
||||
self.loop.close()
|
||||
|
||||
def validate_config(self, config):
|
||||
# Check if timeframe is available
|
||||
@ -730,12 +736,14 @@ class Exchange:
|
||||
"""
|
||||
return amount_to_precision(amount, self.get_precision_amount(pair), self.precisionMode)
|
||||
|
||||
def price_to_precision(self, pair: str, price: float) -> float:
|
||||
def price_to_precision(self, pair: str, price: float, *, rounding_mode: int = ROUND) -> float:
|
||||
"""
|
||||
Returns the price rounded up to the precision the Exchange accepts.
|
||||
Rounds up
|
||||
Returns the price rounded to the precision the Exchange accepts.
|
||||
The default price_rounding_mode in conf is ROUND.
|
||||
For stoploss calculations, must use ROUND_UP for longs, and ROUND_DOWN for shorts.
|
||||
"""
|
||||
return price_to_precision(price, self.get_precision_price(pair), self.precisionMode)
|
||||
return price_to_precision(price, self.get_precision_price(pair),
|
||||
self.precisionMode, rounding_mode=rounding_mode)
|
||||
|
||||
def price_get_one_pip(self, pair: str, price: float) -> float:
|
||||
"""
|
||||
@ -758,12 +766,12 @@ class Exchange:
|
||||
return self._get_stake_amount_limit(pair, price, stoploss, 'min', leverage)
|
||||
|
||||
def get_max_pair_stake_amount(self, pair: str, price: float, leverage: float = 1.0) -> float:
|
||||
max_stake_amount = self._get_stake_amount_limit(pair, price, 0.0, 'max')
|
||||
max_stake_amount = self._get_stake_amount_limit(pair, price, 0.0, 'max', leverage)
|
||||
if max_stake_amount is None:
|
||||
# * Should never be executed
|
||||
raise OperationalException(f'{self.name}.get_max_pair_stake_amount should'
|
||||
'never set max_stake_amount to None')
|
||||
return max_stake_amount / leverage
|
||||
return max_stake_amount
|
||||
|
||||
def _get_stake_amount_limit(
|
||||
self,
|
||||
@@ -781,43 +789,41 @@ class Exchange:
         except KeyError:
             raise ValueError(f"Can't get market information for symbol {pair}")

+        if isMin:
+            # reserve some percent defined in config (5% default) + stoploss
+            margin_reserve: float = 1.0 + self._config.get('amount_reserve_percent',
+                                                           DEFAULT_AMOUNT_RESERVE_PERCENT)
+            stoploss_reserve = (
+                margin_reserve / (1 - abs(stoploss)) if abs(stoploss) != 1 else 1.5
+            )
+            # it should not be more than 50%
+            stoploss_reserve = max(min(stoploss_reserve, 1.5), 1)
+        else:
+            margin_reserve = 1.0
+            stoploss_reserve = 1.0
+
         stake_limits = []
         limits = market['limits']
         if (limits['cost'][limit] is not None):
             stake_limits.append(
-                self._contracts_to_amount(
-                    pair,
-                    limits['cost'][limit]
-                )
+                self._contracts_to_amount(pair, limits['cost'][limit]) * stoploss_reserve
             )

         if (limits['amount'][limit] is not None):
             stake_limits.append(
-                self._contracts_to_amount(
-                    pair,
-                    limits['amount'][limit] * price
-                )
+                self._contracts_to_amount(pair, limits['amount'][limit]) * price * margin_reserve
             )

         if not stake_limits:
             return None if isMin else float('inf')

-        # reserve some percent defined in config (5% default) + stoploss
-        amount_reserve_percent = 1.0 + self._config.get('amount_reserve_percent',
-                                                        DEFAULT_AMOUNT_RESERVE_PERCENT)
-        amount_reserve_percent = (
-            amount_reserve_percent / (1 - abs(stoploss)) if abs(stoploss) != 1 else 1.5
-        )
-        # it should not be more than 50%
-        amount_reserve_percent = max(min(amount_reserve_percent, 1.5), 1)
-
         # The value returned should satisfy both limits: for amount (base currency) and
         # for cost (quote, stake currency), so max() is used here.
         # See also #2575 at github.
         return self._get_stake_amount_considering_leverage(
-            max(stake_limits) * amount_reserve_percent,
+            max(stake_limits) if isMin else min(stake_limits),
             leverage or 1.0
-        ) if isMin else min(stake_limits)
+        )

     def _get_stake_amount_considering_leverage(self, stake_amount: float, leverage: float) -> float:
         """
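A worked example of the new reserve split, using the config default `amount_reserve_percent` of 0.05 (DEFAULT_AMOUNT_RESERVE_PERCENT, "5% default" per the code comment) and an illustrative 10% stoploss:

    margin_reserve = 1.0 + 0.05                                # 1.05
    stoploss = -0.10
    stoploss_reserve = margin_reserve / (1 - abs(stoploss))    # 1.05 / 0.9 ≈ 1.1667
    stoploss_reserve = max(min(stoploss_reserve, 1.5), 1)      # clamped to [1, 1.5] -> ≈ 1.1667
    # A 10 USDT minimum cost limit thus becomes ≈ 11.67 USDT of required stake,
    # while the amount limit is scaled by margin_reserve (1.05) only.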
@@ -1038,6 +1044,13 @@ class Exchange:
             params.update({'reduceOnly': True})
         return params

+    def _order_needs_price(self, ordertype: str) -> bool:
+        return (
+            ordertype != 'market'
+            or self._api.options.get("createMarketBuyOrderRequiresPrice", False)
+            or self._ft_has.get('marketOrderRequiresPrice', False)
+        )
+
     def create_order(
         self,
         *,
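The helper's behaviour, spelled out (assuming an initialized Exchange instance):

    # Limit orders always need a price.  Market orders only need one when either
    # ccxt (createMarketBuyOrderRequiresPrice) or the freqtrade exchange class
    # (_ft_has['marketOrderRequiresPrice']) says the venue wants one anyway.
    exchange._order_needs_price('limit')   # always True
    exchange._order_needs_price('market')  # e.g. True on Gate spot, False on most venues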
@@ -1060,8 +1073,7 @@ class Exchange:
         try:
             # Set the precision for amount and price(rate) as accepted by the exchange
             amount = self.amount_to_precision(pair, self._amount_to_contracts(pair, amount))
-            needs_price = (ordertype != 'market'
-                           or self._api.options.get("createMarketBuyOrderRequiresPrice", False))
+            needs_price = self._order_needs_price(ordertype)
             rate_for_order = self.price_to_precision(pair, rate) if needs_price else None

             if not reduceOnly:
@@ -1104,11 +1116,11 @@ class Exchange:
         """
        if not self._ft_has.get('stoploss_on_exchange'):
            raise OperationalException(f"stoploss is not implemented for {self.name}.")

+        price_param = self._ft_has['stop_price_param']
        return (
-            order.get('stopPrice', None) is None
-            or ((side == "sell" and stop_loss > float(order['stopPrice'])) or
-                (side == "buy" and stop_loss < float(order['stopPrice'])))
+            order.get(price_param, None) is None
+            or ((side == "sell" and stop_loss > float(order[price_param])) or
+                (side == "buy" and stop_loss < float(order[price_param])))
        )

    def _get_stop_order_type(self, user_order_type) -> Tuple[str, str]:
@@ -1148,8 +1160,8 @@ class Exchange:

     def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:
         params = self._params.copy()
-        # Verify if stopPrice works for your exchange!
-        params.update({'stopPrice': stop_price})
+        # Verify if stopPrice works for your exchange, else configure stop_price_param
+        params.update({self._ft_has['stop_price_param']: stop_price})
         return params

     @retrier(retries=0)
@@ -1175,12 +1187,12 @@ class Exchange:

         user_order_type = order_types.get('stoploss', 'market')
         ordertype, user_order_type = self._get_stop_order_type(user_order_type)

-        stop_price_norm = self.price_to_precision(pair, stop_price)
+        round_mode = ROUND_DOWN if side == 'buy' else ROUND_UP
+        stop_price_norm = self.price_to_precision(pair, stop_price, rounding_mode=round_mode)
         limit_rate = None
         if user_order_type == 'limit':
             limit_rate = self._get_stop_limit_rate(stop_price, order_types, side)
-            limit_rate = self.price_to_precision(pair, limit_rate)
+            limit_rate = self.price_to_precision(pair, limit_rate, rounding_mode=round_mode)

         if self._config['dry_run']:
             dry_order = self.create_dry_run_order(
@@ -2,11 +2,12 @@
 Exchange support utils
 """
 from datetime import datetime, timedelta, timezone
-from math import ceil
+from math import ceil, floor
 from typing import Any, Dict, List, Optional, Tuple

 import ccxt
-from ccxt import ROUND_DOWN, ROUND_UP, TICK_SIZE, TRUNCATE, decimal_to_precision
+from ccxt import (DECIMAL_PLACES, ROUND, ROUND_DOWN, ROUND_UP, SIGNIFICANT_DIGITS, TICK_SIZE,
+                  TRUNCATE, decimal_to_precision)

 from freqtrade.exchange.common import BAD_EXCHANGES, EXCHANGE_HAS_OPTIONAL, EXCHANGE_HAS_REQUIRED
 from freqtrade.util import FtPrecise
@@ -219,35 +220,51 @@ def amount_to_contract_precision(
     return amount


-def price_to_precision(price: float, price_precision: Optional[float],
-                       precisionMode: Optional[int]) -> float:
+def price_to_precision(
+    price: float,
+    price_precision: Optional[float],
+    precisionMode: Optional[int],
+    *,
+    rounding_mode: int = ROUND,
+) -> float:
     """
-    Returns the price rounded up to the precision the Exchange accepts.
+    Returns the price rounded to the precision the Exchange accepts.
     Partial Re-implementation of ccxt internal method decimal_to_precision(),
-    which does not support rounding up
+    which does not support rounding up.
+    For stoploss calculations, must use ROUND_UP for longs, and ROUND_DOWN for shorts.

-    TODO: If ccxt supports ROUND_UP for decimal_to_precision(), we could remove this and
-    align with amount_to_precision().
-    !!! Rounds up
     :param price: price to convert
     :param price_precision: price precision to use. Used from markets[pair]['precision']['price']
     :param precisionMode: precision mode to use. Should be used from precisionMode
                           one of ccxt's DECIMAL_PLACES, SIGNIFICANT_DIGITS, or TICK_SIZE
+    :param rounding_mode: rounding mode to use. Defaults to ROUND
     :return: price rounded up to the precision the Exchange accepts

     """
     if price_precision is not None and precisionMode is not None:
-        # price = float(decimal_to_precision(price, rounding_mode=ROUND,
-        #                                    precision=price_precision,
-        #                                    counting_mode=self.precisionMode,
-        #                                    ))
         if precisionMode == TICK_SIZE:
+            if rounding_mode == ROUND:
+                ticks = price / price_precision
+                rounded_ticks = round(ticks)
+                return rounded_ticks * price_precision
             precision = FtPrecise(price_precision)
             price_str = FtPrecise(price)
             missing = price_str % precision
             if not missing == FtPrecise("0"):
-                price = round(float(str(price_str - missing + precision)), 14)
-        else:
-            symbol_prec = price_precision
-            big_price = price * pow(10, symbol_prec)
-            price = ceil(big_price) / pow(10, symbol_prec)
+                return round(float(str(price_str - missing + precision)), 14)
+            return price
+        elif precisionMode in (SIGNIFICANT_DIGITS, DECIMAL_PLACES):
+            ndigits = round(price_precision)
+            if rounding_mode == ROUND:
+                return round(price, ndigits)
+            ticks = price * (10**ndigits)
+            if rounding_mode == ROUND_UP:
+                return ceil(ticks) / (10**ndigits)
+            if rounding_mode == TRUNCATE:
+                return int(ticks) / (10**ndigits)
+            if rounding_mode == ROUND_DOWN:
+                return floor(ticks) / (10**ndigits)
+            raise ValueError(f"Unknown rounding_mode {rounding_mode}")
+        raise ValueError(f"Unknown precisionMode {precisionMode}")
     return price
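Because the helper is a pure function, its behaviour is easy to pin down with concrete values (outputs follow from the code above, modulo float formatting; note that in this revision the TICK_SIZE branch only distinguishes ROUND from the legacy round-up path):

    from ccxt import DECIMAL_PLACES, ROUND, ROUND_DOWN, ROUND_UP, TICK_SIZE, TRUNCATE

    # TICK_SIZE: precision is the tick itself (e.g. 0.05)
    price_to_precision(1.27, 0.05, TICK_SIZE, rounding_mode=ROUND)     # 25.4 ticks -> 25 -> 1.25
    price_to_precision(1.27, 0.05, TICK_SIZE, rounding_mode=ROUND_UP)  # next full tick -> 1.30

    # DECIMAL_PLACES: precision is a digit count (e.g. 2)
    price_to_precision(1.2345, 2, DECIMAL_PLACES, rounding_mode=ROUND)       # 1.23
    price_to_precision(1.2345, 2, DECIMAL_PLACES, rounding_mode=ROUND_UP)    # 1.24
    price_to_precision(1.2345, 2, DECIMAL_PLACES, rounding_mode=ROUND_DOWN)  # 1.23
    price_to_precision(1.2345, 2, DECIMAL_PLACES, rounding_mode=TRUNCATE)    # 1.23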
@@ -5,7 +5,6 @@ from typing import Any, Dict, List, Optional, Tuple

 from freqtrade.constants import BuySell
 from freqtrade.enums import MarginMode, PriceType, TradingMode
-from freqtrade.exceptions import OperationalException
 from freqtrade.exchange import Exchange
 from freqtrade.misc import safe_value_fallback2

@@ -28,10 +27,12 @@ class Gate(Exchange):
         "order_time_in_force": ['GTC', 'IOC'],
         "stoploss_order_types": {"limit": "limit"},
         "stoploss_on_exchange": True,
+        "marketOrderRequiresPrice": True,
     }

     _ft_has_futures: Dict = {
         "needs_trading_fees": True,
+        "marketOrderRequiresPrice": False,
         "tickers_have_bid_ask": False,
         "fee_cost_in_contracts": False,  # Set explicitly to false for clarity
         "order_props_in_contracts": ['amount', 'filled', 'remaining'],
@@ -50,14 +51,6 @@ class Gate(Exchange):
         (TradingMode.FUTURES, MarginMode.ISOLATED)
     ]

-    def validate_ordertypes(self, order_types: Dict) -> None:
-
-        if self.trading_mode != TradingMode.FUTURES:
-            if any(v == 'market' for k, v in order_types.items()):
-                raise OperationalException(
-                    f'Exchange {self.name} does not support market orders.')
-        super().validate_stop_ordertypes(order_types)
-
     def _get_params(
         self,
         side: BuySell,
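The spot/futures split relies on the futures dictionary taking precedence over the spot defaults when futures trading is enabled. A sketch of the effective lookup; the merge function here is hypothetical shorthand for the merging the Exchange base class performs at init time:

    from copy import deepcopy

    def effective_ft_has(spot: dict, futures: dict, trading_mode: str) -> dict:
        merged = deepcopy(spot)
        if trading_mode == 'futures':
            merged.update(futures)  # futures-specific flags win
        return merged

    # Gate: market orders need a price on spot, but not on futures.
    effective_ft_has({"marketOrderRequiresPrice": True},
                     {"marketOrderRequiresPrice": False}, 'futures')
    # -> {'marketOrderRequiresPrice': False}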
@@ -12,6 +12,7 @@ from freqtrade.exceptions import (DDosProtection, InsufficientFundsError, InvalidOrderException,
                                   OperationalException, TemporaryError)
 from freqtrade.exchange import Exchange
 from freqtrade.exchange.common import retrier
+from freqtrade.exchange.exchange_utils import ROUND_DOWN, ROUND_UP
 from freqtrade.exchange.types import Tickers


@@ -109,6 +110,7 @@ class Kraken(Exchange):
         if self.trading_mode == TradingMode.FUTURES:
             params.update({'reduceOnly': True})

+        round_mode = ROUND_DOWN if side == 'buy' else ROUND_UP
         if order_types.get('stoploss', 'market') == 'limit':
             ordertype = "stop-loss-limit"
             limit_price_pct = order_types.get('stoploss_on_exchange_limit_ratio', 0.99)
@@ -116,11 +118,11 @@ class Kraken(Exchange):
                 limit_rate = stop_price * limit_price_pct
             else:
                 limit_rate = stop_price * (2 - limit_price_pct)
-            params['price2'] = self.price_to_precision(pair, limit_rate)
+            params['price2'] = self.price_to_precision(pair, limit_rate, rounding_mode=round_mode)
         else:
             ordertype = "stop-loss"

-        stop_price = self.price_to_precision(pair, stop_price)
+        stop_price = self.price_to_precision(pair, stop_price, rounding_mode=round_mode)

         if self._config['dry_run']:
             dry_order = self.create_dry_run_order(
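The stop-limit arithmetic, worked through with illustrative numbers (stop_price = 100, default stoploss_on_exchange_limit_ratio = 0.99):

    # sell (long exit):  limit_rate = 100 * 0.99       -> 99.0,  rounded with ROUND_UP
    # buy  (short exit): limit_rate = 100 * (2 - 0.99) -> 101.0, rounded with ROUND_DOWN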
@@ -28,6 +28,7 @@ class Okx(Exchange):
         "funding_fee_timeframe": "8h",
         "stoploss_order_types": {"limit": "limit"},
         "stoploss_on_exchange": True,
+        "stop_price_param": "stopLossPrice",
     }
     _ft_has_futures: Dict = {
         "tickers_have_quoteVolume": False,
@@ -162,29 +163,12 @@ class Okx(Exchange):
         return pair_tiers[-1]['maxNotional'] / leverage

     def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:
-
-        params = self._params.copy()
-        # Verify if stopPrice works for your exchange!
-        params.update({'stopLossPrice': stop_price})
-
+        params = super()._get_stop_params(side, ordertype, stop_price)
         if self.trading_mode == TradingMode.FUTURES and self.margin_mode:
             params['tdMode'] = self.margin_mode.value
             params['posSide'] = self._get_posSide(side, True)
         return params

-    def stoploss_adjust(self, stop_loss: float, order: Dict, side: str) -> bool:
-        """
-        OKX uses non-default stoploss price naming.
-        """
-        if not self._ft_has.get('stoploss_on_exchange'):
-            raise OperationalException(f"stoploss is not implemented for {self.name}.")
-
-        return (
-            order.get('stopLossPrice', None) is None
-            or ((side == "sell" and stop_loss > float(order['stopLossPrice'])) or
-                (side == "buy" and stop_loss < float(order['stopLossPrice'])))
-        )
-
     def fetch_stoploss_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
         if self._config['dry_run']:
             return self.fetch_dry_run_order(order_id)
@@ -66,7 +66,7 @@ class Base3ActionRLEnv(BaseEnvironment):
             elif action == Actions.Sell.value and not self.can_short:
                 self._update_total_profit()
                 self._position = Positions.Neutral
-                trade_type = "neutral"
+                trade_type = "exit"
                 self._last_trade_tick = None
             else:
                 print("case not defined")
@@ -74,7 +74,7 @@ class Base3ActionRLEnv(BaseEnvironment):
             if trade_type is not None:
                 self.trade_history.append(
                     {'price': self.current_price(), 'index': self._current_tick,
-                     'type': trade_type})
+                     'type': trade_type, 'profit': self.get_unrealized_profit()})

             if (self._total_profit < self.max_drawdown or
                     self._total_unrealized_profit < self.max_drawdown):
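With the extra field, each `trade_history` record now carries the unrealized profit at the moment of the action; an entry looks roughly like this (values illustrative):

    {'price': 1234.5, 'index': 8741, 'type': 'exit', 'profit': 0.0123}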
@@ -52,16 +52,6 @@ class Base4ActionRLEnv(BaseEnvironment):

         trade_type = None
         if self.is_tradesignal(action):
-            """
-            Action: Neutral, position: Long ->  Close Long
-            Action: Neutral, position: Short -> Close Short
-
-            Action: Long, position: Neutral -> Open Long
-            Action: Long, position: Short -> Close Short and Open Long
-
-            Action: Short, position: Neutral -> Open Short
-            Action: Short, position: Long -> Close Long and Open Short
-            """

             if action == Actions.Neutral.value:
                 self._position = Positions.Neutral
@@ -69,16 +59,16 @@ class Base4ActionRLEnv(BaseEnvironment):
                 self._last_trade_tick = None
             elif action == Actions.Long_enter.value:
                 self._position = Positions.Long
-                trade_type = "long"
+                trade_type = "enter_long"
                 self._last_trade_tick = self._current_tick
             elif action == Actions.Short_enter.value:
                 self._position = Positions.Short
-                trade_type = "short"
+                trade_type = "enter_short"
                 self._last_trade_tick = self._current_tick
             elif action == Actions.Exit.value:
                 self._update_total_profit()
                 self._position = Positions.Neutral
-                trade_type = "neutral"
+                trade_type = "exit"
                 self._last_trade_tick = None
             else:
                 print("case not defined")
@@ -86,7 +76,7 @@ class Base4ActionRLEnv(BaseEnvironment):
             if trade_type is not None:
                 self.trade_history.append(
                     {'price': self.current_price(), 'index': self._current_tick,
-                     'type': trade_type})
+                     'type': trade_type, 'profit': self.get_unrealized_profit()})

             if (self._total_profit < self.max_drawdown or
                     self._total_unrealized_profit < self.max_drawdown):
@@ -53,16 +53,6 @@ class Base5ActionRLEnv(BaseEnvironment):

         trade_type = None
         if self.is_tradesignal(action):
-            """
-            Action: Neutral, position: Long ->  Close Long
-            Action: Neutral, position: Short -> Close Short
-
-            Action: Long, position: Neutral -> Open Long
-            Action: Long, position: Short -> Close Short and Open Long
-
-            Action: Short, position: Neutral -> Open Short
-            Action: Short, position: Long -> Close Long and Open Short
-            """

             if action == Actions.Neutral.value:
                 self._position = Positions.Neutral
@@ -70,21 +60,21 @@ class Base5ActionRLEnv(BaseEnvironment):
                 self._last_trade_tick = None
             elif action == Actions.Long_enter.value:
                 self._position = Positions.Long
-                trade_type = "long"
+                trade_type = "enter_long"
                 self._last_trade_tick = self._current_tick
             elif action == Actions.Short_enter.value:
                 self._position = Positions.Short
-                trade_type = "short"
+                trade_type = "enter_short"
                 self._last_trade_tick = self._current_tick
             elif action == Actions.Long_exit.value:
                 self._update_total_profit()
                 self._position = Positions.Neutral
-                trade_type = "neutral"
+                trade_type = "exit_long"
                 self._last_trade_tick = None
             elif action == Actions.Short_exit.value:
                 self._update_total_profit()
                 self._position = Positions.Neutral
-                trade_type = "neutral"
+                trade_type = "exit_short"
                 self._last_trade_tick = None
             else:
                 print("case not defined")
@@ -92,7 +82,7 @@ class Base5ActionRLEnv(BaseEnvironment):
             if trade_type is not None:
                 self.trade_history.append(
                     {'price': self.current_price(), 'index': self._current_tick,
-                     'type': trade_type})
+                     'type': trade_type, 'profit': self.get_unrealized_profit()})

             if (self._total_profit < self.max_drawdown or
                     self._total_unrealized_profit < self.max_drawdown):
freqtrade/freqai/base_models/BasePyTorchClassifier.py  (new file, 147 lines)
@@ -0,0 +1,147 @@
import logging
from typing import Dict, List, Tuple

import numpy as np
import numpy.typing as npt
import pandas as pd
import torch
from pandas import DataFrame
from torch.nn import functional as F

from freqtrade.exceptions import OperationalException
from freqtrade.freqai.base_models.BasePyTorchModel import BasePyTorchModel
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen


logger = logging.getLogger(__name__)


class BasePyTorchClassifier(BasePyTorchModel):
    """
    A PyTorch implementation of a classifier.
    User must implement fit method

    Important!

    - User must declare the target class names in the strategy,
      under IStrategy.set_freqai_targets method.

    for example, in your strategy:
    ```
    def set_freqai_targets(self, dataframe: DataFrame, metadata: Dict, **kwargs):
        self.freqai.class_names = ["down", "up"]
        dataframe['&s-up_or_down'] = np.where(dataframe["close"].shift(-100) >
                                              dataframe["close"], 'up', 'down')

        return dataframe
    """
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.class_name_to_index = None
        self.index_to_class_name = None

    def predict(
            self, unfiltered_df: DataFrame, dk: FreqaiDataKitchen, **kwargs
    ) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
        """
        Filter the prediction features data and predict with it.
        :param unfiltered_df: Full dataframe for the current backtest period.
        :return:
        :pred_df: dataframe containing the predictions
        :do_predict: np.array of 1s and 0s to indicate places where freqai needed to remove
        data (NaNs) or felt uncertain about data (PCA and DI index)
        :raises ValueError: if 'class_names' doesn't exist in model meta_data.
        """

        class_names = self.model.model_meta_data.get("class_names", None)
        if not class_names:
            raise ValueError(
                "Missing class names. "
                "self.model.model_meta_data['class_names'] is None."
            )

        if not self.class_name_to_index:
            self.init_class_names_to_index_mapping(class_names)

        dk.find_features(unfiltered_df)
        filtered_df, _ = dk.filter_features(
            unfiltered_df, dk.training_features_list, training_filter=False
        )
        filtered_df = dk.normalize_data_from_metadata(filtered_df)
        dk.data_dictionary["prediction_features"] = filtered_df
        self.data_cleaning_predict(dk)
        x = self.data_convertor.convert_x(
            dk.data_dictionary["prediction_features"],
            device=self.device
        )
        logits = self.model.model(x)
        probs = F.softmax(logits, dim=-1)
        predicted_classes = torch.argmax(probs, dim=-1)
        predicted_classes_str = self.decode_class_names(predicted_classes)
        pred_df_prob = DataFrame(probs.detach().numpy(), columns=class_names)
        pred_df = DataFrame(predicted_classes_str, columns=[dk.label_list[0]])
        pred_df = pd.concat([pred_df, pred_df_prob], axis=1)
        return (pred_df, dk.do_predict)

    def encode_class_names(
            self,
            data_dictionary: Dict[str, pd.DataFrame],
            dk: FreqaiDataKitchen,
            class_names: List[str],
    ):
        """
        encode class name, str -> int
        assuming first column of *_labels data frame to be the target column
        containing the class names
        """

        target_column_name = dk.label_list[0]
        for split in self.splits:
            label_df = data_dictionary[f"{split}_labels"]
            self.assert_valid_class_names(label_df[target_column_name], class_names)
            label_df[target_column_name] = list(
                map(lambda x: self.class_name_to_index[x], label_df[target_column_name])
            )

    @staticmethod
    def assert_valid_class_names(
            target_column: pd.Series,
            class_names: List[str]
    ):
        non_defined_labels = set(target_column) - set(class_names)
        if len(non_defined_labels) != 0:
            raise OperationalException(
                f"Found non defined labels: {non_defined_labels}, ",
                f"expecting labels: {class_names}"
            )

    def decode_class_names(self, class_ints: torch.Tensor) -> List[str]:
        """
        decode class name, int -> str
        """

        return list(map(lambda x: self.index_to_class_name[x.item()], class_ints))

    def init_class_names_to_index_mapping(self, class_names):
        self.class_name_to_index = {s: i for i, s in enumerate(class_names)}
        self.index_to_class_name = {i: s for i, s in enumerate(class_names)}
        logger.info(f"encoded class name to index: {self.class_name_to_index}")

    def convert_label_column_to_int(
            self,
            data_dictionary: Dict[str, pd.DataFrame],
            dk: FreqaiDataKitchen,
            class_names: List[str]
    ):
        self.init_class_names_to_index_mapping(class_names)
        self.encode_class_names(data_dictionary, dk, class_names)

    def get_class_names(self) -> List[str]:
        if not self.class_names:
            raise ValueError(
                "self.class_names is empty, "
                "set self.freqai.class_names = ['class a', 'class b', 'class c'] "
                "inside IStrategy.set_freqai_targets method."
            )

        return self.class_names
freqtrade/freqai/base_models/BasePyTorchModel.py  (new file, 83 lines)
@@ -0,0 +1,83 @@
import logging
from abc import ABC, abstractmethod
from time import time
from typing import Any

import torch
from pandas import DataFrame

from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
from freqtrade.freqai.freqai_interface import IFreqaiModel
from freqtrade.freqai.torch.PyTorchDataConvertor import PyTorchDataConvertor


logger = logging.getLogger(__name__)


class BasePyTorchModel(IFreqaiModel, ABC):
    """
    Base class for PyTorch type models.
    User *must* inherit from this class and set fit() and predict() and
    data_convertor property.
    """

    def __init__(self, **kwargs):
        super().__init__(config=kwargs["config"])
        self.dd.model_type = "pytorch"
        self.device = "cuda" if torch.cuda.is_available() else "cpu"
        test_size = self.freqai_info.get('data_split_parameters', {}).get('test_size')
        self.splits = ["train", "test"] if test_size != 0 else ["train"]

    def train(
            self, unfiltered_df: DataFrame, pair: str, dk: FreqaiDataKitchen, **kwargs
    ) -> Any:
        """
        Filter the training data and train a model to it. Train makes heavy use of the datakitchen
        for storing, saving, loading, and analyzing the data.
        :param unfiltered_df: Full dataframe for the current training period
        :return:
        :model: Trained model which can be used to inference (self.predict)
        """

        logger.info(f"-------------------- Starting training {pair} --------------------")

        start_time = time()

        features_filtered, labels_filtered = dk.filter_features(
            unfiltered_df,
            dk.training_features_list,
            dk.label_list,
            training_filter=True,
        )

        # split data into train/test data.
        data_dictionary = dk.make_train_test_datasets(features_filtered, labels_filtered)
        if not self.freqai_info.get("fit_live_predictions", 0) or not self.live:
            dk.fit_labels()
        # normalize all data based on train_dataset only
        data_dictionary = dk.normalize_data(data_dictionary)

        # optional additional data cleaning/analysis
        self.data_cleaning_train(dk)

        logger.info(
            f"Training model on {len(dk.data_dictionary['train_features'].columns)} features"
        )
        logger.info(f"Training model on {len(data_dictionary['train_features'])} data points")

        model = self.fit(data_dictionary, dk)
        end_time = time()

        logger.info(f"-------------------- Done training {pair} "
                    f"({end_time - start_time:.2f} secs) --------------------")

        return model

    @property
    @abstractmethod
    def data_convertor(self) -> PyTorchDataConvertor:
        """
        a class responsible for converting `*_features` & `*_labels` pandas dataframes
        to pytorch tensors.
        """
        raise NotImplementedError("Abstract property")
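The diff references `PyTorchDataConvertor` without showing its implementation; as a rough sketch of what such a convertor has to do (class name and method bodies here are hypothetical; only `convert_x(df, device=...)` and the `target_tensor_type`/`squeeze_target_tensor` constructor kwargs appear in the diff):

    import pandas as pd
    import torch

    class DataFrameToTensorConvertor:
        # Hypothetical stand-in for the DefaultPyTorchDataConvertor used below.
        def __init__(self, target_tensor_type=torch.float, squeeze_target_tensor=False):
            self.target_tensor_type = target_tensor_type
            self.squeeze_target_tensor = squeeze_target_tensor

        def convert_x(self, df: pd.DataFrame, device: str = "cpu") -> torch.Tensor:
            # Features become float tensors on the training device.
            return torch.tensor(df.values, dtype=torch.float, device=device)

        def convert_y(self, df: pd.DataFrame, device: str = "cpu") -> torch.Tensor:
            y = torch.tensor(df.values, dtype=self.target_tensor_type, device=device)
            # Classifiers want a 1-D long target (e.g. for CrossEntropyLoss).
            return y.squeeze() if self.squeeze_target_tensor else y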
freqtrade/freqai/base_models/BasePyTorchRegressor.py  (new file, 49 lines)
@@ -0,0 +1,49 @@
import logging
from typing import Tuple

import numpy as np
import numpy.typing as npt
from pandas import DataFrame

from freqtrade.freqai.base_models.BasePyTorchModel import BasePyTorchModel
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen


logger = logging.getLogger(__name__)


class BasePyTorchRegressor(BasePyTorchModel):
    """
    A PyTorch implementation of a regressor.
    User must implement fit method
    """
    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    def predict(
            self, unfiltered_df: DataFrame, dk: FreqaiDataKitchen, **kwargs
    ) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
        """
        Filter the prediction features data and predict with it.
        :param unfiltered_df: Full dataframe for the current backtest period.
        :return:
        :pred_df: dataframe containing the predictions
        :do_predict: np.array of 1s and 0s to indicate places where freqai needed to remove
        data (NaNs) or felt uncertain about data (PCA and DI index)
        """

        dk.find_features(unfiltered_df)
        filtered_df, _ = dk.filter_features(
            unfiltered_df, dk.training_features_list, training_filter=False
        )
        filtered_df = dk.normalize_data_from_metadata(filtered_df)
        dk.data_dictionary["prediction_features"] = filtered_df

        self.data_cleaning_predict(dk)
        x = self.data_convertor.convert_x(
            dk.data_dictionary["prediction_features"],
            device=self.device
        )
        y = self.model.model(x)
        pred_df = DataFrame(y.detach().numpy(), columns=[dk.label_list[0]])
        return (pred_df, dk.do_predict)
@@ -74,8 +74,8 @@ class FreqaiDataDrawer:
         self.historic_predictions: Dict[str, DataFrame] = {}
         self.full_path = full_path
-        self.historic_predictions_folder = Path(self.full_path / "historic_predictions")
-        self.historic_predictions_bkp_folder = Path(self.full_path / "historic_predictions_backup")
+        self.historic_predictions_path = Path(self.full_path / "historic_predictions.pkl")
+        self.historic_predictions_bkp_path = Path(
+            self.full_path / "historic_predictions.backup.pkl")
         self.pair_dictionary_path = Path(self.full_path / "pair_dictionary.json")
         self.global_metadata_path = Path(self.full_path / "global_metadata.json")
         self.metric_tracker_path = Path(self.full_path / "metric_tracker.json")
@@ -163,12 +163,11 @@ class FreqaiDataDrawer:
         Locate and load a previously saved historic predictions.
         :return: bool - whether or not the drawer was located
         """
-        exists = self.historic_predictions_folder.exists()
-        convert = self.historic_predictions_path.is_file()
-
+        exists = self.historic_predictions_path.is_file()
         if exists:
             try:
-                self.load_historic_predictions_from_folder()
+                with self.historic_predictions_path.open("rb") as fp:
+                    self.historic_predictions = cloudpickle.load(fp)
                 logger.info(
                     f"Found existing historic predictions at {self.full_path}, but beware "
                     "that statistics may be inaccurate if the bot has been offline for "
@@ -176,54 +175,25 @@ class FreqaiDataDrawer:
                 )
             except EOFError:
                 logger.warning(
-                    'Historical prediction files were corrupted. Trying to load backup files.')
-                self.load_historic_predictions_from_folder()
-                logger.warning('FreqAI successfully loaded the backup '
-                               'historical predictions files.')
-
-        elif not exists and convert:
-            logger.info("Converting your historic predictions pkl to parquet"
-                        "to improve performance.")
-            with Path.open(self.historic_predictions_path, "rb") as fp:
-                self.historic_predictions = cloudpickle.load(fp)
-            self.save_historic_predictions_to_disk()
-            exists = True
+                    'Historical prediction file was corrupted. Trying to load backup file.')
+                with self.historic_predictions_bkp_path.open("rb") as fp:
+                    self.historic_predictions = cloudpickle.load(fp)
+                logger.warning('FreqAI successfully loaded the backup historical predictions file.')

         else:
-            logger.warning(
-                f"Follower could not find historic predictions at {self.full_path} "
-                "sending null values back to strategy"
-            )
+            logger.info("Could not find existing historic_predictions, starting from scratch")

         return exists

-    def load_historic_predictions_from_folder(self):
-        """
-        Try to build the historic_predictions dictionary from parquet
-        files in the historic_predictions_folder
-        """
-        for file_path in self.historic_predictions_folder.glob("*.parquet"):
-            key = file_path.stem
-            key.replace("_", "/")
-            self.historic_predictions[key] = pd.read_parquet(file_path)
-
-        return
-
     def save_historic_predictions_to_disk(self):
         """
         Save historic predictions pickle to disk
         """
-        self.historic_predictions_folder.mkdir(parents=True, exist_ok=True)
-        for key, value in self.historic_predictions.items():
-            key = key.replace("/", "_")
-            # pytest.set_trace()
-            filename = Path(self.historic_predictions_folder / f"{key}.parquet")
-            value.to_parquet(filename)
+        with self.historic_predictions_path.open("wb") as fp:
+            cloudpickle.dump(self.historic_predictions, fp, protocol=cloudpickle.DEFAULT_PROTOCOL)

         # create a backup
-        shutil.copytree(self.historic_predictions_folder,
-                        self.historic_predictions_bkp_folder, dirs_exist_ok=True)
+        shutil.copy(self.historic_predictions_path, self.historic_predictions_bkp_path)

     def save_metric_tracker_to_disk(self):
         """
@@ -476,7 +446,7 @@ class FreqaiDataDrawer:
             dump(model, save_path / f"{dk.model_filename}_model.joblib")
         elif self.model_type == 'keras':
             model.save(save_path / f"{dk.model_filename}_model.h5")
-        elif 'stable_baselines' in self.model_type or 'sb3_contrib' == self.model_type:
+        elif self.model_type in ["stable_baselines3", "sb3_contrib", "pytorch"]:
             model.save(save_path / f"{dk.model_filename}_model.zip")

         if dk.svm_model is not None:
@@ -526,7 +496,7 @@ class FreqaiDataDrawer:
         dk.training_features_list = dk.data["training_features_list"]
         dk.label_list = dk.data["label_list"]

-    def load_data(self, coin: str, dk: FreqaiDataKitchen) -> Any:
+    def load_data(self, coin: str, dk: FreqaiDataKitchen) -> Any:  # noqa: C901
         """
         loads all data required to make a prediction on a sub-train time range
         :returns:
@@ -567,6 +537,11 @@ class FreqaiDataDrawer:
                 self.model_type, self.freqai_info['rl_config']['model_type'])
             MODELCLASS = getattr(mod, self.freqai_info['rl_config']['model_type'])
             model = MODELCLASS.load(dk.data_path / f"{dk.model_filename}_model")
+        elif self.model_type == 'pytorch':
+            import torch
+            zip = torch.load(dk.data_path / f"{dk.model_filename}_model.zip")
+            model = zip["pytrainer"]
+            model = model.load_from_checkpoint(zip)

         if Path(dk.data_path / f"{dk.model_filename}_svm_model.joblib").is_file():
             dk.svm_model = load(dk.data_path / f"{dk.model_filename}_svm_model.joblib")
@@ -705,7 +680,7 @@ class FreqaiDataDrawer:
         Returns timerange information based on historic predictions file
         :return: timerange calculated from saved live data
         """
-        if not self.historic_predictions_folder.exists():
+        if not self.historic_predictions_path.is_file():
             raise OperationalException(
                 'Historic predictions not found. Historic predictions data is required '
                 'to run backtest with the freqai-backtest-live-models option '
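The storage format moves back from per-pair parquet files to a single cloudpickle file with a one-file backup. The save/recover pattern in isolation (paths illustrative):

    import shutil
    from pathlib import Path

    import cloudpickle

    path = Path('models/historic_predictions.pkl')           # illustrative
    bkp = Path('models/historic_predictions.backup.pkl')

    def save(obj) -> None:
        with path.open('wb') as fp:
            cloudpickle.dump(obj, fp, protocol=cloudpickle.DEFAULT_PROTOCOL)
        shutil.copy(path, bkp)  # backup enables recovery from a corrupted main file

    def load():
        try:
            with path.open('rb') as fp:
                return cloudpickle.load(fp)
        except EOFError:
            with bkp.open('rb') as fp:  # fall back to the backup copy
                return cloudpickle.load(fp)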
@@ -1291,7 +1291,7 @@ class FreqaiDataKitchen:

         return dataframe

-    def use_strategy_to_populate_indicators(
+    def use_strategy_to_populate_indicators(  # noqa: C901
         self,
         strategy: IStrategy,
         corr_dataframes: dict = {},
@@ -1362,12 +1362,12 @@ class FreqaiDataKitchen:
                 dataframe = self.populate_features(dataframe.copy(), corr_pair, strategy,
                                                    corr_dataframes, base_dataframes, True)

-        dataframe = strategy.set_freqai_targets(dataframe.copy(), metadata=metadata)
+        if self.live:
+            dataframe = strategy.set_freqai_targets(dataframe.copy(), metadata=metadata)
+            dataframe = self.remove_special_chars_from_feature_names(dataframe)

         self.get_unique_classes_from_labels(dataframe)

-        dataframe = self.remove_special_chars_from_feature_names(dataframe)
-
         if self.config.get('reduce_df_footprint', False):
             dataframe = reduce_dataframe_footprint(dataframe)
@@ -83,6 +83,7 @@ class IFreqaiModel(ABC):
         self.CONV_WIDTH = self.freqai_info.get('conv_width', 1)
         if self.ft_params.get("inlier_metric_window", 0):
             self.CONV_WIDTH = self.ft_params.get("inlier_metric_window", 0) * 2
+        self.class_names: List[str] = []  # used in classification subclasses
         self.pair_it = 0
         self.pair_it_train = 0
         self.total_pairs = len(self.config.get("exchange", {}).get("pair_whitelist"))
@@ -105,6 +106,9 @@ class IFreqaiModel(ABC):
         self.max_system_threads = max(int(psutil.cpu_count() * 2 - 2), 1)
         self.can_short = True  # overridden in start() with strategy.can_short
+        self.model: Any = None
+        if self.ft_params.get('principal_component_analysis', False) and self.continual_learning:
+            self.ft_params.update({'principal_component_analysis': False})
+            logger.warning('User tried to use PCA with continual learning. Deactivating PCA.')

         record_params(config, self.full_path)

@@ -154,8 +158,7 @@ class IFreqaiModel(ABC):
             dk = self.start_backtesting(dataframe, metadata, self.dk, strategy)
             dataframe = dk.remove_features_from_df(dk.return_dataframe)
         else:
-            logger.info(
-                "Backtesting using historic predictions (live models)")
+            logger.info("Backtesting using historic predictions (live models)")
             dk = self.start_backtesting_from_historic_predictions(
                 dataframe, metadata, self.dk)
             dataframe = dk.return_dataframe
@@ -304,7 +307,7 @@ class IFreqaiModel(ABC):
                 if check_features:
                     self.dd.load_metadata(dk)
                     dataframe_dummy_features = self.dk.use_strategy_to_populate_indicators(
-                        strategy, prediction_dataframe=dataframe.tail(1), pair=metadata["pair"]
+                        strategy, prediction_dataframe=dataframe.tail(1), pair=pair
                     )
                     dk.find_features(dataframe_dummy_features)
                     self.check_if_feature_list_matches_strategy(dk)
@@ -314,7 +317,7 @@ class IFreqaiModel(ABC):
             else:
                 if populate_indicators:
                     dataframe = self.dk.use_strategy_to_populate_indicators(
-                        strategy, prediction_dataframe=dataframe, pair=metadata["pair"]
+                        strategy, prediction_dataframe=dataframe, pair=pair
                     )
                     populate_indicators = False

@@ -330,6 +333,10 @@ class IFreqaiModel(ABC):
             dataframe_train = dk.slice_dataframe(tr_train, dataframe_base_train)
             dataframe_backtest = dk.slice_dataframe(tr_backtest, dataframe_base_backtest)

+            dataframe_train = dk.remove_special_chars_from_feature_names(dataframe_train)
+            dataframe_backtest = dk.remove_special_chars_from_feature_names(dataframe_backtest)
+            dk.get_unique_classes_from_labels(dataframe_train)
+
             if not self.model_exists(dk):
                 dk.find_features(dataframe_train)
                 dk.find_labels(dataframe_train)
@@ -565,8 +572,9 @@ class IFreqaiModel(ABC):
             file_type = ".joblib"
         elif self.dd.model_type == 'keras':
             file_type = ".h5"
-        elif 'stable_baselines' in self.dd.model_type or 'sb3_contrib' == self.dd.model_type:
+        elif self.dd.model_type in ["stable_baselines3", "sb3_contrib", "pytorch"]:
             file_type = ".zip"

         path_to_modelfile = Path(dk.data_path / f"{dk.model_filename}_model{file_type}")
         file_exists = path_to_modelfile.is_file()
         if file_exists:
@@ -14,16 +14,20 @@ logger = logging.getLogger(__name__)

 class CatboostClassifier(BaseClassifierModel):
     """
-    User created prediction model. The class needs to override three necessary
-    functions, predict(), train(), fit(). The class inherits ModelHandler which
-    has its own DataHandler where data is held, saved, loaded, and managed.
+    User created prediction model. The class inherits IFreqaiModel, which
+    means it has full access to all Frequency AI functionality. Typically,
+    users would use this to override the common `fit()`, `train()`, or
+    `predict()` methods to add their custom data handling tools or change
+    various aspects of the training that cannot be configured via the
+    top level config.json file.
     """

     def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """
         User sets up the training and test data to fit their desired model here
-        :param data_dictionary: the dictionary constructed by DataHandler to hold
-        all the training and test data/labels.
+        :param data_dictionary: the dictionary holding all data for train, test,
+            labels, weights
+        :param dk: The datakitchen object for the current coin/model
         """

         train_data = Pool(
@@ -15,16 +15,20 @@ logger = logging.getLogger(__name__)

 class CatboostClassifierMultiTarget(BaseClassifierModel):
     """
-    User created prediction model. The class needs to override three necessary
-    functions, predict(), train(), fit(). The class inherits ModelHandler which
-    has its own DataHandler where data is held, saved, loaded, and managed.
+    User created prediction model. The class inherits IFreqaiModel, which
+    means it has full access to all Frequency AI functionality. Typically,
+    users would use this to override the common `fit()`, `train()`, or
+    `predict()` methods to add their custom data handling tools or change
+    various aspects of the training that cannot be configured via the
+    top level config.json file.
     """

     def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """
         User sets up the training and test data to fit their desired model here
-        :param data_dictionary: the dictionary constructed by DataHandler to hold
-        all the training and test data/labels.
+        :param data_dictionary: the dictionary holding all data for train, test,
+            labels, weights
+        :param dk: The datakitchen object for the current coin/model
         """

         cbc = CatBoostClassifier(
@@ -14,16 +14,20 @@ logger = logging.getLogger(__name__)

 class CatboostRegressor(BaseRegressionModel):
     """
-    User created prediction model. The class needs to override three necessary
-    functions, predict(), train(), fit(). The class inherits ModelHandler which
-    has its own DataHandler where data is held, saved, loaded, and managed.
+    User created prediction model. The class inherits IFreqaiModel, which
+    means it has full access to all Frequency AI functionality. Typically,
+    users would use this to override the common `fit()`, `train()`, or
+    `predict()` methods to add their custom data handling tools or change
+    various aspects of the training that cannot be configured via the
+    top level config.json file.
     """

     def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """
         User sets up the training and test data to fit their desired model here
-        :param data_dictionary: the dictionary constructed by DataHandler to hold
-        all the training and test data/labels.
+        :param data_dictionary: the dictionary holding all data for train, test,
+            labels, weights
+        :param dk: The datakitchen object for the current coin/model
         """

         train_data = Pool(
@@ -15,16 +15,20 @@ logger = logging.getLogger(__name__)

 class CatboostRegressorMultiTarget(BaseRegressionModel):
     """
-    User created prediction model. The class needs to override three necessary
-    functions, predict(), train(), fit(). The class inherits ModelHandler which
-    has its own DataHandler where data is held, saved, loaded, and managed.
+    User created prediction model. The class inherits IFreqaiModel, which
+    means it has full access to all Frequency AI functionality. Typically,
+    users would use this to override the common `fit()`, `train()`, or
+    `predict()` methods to add their custom data handling tools or change
+    various aspects of the training that cannot be configured via the
+    top level config.json file.
     """

     def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """
         User sets up the training and test data to fit their desired model here
-        :param data_dictionary: the dictionary constructed by DataHandler to hold
-        all the training and test data/labels.
+        :param data_dictionary: the dictionary holding all data for train, test,
+            labels, weights
+        :param dk: The datakitchen object for the current coin/model
         """

         cbr = CatBoostRegressor(
@@ -12,16 +12,20 @@ logger = logging.getLogger(__name__)

 class LightGBMClassifier(BaseClassifierModel):
     """
-    User created prediction model. The class needs to override three necessary
-    functions, predict(), train(), fit(). The class inherits ModelHandler which
-    has its own DataHandler where data is held, saved, loaded, and managed.
+    User created prediction model. The class inherits IFreqaiModel, which
+    means it has full access to all Frequency AI functionality. Typically,
+    users would use this to override the common `fit()`, `train()`, or
+    `predict()` methods to add their custom data handling tools or change
+    various aspects of the training that cannot be configured via the
+    top level config.json file.
     """

     def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """
         User sets up the training and test data to fit their desired model here
-        :param data_dictionary: the dictionary constructed by DataHandler to hold
-        all the training and test data/labels.
+        :param data_dictionary: the dictionary holding all data for train, test,
+            labels, weights
+        :param dk: The datakitchen object for the current coin/model
         """

         if self.freqai_info.get('data_split_parameters', {}).get('test_size', 0.1) == 0:
@@ -13,16 +13,20 @@ logger = logging.getLogger(__name__)

 class LightGBMClassifierMultiTarget(BaseClassifierModel):
     """
-    User created prediction model. The class needs to override three necessary
-    functions, predict(), train(), fit(). The class inherits ModelHandler which
-    has its own DataHandler where data is held, saved, loaded, and managed.
+    User created prediction model. The class inherits IFreqaiModel, which
+    means it has full access to all Frequency AI functionality. Typically,
+    users would use this to override the common `fit()`, `train()`, or
+    `predict()` methods to add their custom data handling tools or change
+    various aspects of the training that cannot be configured via the
+    top level config.json file.
     """

     def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """
         User sets up the training and test data to fit their desired model here
-        :param data_dictionary: the dictionary constructed by DataHandler to hold
-        all the training and test data/labels.
+        :param data_dictionary: the dictionary holding all data for train, test,
+            labels, weights
+        :param dk: The datakitchen object for the current coin/model
         """

         lgb = LGBMClassifier(**self.model_training_parameters)
@@ -12,18 +12,20 @@ logger = logging.getLogger(__name__)

 class LightGBMRegressor(BaseRegressionModel):
     """
-    User created prediction model. The class needs to override three necessary
-    functions, predict(), train(), fit(). The class inherits ModelHandler which
-    has its own DataHandler where data is held, saved, loaded, and managed.
+    User created prediction model. The class inherits IFreqaiModel, which
+    means it has full access to all Frequency AI functionality. Typically,
+    users would use this to override the common `fit()`, `train()`, or
+    `predict()` methods to add their custom data handling tools or change
+    various aspects of the training that cannot be configured via the
+    top level config.json file.
     """

     def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """
-        Most regressors use the same function names and arguments e.g. user
-        can drop in LGBMRegressor in place of CatBoostRegressor and all data
-        management will be properly handled by Freqai.
-        :param data_dictionary: the dictionary constructed by DataHandler to hold
-        all the training and test data/labels.
+        User sets up the training and test data to fit their desired model here
+        :param data_dictionary: the dictionary holding all data for train, test,
+            labels, weights
+        :param dk: The datakitchen object for the current coin/model
         """

         if self.freqai_info.get('data_split_parameters', {}).get('test_size', 0.1) == 0:
@@ -13,16 +13,20 @@ logger = logging.getLogger(__name__)

 class LightGBMRegressorMultiTarget(BaseRegressionModel):
     """
-    User created prediction model. The class needs to override three necessary
-    functions, predict(), train(), fit(). The class inherits ModelHandler which
-    has its own DataHandler where data is held, saved, loaded, and managed.
+    User created prediction model. The class inherits IFreqaiModel, which
+    means it has full access to all Frequency AI functionality. Typically,
+    users would use this to override the common `fit()`, `train()`, or
+    `predict()` methods to add their custom data handling tools or change
+    various aspects of the training that cannot be configured via the
+    top level config.json file.
     """

     def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """
         User sets up the training and test data to fit their desired model here
-        :param data_dictionary: the dictionary constructed by DataHandler to hold
-        all the training and test data/labels.
+        :param data_dictionary: the dictionary holding all data for train, test,
+            labels, weights
+        :param dk: The datakitchen object for the current coin/model
         """

         lgb = LGBMRegressor(**self.model_training_parameters)
freqtrade/freqai/prediction_models/PyTorchMLPClassifier.py  (new file, 89 lines)
@@ -0,0 +1,89 @@
from typing import Any, Dict

import torch

from freqtrade.freqai.base_models.BasePyTorchClassifier import BasePyTorchClassifier
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
from freqtrade.freqai.torch.PyTorchDataConvertor import (DefaultPyTorchDataConvertor,
                                                         PyTorchDataConvertor)
from freqtrade.freqai.torch.PyTorchMLPModel import PyTorchMLPModel
from freqtrade.freqai.torch.PyTorchModelTrainer import PyTorchModelTrainer


class PyTorchMLPClassifier(BasePyTorchClassifier):
    """
    This class implements the fit method of IFreqaiModel.
    in the fit method we initialize the model and trainer objects.
    the only requirement from the model is to be aligned to PyTorchClassifier
    predict method that expects the model to predict a tensor of type long.

    parameters are passed via `model_training_parameters` under the freqai
    section in the config file. e.g:
    {
        ...
        "freqai": {
            ...
            "model_training_parameters" : {
                "learning_rate": 3e-4,
                "trainer_kwargs": {
                    "max_iters": 5000,
                    "batch_size": 64,
                    "max_n_eval_batches": null,
                },
                "model_kwargs": {
                    "hidden_dim": 512,
                    "dropout_percent": 0.2,
                    "n_layer": 1,
                },
            }
        }
    }
    """

    @property
    def data_convertor(self) -> PyTorchDataConvertor:
        return DefaultPyTorchDataConvertor(
            target_tensor_type=torch.long,
            squeeze_target_tensor=True
        )

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        config = self.freqai_info.get("model_training_parameters", {})
        self.learning_rate: float = config.get("learning_rate", 3e-4)
        self.model_kwargs: Dict[str, Any] = config.get("model_kwargs", {})
        self.trainer_kwargs: Dict[str, Any] = config.get("trainer_kwargs", {})

    def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
        """
        User sets up the training and test data to fit their desired model here
        :param data_dictionary: the dictionary holding all data for train, test,
            labels, weights
        :param dk: The datakitchen object for the current coin/model
        :raises ValueError: If self.class_names is not defined in the parent class.
        """

        class_names = self.get_class_names()
        self.convert_label_column_to_int(data_dictionary, dk, class_names)
        n_features = data_dictionary["train_features"].shape[-1]
        model = PyTorchMLPModel(
            input_dim=n_features,
            output_dim=len(class_names),
            **self.model_kwargs
        )
        model.to(self.device)
        optimizer = torch.optim.AdamW(model.parameters(), lr=self.learning_rate)
        criterion = torch.nn.CrossEntropyLoss()
        init_model = self.get_init_model(dk.pair)
        trainer = PyTorchModelTrainer(
            model=model,
            optimizer=optimizer,
            criterion=criterion,
            model_meta_data={"class_names": class_names},
            device=self.device,
            init_model=init_model,
            data_convertor=self.data_convertor,
            **self.trainer_kwargs,
        )
        trainer.fit(data_dictionary, self.splits)
        return trainer
freqtrade/freqai/prediction_models/PyTorchMLPRegressor.py  (new file, 83 lines)
@@ -0,0 +1,83 @@
from typing import Any, Dict

import torch

from freqtrade.freqai.base_models.BasePyTorchRegressor import BasePyTorchRegressor
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
from freqtrade.freqai.torch.PyTorchDataConvertor import (DefaultPyTorchDataConvertor,
                                                         PyTorchDataConvertor)
from freqtrade.freqai.torch.PyTorchMLPModel import PyTorchMLPModel
from freqtrade.freqai.torch.PyTorchModelTrainer import PyTorchModelTrainer


class PyTorchMLPRegressor(BasePyTorchRegressor):
    """
    This class implements the fit method of IFreqaiModel.
    in the fit method we initialize the model and trainer objects.
    the only requirement from the model is to be aligned to PyTorchRegressor
    predict method that expects the model to predict tensor of type float.
    the trainer defines the training loop.

    parameters are passed via `model_training_parameters` under the freqai
    section in the config file. e.g:
    {
        ...
        "freqai": {
            ...
            "model_training_parameters" : {
                "learning_rate": 3e-4,
                "trainer_kwargs": {
                    "max_iters": 5000,
                    "batch_size": 64,
                    "max_n_eval_batches": null,
                },
                "model_kwargs": {
                    "hidden_dim": 512,
                    "dropout_percent": 0.2,
                    "n_layer": 1,
                },
            }
        }
    }
    """

    @property
    def data_convertor(self) -> PyTorchDataConvertor:
        return DefaultPyTorchDataConvertor(target_tensor_type=torch.float)

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        config = self.freqai_info.get("model_training_parameters", {})
        self.learning_rate: float = config.get("learning_rate", 3e-4)
        self.model_kwargs: Dict[str, Any] = config.get("model_kwargs", {})
        self.trainer_kwargs: Dict[str, Any] = config.get("trainer_kwargs", {})

    def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
        """
        User sets up the training and test data to fit their desired model here
        :param data_dictionary: the dictionary holding all data for train, test,
            labels, weights
        :param dk: The datakitchen object for the current coin/model
        """

        n_features = data_dictionary["train_features"].shape[-1]
        model = PyTorchMLPModel(
            input_dim=n_features,
            output_dim=1,
            **self.model_kwargs
        )
        model.to(self.device)
        optimizer = torch.optim.AdamW(model.parameters(), lr=self.learning_rate)
        criterion = torch.nn.MSELoss()
        init_model = self.get_init_model(dk.pair)
        trainer = PyTorchModelTrainer(
            model=model,
            optimizer=optimizer,
            criterion=criterion,
            device=self.device,
            init_model=init_model,
            data_convertor=self.data_convertor,
            **self.trainer_kwargs,
        )
        trainer.fit(data_dictionary, self.splits)
        return trainer
@@ -18,16 +18,20 @@ logger = logging.getLogger(__name__)

 class XGBoostClassifier(BaseClassifierModel):
     """
-    User created prediction model. The class needs to override three necessary
-    functions, predict(), train(), fit(). The class inherits ModelHandler which
-    has its own DataHandler where data is held, saved, loaded, and managed.
+    User created prediction model. The class inherits IFreqaiModel, which
+    means it has full access to all Frequency AI functionality. Typically,
+    users would use this to override the common `fit()`, `train()`, or
+    `predict()` methods to add their custom data handling tools or change
+    various aspects of the training that cannot be configured via the
+    top level config.json file.
     """

     def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """
         User sets up the training and test data to fit their desired model here
-        :param data_dictionary: the dictionary constructed by DataHandler to hold
-        all the training and test data/labels.
+        :param data_dictionary: the dictionary holding all data for train, test,
+            labels, weights
+        :param dk: The datakitchen object for the current coin/model
         """

         X = data_dictionary["train_features"].to_numpy()
@@ -18,16 +18,20 @@ logger = logging.getLogger(__name__)


 class XGBoostRFClassifier(BaseClassifierModel):
     """
-    User created prediction model. The class needs to override three necessary
-    functions, predict(), train(), fit(). The class inherits ModelHandler which
-    has its own DataHandler where data is held, saved, loaded, and managed.
+    User created prediction model. The class inherits IFreqaiModel, which
+    means it has full access to all Frequency AI functionality. Typically,
+    users would use this to override the common `fit()`, `train()`, or
+    `predict()` methods to add their custom data handling tools or change
+    various aspects of the training that cannot be configured via the
+    top level config.json file.
     """

     def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """
         User sets up the training and test data to fit their desired model here
-        :param data_dictionary: the dictionary constructed by DataHandler to hold
-        all the training and test data/labels.
+        :param data_dictionary: the dictionary holding all data for train, test,
+        labels, weights
+        :param dk: The datakitchen object for the current coin/model
         """

         X = data_dictionary["train_features"].to_numpy()
@@ -12,16 +12,20 @@ logger = logging.getLogger(__name__)


 class XGBoostRFRegressor(BaseRegressionModel):
     """
-    User created prediction model. The class needs to override three necessary
-    functions, predict(), train(), fit(). The class inherits ModelHandler which
-    has its own DataHandler where data is held, saved, loaded, and managed.
+    User created prediction model. The class inherits IFreqaiModel, which
+    means it has full access to all Frequency AI functionality. Typically,
+    users would use this to override the common `fit()`, `train()`, or
+    `predict()` methods to add their custom data handling tools or change
+    various aspects of the training that cannot be configured via the
+    top level config.json file.
     """

     def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """
         User sets up the training and test data to fit their desired model here
-        :param data_dictionary: the dictionary constructed by DataHandler to hold
-        all the training and test data/labels.
+        :param data_dictionary: the dictionary holding all data for train, test,
+        labels, weights
+        :param dk: The datakitchen object for the current coin/model
         """

         X = data_dictionary["train_features"]
@@ -12,16 +12,20 @@ logger = logging.getLogger(__name__)


 class XGBoostRegressor(BaseRegressionModel):
     """
-    User created prediction model. The class needs to override three necessary
-    functions, predict(), train(), fit(). The class inherits ModelHandler which
-    has its own DataHandler where data is held, saved, loaded, and managed.
+    User created prediction model. The class inherits IFreqaiModel, which
+    means it has full access to all Frequency AI functionality. Typically,
+    users would use this to override the common `fit()`, `train()`, or
+    `predict()` methods to add their custom data handling tools or change
+    various aspects of the training that cannot be configured via the
+    top level config.json file.
     """

     def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """
         User sets up the training and test data to fit their desired model here
-        :param data_dictionary: the dictionary constructed by DataHandler to hold
-        all the training and test data/labels.
+        :param data_dictionary: the dictionary holding all data for train, test,
+        labels, weights
+        :param dk: The datakitchen object for the current coin/model
         """

         X = data_dictionary["train_features"]
@@ -13,16 +13,20 @@ logger = logging.getLogger(__name__)


 class XGBoostRegressorMultiTarget(BaseRegressionModel):
     """
-    User created prediction model. The class needs to override three necessary
-    functions, predict(), train(), fit(). The class inherits ModelHandler which
-    has its own DataHandler where data is held, saved, loaded, and managed.
+    User created prediction model. The class inherits IFreqaiModel, which
+    means it has full access to all Frequency AI functionality. Typically,
+    users would use this to override the common `fit()`, `train()`, or
+    `predict()` methods to add their custom data handling tools or change
+    various aspects of the training that cannot be configured via the
+    top level config.json file.
     """

     def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """
         User sets up the training and test data to fit their desired model here
-        :param data_dictionary: the dictionary constructed by DataHandler to hold
-        all the training and test data/labels.
+        :param data_dictionary: the dictionary holding all data for train, test,
+        labels, weights
+        :param dk: The datakitchen object for the current coin/model
         """

         xgb = XGBRegressor(**self.model_training_parameters)
67  freqtrade/freqai/torch/PyTorchDataConvertor.py  Normal file
@@ -0,0 +1,67 @@
from abc import ABC, abstractmethod
from typing import List, Optional

import pandas as pd
import torch


class PyTorchDataConvertor(ABC):
    """
    This class is responsible for converting `*_features` and `*_labels` pandas
    dataframes to pytorch tensors.
    """

    @abstractmethod
    def convert_x(self, df: pd.DataFrame, device: Optional[str] = None) -> List[torch.Tensor]:
        """
        :param df: "*_features" dataframe.
        :param device: The device to use for training (e.g. 'cpu', 'cuda').
        """

    @abstractmethod
    def convert_y(self, df: pd.DataFrame, device: Optional[str] = None) -> List[torch.Tensor]:
        """
        :param df: "*_labels" dataframe.
        :param device: The device to use for training (e.g. 'cpu', 'cuda').
        """


class DefaultPyTorchDataConvertor(PyTorchDataConvertor):
    """
    A default conversion that keeps the features dataframe shapes.
    """

    def __init__(
            self,
            target_tensor_type: Optional[torch.dtype] = None,
            squeeze_target_tensor: bool = False
    ):
        """
        :param target_tensor_type: type of the target tensor; for classification use
            torch.long, for regression use torch.float or torch.double.
        :param squeeze_target_tensor: controls the target shape, used for loss functions
            that require a 0D or 1D target.
        """
        self._target_tensor_type = target_tensor_type
        self._squeeze_target_tensor = squeeze_target_tensor

    def convert_x(self, df: pd.DataFrame, device: Optional[str] = None) -> List[torch.Tensor]:
        x = torch.from_numpy(df.values).float()
        if device:
            x = x.to(device)

        return [x]

    def convert_y(self, df: pd.DataFrame, device: Optional[str] = None) -> List[torch.Tensor]:
        y = torch.from_numpy(df.values)

        if self._target_tensor_type:
            y = y.to(self._target_tensor_type)

        if self._squeeze_target_tensor:
            y = y.squeeze()

        if device:
            y = y.to(device)

        return [y]
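To make the conversion contract concrete, here is a minimal usage sketch of the convertor above; the dataframe contents and column names are illustrative, not taken from the source:

import pandas as pd
import torch

from freqtrade.freqai.torch.PyTorchDataConvertor import DefaultPyTorchDataConvertor

# Hypothetical feature/label frames, just to exercise the convertor.
features = pd.DataFrame({"rsi": [30.0, 55.0, 70.0], "ema": [1.1, 1.2, 1.3]})
labels = pd.DataFrame({"target": [0.01, -0.02, 0.03]})

convertor = DefaultPyTorchDataConvertor(target_tensor_type=torch.float)
x = convertor.convert_x(features)  # -> [tensor of shape (3, 2)]
y = convertor.convert_y(labels)    # -> [tensor of shape (3, 1)]
assert x[0].shape == (3, 2) and y[0].dtype == torch.float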
97  freqtrade/freqai/torch/PyTorchMLPModel.py  Normal file
@@ -0,0 +1,97 @@
import logging
from typing import List

import torch
from torch import nn


logger = logging.getLogger(__name__)


class PyTorchMLPModel(nn.Module):
    """
    A multi-layer perceptron (MLP) model implemented using PyTorch.

    This class mainly serves as a simple example for the integration of PyTorch models
    into freqai. It is not optimized at all and should not be used for production purposes.

    :param input_dim: The number of input features. This parameter specifies the number
        of features in the input data that the MLP will use to make predictions.
    :param output_dim: The number of output classes. This parameter specifies the number
        of classes that the MLP will predict.
    :param hidden_dim: The number of hidden units in each layer. This parameter controls
        the complexity of the MLP and determines how many nonlinear relationships the MLP
        can represent. Increasing the number of hidden units can increase the capacity of
        the MLP to model complex patterns, but it also increases the risk of overfitting
        the training data. Default: 256
    :param dropout_percent: The dropout rate for regularization. This parameter specifies
        the probability of dropping out a neuron during training to prevent overfitting.
        The dropout rate should be tuned carefully to balance between underfitting and
        overfitting. Default: 0.2
    :param n_layer: The number of layers in the MLP. This parameter specifies the number
        of layers in the MLP architecture. Adding more layers to the MLP can increase its
        capacity to model complex patterns, but it also increases the risk of overfitting
        the training data. Default: 1

    :returns: The output of the MLP, with shape (batch_size, output_dim)
    """

    def __init__(self, input_dim: int, output_dim: int, **kwargs):
        super().__init__()
        hidden_dim: int = kwargs.get("hidden_dim", 256)
        dropout_percent: float = kwargs.get("dropout_percent", 0.2)
        n_layer: int = kwargs.get("n_layer", 1)
        self.input_layer = nn.Linear(input_dim, hidden_dim)
        self.blocks = nn.Sequential(*[Block(hidden_dim, dropout_percent) for _ in range(n_layer)])
        self.output_layer = nn.Linear(hidden_dim, output_dim)
        self.relu = nn.ReLU()
        self.dropout = nn.Dropout(p=dropout_percent)

    def forward(self, tensors: List[torch.Tensor]) -> torch.Tensor:
        x: torch.Tensor = tensors[0]
        x = self.relu(self.input_layer(x))
        x = self.dropout(x)
        x = self.blocks(x)
        x = self.output_layer(x)
        return x


class Block(nn.Module):
    """
    A building block for a multi-layer perceptron (MLP).

    :param hidden_dim: The number of hidden units in the feedforward network.
    :param dropout_percent: The dropout rate for regularization.

    :returns: torch.Tensor with shape (batch_size, hidden_dim)
    """

    def __init__(self, hidden_dim: int, dropout_percent: float):
        super().__init__()
        self.ff = FeedForward(hidden_dim)
        self.dropout = nn.Dropout(p=dropout_percent)
        self.ln = nn.LayerNorm(hidden_dim)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        x = self.ff(self.ln(x))
        x = self.dropout(x)
        return x


class FeedForward(nn.Module):
    """
    A simple fully-connected feedforward neural network block.

    :param hidden_dim: The number of hidden units in the block.
    :return: torch.Tensor with shape (batch_size, hidden_dim)
    """

    def __init__(self, hidden_dim: int):
        super().__init__()
        self.net = nn.Sequential(
            nn.Linear(hidden_dim, hidden_dim),
            nn.ReLU(),
        )

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.net(x)
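Note that forward() takes a list of tensors rather than a bare tensor, matching the convertor's list output. A short shape-check sketch (the dimensions are illustrative):

import torch

from freqtrade.freqai.torch.PyTorchMLPModel import PyTorchMLPModel

# Hypothetical dimensions: 10 input features, 1 regression target.
model = PyTorchMLPModel(input_dim=10, output_dim=1, hidden_dim=64, n_layer=2)
batch = [torch.randn(32, 10)]  # forward() expects a list of tensors
out = model(batch)
assert out.shape == (32, 1)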
208  freqtrade/freqai/torch/PyTorchModelTrainer.py  Normal file
@@ -0,0 +1,208 @@
import logging
import math
from pathlib import Path
from typing import Any, Dict, List, Optional

import pandas as pd
import torch
from torch import nn
from torch.optim import Optimizer
from torch.utils.data import DataLoader, TensorDataset

from freqtrade.freqai.torch.PyTorchDataConvertor import PyTorchDataConvertor
from freqtrade.freqai.torch.PyTorchTrainerInterface import PyTorchTrainerInterface


logger = logging.getLogger(__name__)


class PyTorchModelTrainer(PyTorchTrainerInterface):
    def __init__(
            self,
            model: nn.Module,
            optimizer: Optimizer,
            criterion: nn.Module,
            device: str,
            init_model: Dict,
            data_convertor: PyTorchDataConvertor,
            model_meta_data: Dict[str, Any] = {},
            **kwargs
    ):
        """
        :param model: The PyTorch model to be trained.
        :param optimizer: The optimizer to use for training.
        :param criterion: The loss function to use for training.
        :param device: The device to use for training (e.g. 'cpu', 'cuda').
        :param init_model: A dictionary containing the initial model/optimizer
            state_dict and model_meta_data saved by the self.save() method.
        :param data_convertor: convertor from pd.DataFrame to torch.Tensor.
        :param model_meta_data: Additional metadata about the model (optional).
        :param max_iters: The number of training iterations to run. An iteration
            here refers to one call to self.optimizer.step(); it is used to
            calculate n_epochs.
        :param batch_size: The size of the batches to use during training.
        :param max_n_eval_batches: The maximum number of batches to use for evaluation.
        """
        self.model = model
        self.optimizer = optimizer
        self.criterion = criterion
        self.model_meta_data = model_meta_data
        self.device = device
        self.max_iters: int = kwargs.get("max_iters", 100)
        self.batch_size: int = kwargs.get("batch_size", 64)
        self.max_n_eval_batches: Optional[int] = kwargs.get("max_n_eval_batches", None)
        self.data_convertor = data_convertor
        if init_model:
            self.load_from_checkpoint(init_model)

    def fit(self, data_dictionary: Dict[str, pd.DataFrame], splits: List[str]):
        """
        :param data_dictionary: the dictionary constructed by DataHandler to hold
            all the training and test data/labels.
        :param splits: splits to use in training; splits must contain "train".
            An optional "test" split can be added by setting
            freqai.data_split_parameters.test_size > 0 in the config file.

        For each batch this method:
        - Calculates the predicted output for the batch using the PyTorch model.
        - Calculates the loss between the predicted and actual output using a loss function.
        - Computes the gradients of the loss with respect to the model's parameters using
          backpropagation.
        - Updates the model's parameters using an optimizer.
        """
        data_loaders_dictionary = self.create_data_loaders_dictionary(data_dictionary, splits)
        epochs = self.calc_n_epochs(
            n_obs=len(data_dictionary["train_features"]),
            batch_size=self.batch_size,
            n_iters=self.max_iters
        )
        for epoch in range(1, epochs + 1):
            # training
            losses = []
            for i, batch_data in enumerate(data_loaders_dictionary["train"]):
                # Tensor.to() is not in-place; reassign the moved tensors.
                batch_data = [t.to(self.device) for t in batch_data]

                xb = batch_data[:-1]
                yb = batch_data[-1]
                yb_pred = self.model(xb)
                loss = self.criterion(yb_pred, yb)

                self.optimizer.zero_grad(set_to_none=True)
                loss.backward()
                self.optimizer.step()
                losses.append(loss.item())
            train_loss = sum(losses) / len(losses)
            log_message = f"epoch {epoch}/{epochs}: train loss {train_loss:.4f}"

            # evaluation
            if "test" in splits:
                test_loss = self.estimate_loss(
                    data_loaders_dictionary,
                    self.max_n_eval_batches,
                    "test"
                )
                log_message += f"; test loss {test_loss:.4f}"

            logger.info(log_message)

    @torch.no_grad()
    def estimate_loss(
            self,
            data_loader_dictionary: Dict[str, DataLoader],
            max_n_eval_batches: Optional[int],
            split: str,
    ) -> float:
        self.model.eval()
        losses = []
        for i, batch_data in enumerate(data_loader_dictionary[split]):
            # Stop once the evaluation budget is exhausted.
            if max_n_eval_batches and i >= max_n_eval_batches:
                break

            # Tensor.to() is not in-place; reassign the moved tensors.
            batch_data = [t.to(self.device) for t in batch_data]

            xb = batch_data[:-1]
            yb = batch_data[-1]
            yb_pred = self.model(xb)
            loss = self.criterion(yb_pred, yb)
            losses.append(loss.item())

        self.model.train()
        return sum(losses) / len(losses)

    def create_data_loaders_dictionary(
            self,
            data_dictionary: Dict[str, pd.DataFrame],
            splits: List[str]
    ) -> Dict[str, DataLoader]:
        """
        Converts the input data to PyTorch tensors using a data loader.
        """
        data_loader_dictionary = {}
        for split in splits:
            x = self.data_convertor.convert_x(data_dictionary[f"{split}_features"])
            y = self.data_convertor.convert_y(data_dictionary[f"{split}_labels"])
            dataset = TensorDataset(*x, *y)
            data_loader = DataLoader(
                dataset,
                batch_size=self.batch_size,
                shuffle=True,
                drop_last=True,
                num_workers=0,
            )
            data_loader_dictionary[split] = data_loader

        return data_loader_dictionary

    @staticmethod
    def calc_n_epochs(n_obs: int, batch_size: int, n_iters: int) -> int:
        """
        Calculates the number of epochs required to reach the maximum number
        of iterations specified in the model training parameters.

        The motivation here is that `max_iters` is easier to optimize and keep stable
        across different values of n_obs - the number of data points.
        """

        n_batches = math.ceil(n_obs / batch_size)
        epochs = math.ceil(n_iters / n_batches)
        if epochs <= 10:
            logger.warning(f"User set `max_iters` in such a way that the trainer will only "
                           f"perform {epochs} epochs. Please consider increasing this value "
                           f"accordingly")
        if epochs <= 1:
            logger.warning("Epochs set to 1. Please review your `max_iters` value")
            epochs = 1
        return epochs

    def save(self, path: Path):
        """
        - Saves any nn.Module state_dict.
        - Saves model_meta_data; this dict should contain any additional data that the
          user needs to store, e.g. class_names for classification models.
        """

        torch.save({
            "model_state_dict": self.model.state_dict(),
            "optimizer_state_dict": self.optimizer.state_dict(),
            "model_meta_data": self.model_meta_data,
            "pytrainer": self
        }, path)

    def load(self, path: Path):
        checkpoint = torch.load(path)
        return self.load_from_checkpoint(checkpoint)

    def load_from_checkpoint(self, checkpoint: Dict):
        """
        When using continual_learning, DataDrawer will load the dictionary
        (containing state dicts and model_meta_data) by calling torch.load(path).
        You can access this dict from any class that inherits IFreqaiModel by calling
        the get_init_model method.
        """
        self.model.load_state_dict(checkpoint["model_state_dict"])
        self.optimizer.load_state_dict(checkpoint["optimizer_state_dict"])
        self.model_meta_data = checkpoint["model_meta_data"]
        return self
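As a worked example of the epoch arithmetic in calc_n_epochs above (the numbers are illustrative, and the sketch uses true ceiling division rather than floor division inside math.ceil):

import math

# With 4200 training rows, batch_size=64 and max_iters=5000:
n_batches = math.ceil(4200 / 64)       # 66 optimizer steps per epoch
epochs = math.ceil(5000 / n_batches)   # ceil(5000 / 66) = 76 epochs
assert (n_batches, epochs) == (66, 76)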
53  freqtrade/freqai/torch/PyTorchTrainerInterface.py  Normal file
@@ -0,0 +1,53 @@
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Dict, List

import pandas as pd
import torch
from torch import nn


class PyTorchTrainerInterface(ABC):

    @abstractmethod
    def fit(self, data_dictionary: Dict[str, pd.DataFrame], splits: List[str]) -> None:
        """
        :param data_dictionary: the dictionary constructed by DataHandler to hold
            all the training and test data/labels.
        :param splits: splits to use in training; splits must contain "train".
            An optional "test" split can be added by setting
            freqai.data_split_parameters.test_size > 0 in the config file.

        For each batch this method:
        - Calculates the predicted output for the batch using the PyTorch model.
        - Calculates the loss between the predicted and actual output using a loss function.
        - Computes the gradients of the loss with respect to the model's parameters using
          backpropagation.
        - Updates the model's parameters using an optimizer.
        """

    @abstractmethod
    def save(self, path: Path) -> None:
        """
        - Saves any nn.Module state_dict.
        - Saves model_meta_data; this dict should contain any additional data that the
          user needs to store, e.g. class_names for classification models.
        """

    def load(self, path: Path) -> nn.Module:
        """
        :param path: path to zip file.
        :returns: pytorch model.
        """
        checkpoint = torch.load(path)
        return self.load_from_checkpoint(checkpoint)

    @abstractmethod
    def load_from_checkpoint(self, checkpoint: Dict) -> nn.Module:
        """
        When using continual_learning, DataDrawer will load the dictionary
        (containing state dicts and model_meta_data) by calling torch.load(path).
        You can access this dict from any class that inherits IFreqaiModel by calling
        the get_init_model method.
        :param checkpoint: dict containing the model & optimizer state dicts,
            model_meta_data, etc.
        """
0  freqtrade/freqai/torch/__init__.py  Normal file
@@ -21,15 +21,19 @@ from freqtrade.enums import (ExitCheckTuple, ExitType, RPCMessageType, RunMode,
                              State, TradingMode)
 from freqtrade.exceptions import (DependencyException, ExchangeError, InsufficientFundsError,
                                   InvalidOrderException, PricingError)
-from freqtrade.exchange import timeframe_to_minutes, timeframe_to_next_date, timeframe_to_seconds
+from freqtrade.exchange import (ROUND_DOWN, ROUND_UP, timeframe_to_minutes, timeframe_to_next_date,
+                                timeframe_to_seconds)
 from freqtrade.misc import safe_value_fallback, safe_value_fallback2
 from freqtrade.mixins import LoggingMixin
 from freqtrade.persistence import Order, PairLocks, Trade, init_db
+from freqtrade.persistence.key_value_store import set_startup_time
 from freqtrade.plugins.pairlistmanager import PairListManager
 from freqtrade.plugins.protectionmanager import ProtectionManager
 from freqtrade.resolvers import ExchangeResolver, StrategyResolver
 from freqtrade.rpc import RPCManager
 from freqtrade.rpc.external_message_consumer import ExternalMessageConsumer
+from freqtrade.rpc.rpc_types import (RPCBuyMsg, RPCCancelMsg, RPCProtectionMsg, RPCSellCancelMsg,
+                                     RPCSellMsg)
 from freqtrade.strategy.interface import IStrategy
 from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper
 from freqtrade.util import FtPrecise
@@ -179,6 +183,7 @@ class FreqtradeBot(LoggingMixin):
         performs startup tasks
         """
         migrate_binance_futures_names(self.config)
+        set_startup_time()

         self.rpc.startup_messages(self.config, self.pairlists, self.protections)
         # Update older trades with precision and precision mode
@@ -212,7 +217,8 @@ class FreqtradeBot(LoggingMixin):
         self.dataprovider.refresh(self.pairlists.create_pair_list(self.active_pair_whitelist),
                                   self.strategy.gather_informative_pairs())

-        strategy_safe_wrapper(self.strategy.bot_loop_start, supress_error=True)()
+        strategy_safe_wrapper(self.strategy.bot_loop_start, supress_error=True)(
+            current_time=datetime.now(timezone.utc))

         self.strategy.analyze(self.active_pair_whitelist)

@@ -850,11 +856,13 @@ class FreqtradeBot(LoggingMixin):
             logger.info(f"Canceling stoploss on exchange for {trade}")
             co = self.exchange.cancel_stoploss_order_with_result(
                 trade.stoploss_order_id, trade.pair, trade.amount)
             trade.update_order(co)
+            self.update_trade_state(trade, trade.stoploss_order_id, co, stoploss_order=True)

             # Reset stoploss order id.
             trade.stoploss_order_id = None
         except InvalidOrderException:
-            logger.exception(f"Could not cancel stoploss order {trade.stoploss_order_id}")
+            logger.exception(f"Could not cancel stoploss order {trade.stoploss_order_id} "
+                             f"for pair {trade.pair}")
         return trade

     def get_valid_enter_price_and_stake(
@@ -941,12 +949,11 @@ class FreqtradeBot(LoggingMixin):

         return enter_limit_requested, stake_amount, leverage

-    def _notify_enter(self, trade: Trade, order: Order, order_type: Optional[str] = None,
+    def _notify_enter(self, trade: Trade, order: Order, order_type: str,
                       fill: bool = False, sub_trade: bool = False) -> None:
         """
         Sends rpc notification when a entry order occurred.
         """
-        msg_type = RPCMessageType.ENTRY_FILL if fill else RPCMessageType.ENTRY
         open_rate = order.safe_price

         if open_rate is None:
@@ -957,9 +964,9 @@ class FreqtradeBot(LoggingMixin):
         current_rate = self.exchange.get_rate(
             trade.pair, side='entry', is_short=trade.is_short, refresh=False)

-        msg = {
+        msg: RPCBuyMsg = {
             'trade_id': trade.id,
-            'type': msg_type,
+            'type': RPCMessageType.ENTRY_FILL if fill else RPCMessageType.ENTRY,
             'buy_tag': trade.enter_tag,
             'enter_tag': trade.enter_tag,
             'exchange': trade.exchange.capitalize(),
@@ -971,6 +978,7 @@ class FreqtradeBot(LoggingMixin):
             'order_type': order_type,
             'stake_amount': trade.stake_amount,
             'stake_currency': self.config['stake_currency'],
+            'base_currency': self.exchange.get_pair_base_currency(trade.pair),
             'fiat_currency': self.config.get('fiat_display_currency', None),
             'amount': order.safe_amount_after_fee if fill else (order.amount or trade.amount),
             'open_date': trade.open_date or datetime.utcnow(),
@@ -989,7 +997,7 @@ class FreqtradeBot(LoggingMixin):
         current_rate = self.exchange.get_rate(
             trade.pair, side='entry', is_short=trade.is_short, refresh=False)

-        msg = {
+        msg: RPCCancelMsg = {
             'trade_id': trade.id,
             'type': RPCMessageType.ENTRY_CANCEL,
             'buy_tag': trade.enter_tag,
@@ -1001,7 +1009,9 @@ class FreqtradeBot(LoggingMixin):
             'limit': trade.open_rate,
             'order_type': order_type,
             'stake_amount': trade.stake_amount,
+            'open_rate': trade.open_rate,
             'stake_currency': self.config['stake_currency'],
+            'base_currency': self.exchange.get_pair_base_currency(trade.pair),
             'fiat_currency': self.config.get('fiat_display_currency', None),
             'amount': trade.amount,
             'open_date': trade.open_date,
@@ -1165,7 +1175,8 @@ class FreqtradeBot(LoggingMixin):
             logger.warning('Unable to fetch stoploss order: %s', exception)

         if stoploss_order:
+            trade.update_order(stoploss_order)
             self.update_trade_state(trade, trade.stoploss_order_id, stoploss_order,
                                     stoploss_order=True)

         # We check if stoploss order is fulfilled
         if stoploss_order and stoploss_order['status'] in ('closed', 'triggered'):
@@ -1229,7 +1240,9 @@ class FreqtradeBot(LoggingMixin):
         :param order: Current on exchange stoploss order
         :return: None
         """
-        stoploss_norm = self.exchange.price_to_precision(trade.pair, trade.stoploss_or_liquidation)
+        stoploss_norm = self.exchange.price_to_precision(
+            trade.pair, trade.stoploss_or_liquidation,
+            rounding_mode=ROUND_DOWN if trade.is_short else ROUND_UP)

         if self.exchange.stoploss_adjust(stoploss_norm, order, side=trade.exit_side):
             # we check if the update is necessary
@@ -1239,13 +1252,8 @@ class FreqtradeBot(LoggingMixin):
             # cancelling the current stoploss on exchange first
             logger.info(f"Cancelling current stoploss on exchange for pair {trade.pair} "
                         f"(orderid:{order['id']}) in order to add another one ...")
-            try:
-                co = self.exchange.cancel_stoploss_order_with_result(order['id'], trade.pair,
-                                                                     trade.amount)
-                trade.update_order(co)
-            except InvalidOrderException:
-                logger.exception(f"Could not cancel stoploss order {order['id']} "
-                                 f"for pair {trade.pair}")
+
+            self.cancel_stoploss_on_exchange(trade)

             # Create new stoploss order
             if not self.create_stoploss_order(trade=trade, stop_price=stoploss_norm):
@@ -1477,8 +1485,8 @@ class FreqtradeBot(LoggingMixin):
             return False

         try:
-            order = self.exchange.cancel_order_with_result(order['id'], trade.pair,
-                                                           trade.amount)
+            order = self.exchange.cancel_order_with_result(
+                order['id'], trade.pair, trade.amount)
         except InvalidOrderException:
             logger.exception(
                 f"Could not cancel {trade.exit_side} order {trade.open_order_id}")
@@ -1490,17 +1498,18 @@ class FreqtradeBot(LoggingMixin):
             # Order might be filled above in odd timing issues.
             if order.get('status') in ('canceled', 'cancelled'):
                 trade.exit_reason = None
+                trade.open_order_id = None
             else:
                 trade.exit_reason = exit_reason_prev
             cancelled = True
         else:
             reason = constants.CANCEL_REASON['CANCELLED_ON_EXCHANGE']
             trade.exit_reason = None
+            trade.open_order_id = None

             self.update_trade_state(trade, trade.open_order_id, order)

         logger.info(f'{trade.exit_side.capitalize()} order {reason} for {trade}.')
-        trade.open_order_id = None
         trade.close_rate = None
         trade.close_rate_requested = None

@@ -1666,7 +1675,7 @@ class FreqtradeBot(LoggingMixin):
         amount = trade.amount
         gain = "profit" if profit_ratio > 0 else "loss"

-        msg = {
+        msg: RPCSellMsg = {
             'type': (RPCMessageType.EXIT_FILL if fill
                      else RPCMessageType.EXIT),
             'trade_id': trade.id,
@@ -1692,6 +1701,7 @@ class FreqtradeBot(LoggingMixin):
             'close_date': trade.close_date or datetime.utcnow(),
             'stake_amount': trade.stake_amount,
             'stake_currency': self.config['stake_currency'],
+            'base_currency': self.exchange.get_pair_base_currency(trade.pair),
             'fiat_currency': self.config.get('fiat_display_currency'),
             'sub_trade': sub_trade,
             'cumulative_profit': trade.realized_profit,
@@ -1722,7 +1732,7 @@ class FreqtradeBot(LoggingMixin):
         profit_ratio = trade.calc_profit_ratio(profit_rate)
         gain = "profit" if profit_ratio > 0 else "loss"

-        msg = {
+        msg: RPCSellCancelMsg = {
             'type': RPCMessageType.EXIT_CANCEL,
             'trade_id': trade.id,
             'exchange': trade.exchange.capitalize(),
@@ -1744,6 +1754,7 @@ class FreqtradeBot(LoggingMixin):
             'open_date': trade.open_date,
             'close_date': trade.close_date or datetime.now(timezone.utc),
             'stake_currency': self.config['stake_currency'],
+            'base_currency': self.exchange.get_pair_base_currency(trade.pair),
             'fiat_currency': self.config.get('fiat_display_currency', None),
             'reason': reason,
             'sub_trade': sub_trade,
@@ -1775,11 +1786,11 @@ class FreqtradeBot(LoggingMixin):
             return False

         # Update trade with order values
-        logger.info(f'Found open order for {trade}')
+        if not stoploss_order:
+            logger.info(f'Found open order for {trade}')
         try:
-            order = action_order or self.exchange.fetch_order_or_stoploss_order(order_id,
-                                                                                trade.pair,
-                                                                                stoploss_order)
+            order = action_order or self.exchange.fetch_order_or_stoploss_order(
+                order_id, trade.pair, stoploss_order)
         except InvalidOrderException as exception:
             logger.warning('Unable to fetch order %s: %s', order_id, exception)
             return False
@@ -1808,7 +1819,7 @@ class FreqtradeBot(LoggingMixin):
             # TODO: should shorting/leverage be supported by Edge,
             # then this will need to be fixed.
             trade.adjust_stop_loss(trade.open_rate, self.strategy.stoploss, initial=True)
-        if order.get('side') == trade.entry_side or trade.amount > 0:
+        if order.get('side') == trade.entry_side or (trade.amount > 0 and trade.is_open):
             # Must also run for partial exits
             # TODO: Margin will need to use interest_rate as well.
             # interest_rate = self.exchange.get_interest_rate()
@@ -1844,21 +1855,27 @@ class FreqtradeBot(LoggingMixin):
                 self.handle_protections(trade.pair, trade.trade_direction)
         elif send_msg and not trade.open_order_id and not stoploss_order:
             # Enter fill
-            self._notify_enter(trade, order, fill=True, sub_trade=sub_trade)
+            self._notify_enter(trade, order, order.order_type, fill=True, sub_trade=sub_trade)

     def handle_protections(self, pair: str, side: LongShort) -> None:
         # Lock pair for one candle to prevent immediate rebuys
         self.strategy.lock_pair(pair, datetime.now(timezone.utc), reason='Auto lock')
         prot_trig = self.protections.stop_per_pair(pair, side=side)
         if prot_trig:
-            msg = {'type': RPCMessageType.PROTECTION_TRIGGER, }
-            msg.update(prot_trig.to_json())
+            msg: RPCProtectionMsg = {
+                'type': RPCMessageType.PROTECTION_TRIGGER,
+                'base_currency': self.exchange.get_pair_base_currency(prot_trig.pair),
+                **prot_trig.to_json()  # type: ignore
+            }
             self.rpc.send_msg(msg)

         prot_trig_glb = self.protections.global_stop(side=side)
         if prot_trig_glb:
-            msg = {'type': RPCMessageType.PROTECTION_TRIGGER_GLOBAL, }
-            msg.update(prot_trig_glb.to_json())
+            msg = {
+                'type': RPCMessageType.PROTECTION_TRIGGER_GLOBAL,
+                'base_currency': self.exchange.get_pair_base_currency(prot_trig_glb.pair),
+                **prot_trig_glb.to_json()  # type: ignore
+            }
             self.rpc.send_msg(msg)

     def apply_fee_conditional(self, trade: Trade, trade_base_currency: str,
@@ -203,9 +203,10 @@ class Backtesting:
         # since a "perfect" stoploss-exit is assumed anyway
         # And the regular "stoploss" function would not apply to that case
         self.strategy.order_types['stoploss_on_exchange'] = False
+        # Update can_short flag
+        self._can_short = self.trading_mode != TradingMode.SPOT and strategy.can_short

         self.strategy.ft_bot_start()
-        strategy_safe_wrapper(self.strategy.bot_loop_start, supress_error=True)()

     def _load_protections(self, strategy: IStrategy):
         if self.config.get('enable_protections', False):
@@ -740,7 +741,7 @@ class Backtesting:
             proposed_leverage=1.0,
             max_leverage=max_leverage,
             side=direction, entry_tag=entry_tag,
-        ) if self._can_short else 1.0
+        ) if self.trading_mode != TradingMode.SPOT else 1.0
         # Cap leverage between 1.0 and max_leverage.
         leverage = min(max(leverage, 1.0), max_leverage)

@@ -1030,6 +1031,9 @@ class Backtesting:
                     requested_stake=(
                         order.safe_remaining * order.ft_price / trade.leverage),
                     direction='short' if trade.is_short else 'long')
+                # Delete trade if no successful entries happened (if placing the new order failed)
+                if trade.open_order_id is None and trade.nr_of_successful_entries == 0:
+                    return True
                 self.replaced_entry_orders += 1
             else:
                 # assumption: there can't be multiple open entry orders at any given time
@@ -1155,6 +1159,8 @@ class Backtesting:
         while current_time <= end_date:
             open_trade_count_start = LocalTrade.bt_open_open_trade_count
             self.check_abort()
+            strategy_safe_wrapper(self.strategy.bot_loop_start, supress_error=True)(
+                current_time=current_time)
             for i, pair in enumerate(data):
                 row_index = indexes[pair]
                 row = self.validate_row(data, pair, row_index, current_time)
@@ -23,6 +23,8 @@ logger = logging.getLogger(__name__)

 NON_OPT_PARAM_APPENDIX = "  # value loaded from strategy"

+HYPER_PARAMS_FILE_FORMAT = rapidjson.NM_NATIVE | rapidjson.NM_NAN
+

 def hyperopt_serializer(x):
     if isinstance(x, np.integer):
@@ -76,9 +78,18 @@ class HyperoptTools():
         with filename.open('w') as f:
             rapidjson.dump(final_params, f, indent=2,
                            default=hyperopt_serializer,
-                           number_mode=rapidjson.NM_NATIVE | rapidjson.NM_NAN
+                           number_mode=HYPER_PARAMS_FILE_FORMAT
                            )

+    @staticmethod
+    def load_params(filename: Path) -> Dict:
+        """
+        Load parameters from file
+        """
+        with filename.open('r') as f:
+            params = rapidjson.load(f, number_mode=HYPER_PARAMS_FILE_FORMAT)
+        return params
+
     @staticmethod
     def try_export_params(config: Config, strategy_name: str, params: Dict):
         if params.get(FTHYPT_FILEVERSION, 1) >= 2 and not config.get('disableparamexport', False):
@@ -189,7 +200,7 @@ class HyperoptTools():
             for s in ['buy', 'sell', 'protection',
                       'roi', 'stoploss', 'trailing', 'max_open_trades']:
                 HyperoptTools._params_update_for_json(result_dict, params, non_optimized, s)
-            print(rapidjson.dumps(result_dict, default=str, number_mode=rapidjson.NM_NATIVE))
+            print(rapidjson.dumps(result_dict, default=str, number_mode=HYPER_PARAMS_FILE_FORMAT))

         else:
             HyperoptTools._params_pretty_print(params, 'buy', "Buy hyperspace params:",
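The shared `HYPER_PARAMS_FILE_FORMAT` constant matters because hyperopt results can contain NaN values. A hedged sketch of the round-trip it is meant to enable (the parameter values are illustrative; this assumes python-rapidjson's documented NM_NAN behavior):

import rapidjson

HYPER_PARAMS_FILE_FORMAT = rapidjson.NM_NATIVE | rapidjson.NM_NAN

params = {"stoploss": -0.1, "max_open_trades": float("nan")}
# NM_NAN permits NaN/Infinity during both dump and load, which the
# default JSON number mode would reject.
text = rapidjson.dumps(params, number_mode=HYPER_PARAMS_FILE_FORMAT)
loaded = rapidjson.loads(text, number_mode=HYPER_PARAMS_FILE_FORMAT)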
@@ -865,6 +865,11 @@ def show_backtest_result(strategy: str, results: Dict[str, Any], stake_currency:
         print(' BACKTESTING REPORT '.center(len(table.splitlines()[0]), '='))
         print(table)

+    table = text_table_bt_results(results['left_open_trades'], stake_currency=stake_currency)
+    if isinstance(table, str) and len(table) > 0:
+        print(' LEFT OPEN TRADES REPORT '.center(len(table.splitlines()[0]), '='))
+        print(table)
+
     if (results.get('results_per_enter_tag') is not None
             or results.get('results_per_buy_tag') is not None):
         # results_per_buy_tag is deprecated and should be removed 2 versions after short golive.
@@ -884,11 +889,6 @@ def show_backtest_result(strategy: str, results: Dict[str, Any], stake_currency:
         print(' EXIT REASON STATS '.center(len(table.splitlines()[0]), '='))
         print(table)

-    table = text_table_bt_results(results['left_open_trades'], stake_currency=stake_currency)
-    if isinstance(table, str) and len(table) > 0:
-        print(' LEFT OPEN TRADES REPORT '.center(len(table.splitlines()[0]), '='))
-        print(table)
-
     for period in backtest_breakdown:
         days_breakdown_stats = generate_periodic_breakdown_stats(
             trade_list=results['trades'], period=period)
@@ -917,11 +917,11 @@ def show_backtest_results(config: Config, backtest_stats: Dict):
             strategy, results, stake_currency,
             config.get('backtest_breakdown', []))

-    if len(backtest_stats['strategy']) > 1:
+    if len(backtest_stats['strategy']) > 0:
         # Print Strategy summary table

         table = text_table_strategy(backtest_stats['strategy_comparison'], stake_currency)
-        print(f"{results['backtest_start']} -> {results['backtest_end']} |"
+        print(f"Backtested {results['backtest_start']} -> {results['backtest_end']} |"
               f" Max open trades : {results['max_open_trades']}")
         print(' STRATEGY SUMMARY '.center(len(table.splitlines()[0]), '='))
         print(table)
@@ -1,5 +1,6 @@
 # flake8: noqa: F401

+from freqtrade.persistence.key_value_store import KeyStoreKeys, KeyValueStore
 from freqtrade.persistence.models import init_db
 from freqtrade.persistence.pairlock_middleware import PairLocks
 from freqtrade.persistence.trade_model import LocalTrade, Order, Trade
179  freqtrade/persistence/key_value_store.py  Normal file
@@ -0,0 +1,179 @@
from datetime import datetime, timezone
from enum import Enum
from typing import ClassVar, Optional, Union

from sqlalchemy import String
from sqlalchemy.orm import Mapped, mapped_column

from freqtrade.persistence.base import ModelBase, SessionType


ValueTypes = Union[str, datetime, float, int]


class ValueTypesEnum(str, Enum):
    STRING = 'str'
    DATETIME = 'datetime'
    FLOAT = 'float'
    INT = 'int'


class KeyStoreKeys(str, Enum):
    BOT_START_TIME = 'bot_start_time'
    STARTUP_TIME = 'startup_time'


class _KeyValueStoreModel(ModelBase):
    """
    Key-value store database model.
    """
    __tablename__ = 'KeyValueStore'
    session: ClassVar[SessionType]

    id: Mapped[int] = mapped_column(primary_key=True)

    key: Mapped[KeyStoreKeys] = mapped_column(String(25), nullable=False, index=True)

    value_type: Mapped[ValueTypesEnum] = mapped_column(String(20), nullable=False)

    string_value: Mapped[Optional[str]]
    datetime_value: Mapped[Optional[datetime]]
    float_value: Mapped[Optional[float]]
    int_value: Mapped[Optional[int]]


class KeyValueStore():
    """
    Generic bot-wide, persistent key-value store.
    Can be used to store generic values, e.g. the very first bot startup time.
    Supports the types str, datetime, float and int.
    """

    @staticmethod
    def store_value(key: KeyStoreKeys, value: ValueTypes) -> None:
        """
        Store the given value for the given key.
        :param key: Key to store the value for - can be used in get_value to retrieve the key
        :param value: Value to store - can be str, datetime, float or int
        """
        kv = _KeyValueStoreModel.session.query(_KeyValueStoreModel).filter(
            _KeyValueStoreModel.key == key).first()
        if kv is None:
            kv = _KeyValueStoreModel(key=key)
        if isinstance(value, str):
            kv.value_type = ValueTypesEnum.STRING
            kv.string_value = value
        elif isinstance(value, datetime):
            kv.value_type = ValueTypesEnum.DATETIME
            kv.datetime_value = value
        elif isinstance(value, float):
            kv.value_type = ValueTypesEnum.FLOAT
            kv.float_value = value
        elif isinstance(value, int):
            kv.value_type = ValueTypesEnum.INT
            kv.int_value = value
        else:
            raise ValueError(f'Unsupported value type {type(value)}')
        _KeyValueStoreModel.session.add(kv)
        _KeyValueStoreModel.session.commit()

    @staticmethod
    def delete_value(key: KeyStoreKeys) -> None:
        """
        Delete the value for the given key.
        :param key: Key to delete the value for
        """
        kv = _KeyValueStoreModel.session.query(_KeyValueStoreModel).filter(
            _KeyValueStoreModel.key == key).first()
        if kv is not None:
            _KeyValueStoreModel.session.delete(kv)
            _KeyValueStoreModel.session.commit()

    @staticmethod
    def get_value(key: KeyStoreKeys) -> Optional[ValueTypes]:
        """
        Get the value for the given key.
        :param key: Key to get the value for
        """
        kv = _KeyValueStoreModel.session.query(_KeyValueStoreModel).filter(
            _KeyValueStoreModel.key == key).first()
        if kv is None:
            return None
        if kv.value_type == ValueTypesEnum.STRING:
            return kv.string_value
        if kv.value_type == ValueTypesEnum.DATETIME and kv.datetime_value is not None:
            return kv.datetime_value.replace(tzinfo=timezone.utc)
        if kv.value_type == ValueTypesEnum.FLOAT:
            return kv.float_value
        if kv.value_type == ValueTypesEnum.INT:
            return kv.int_value
        # This should never happen unless someone messed with the database manually
        raise ValueError(f'Unknown value type {kv.value_type}')  # pragma: no cover

    @staticmethod
    def get_string_value(key: KeyStoreKeys) -> Optional[str]:
        """
        Get the string value for the given key.
        :param key: Key to get the value for
        """
        kv = _KeyValueStoreModel.session.query(_KeyValueStoreModel).filter(
            _KeyValueStoreModel.key == key,
            _KeyValueStoreModel.value_type == ValueTypesEnum.STRING).first()
        if kv is None:
            return None
        return kv.string_value

    @staticmethod
    def get_datetime_value(key: KeyStoreKeys) -> Optional[datetime]:
        """
        Get the datetime value for the given key.
        :param key: Key to get the value for
        """
        kv = _KeyValueStoreModel.session.query(_KeyValueStoreModel).filter(
            _KeyValueStoreModel.key == key,
            _KeyValueStoreModel.value_type == ValueTypesEnum.DATETIME).first()
        if kv is None or kv.datetime_value is None:
            return None
        return kv.datetime_value.replace(tzinfo=timezone.utc)

    @staticmethod
    def get_float_value(key: KeyStoreKeys) -> Optional[float]:
        """
        Get the float value for the given key.
        :param key: Key to get the value for
        """
        kv = _KeyValueStoreModel.session.query(_KeyValueStoreModel).filter(
            _KeyValueStoreModel.key == key,
            _KeyValueStoreModel.value_type == ValueTypesEnum.FLOAT).first()
        if kv is None:
            return None
        return kv.float_value

    @staticmethod
    def get_int_value(key: KeyStoreKeys) -> Optional[int]:
        """
        Get the int value for the given key.
        :param key: Key to get the value for
        """
        kv = _KeyValueStoreModel.session.query(_KeyValueStoreModel).filter(
            _KeyValueStoreModel.key == key,
            _KeyValueStoreModel.value_type == ValueTypesEnum.INT).first()
        if kv is None:
            return None
        return kv.int_value


def set_startup_time():
    """
    Sets bot_start_time to the first trade open date - or "now" on new databases.
    Sets startup_time to "now".
    """
    st = KeyValueStore.get_value('bot_start_time')
    if st is None:
        from freqtrade.persistence import Trade
        t = Trade.session.query(Trade).order_by(Trade.open_date.asc()).first()
        if t is not None:
            KeyValueStore.store_value('bot_start_time', t.open_date_utc)
        else:
            KeyValueStore.store_value('bot_start_time', datetime.now(timezone.utc))
    KeyValueStore.store_value('startup_time', datetime.now(timezone.utc))
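A minimal usage sketch of the store (this assumes init_db() has already bound the session, and the stored value is illustrative):

from datetime import datetime, timezone

from freqtrade.persistence import KeyStoreKeys, KeyValueStore

# Store and read back the bot start time; the datetime getter
# re-attaches UTC tzinfo on the way out.
KeyValueStore.store_value(KeyStoreKeys.BOT_START_TIME, datetime.now(timezone.utc))
start = KeyValueStore.get_datetime_value(KeyStoreKeys.BOT_START_TIME)
assert start is not None and start.tzinfo is timezone.utc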
@@ -13,6 +13,7 @@ from sqlalchemy.pool import StaticPool

 from freqtrade.exceptions import OperationalException
 from freqtrade.persistence.base import ModelBase
+from freqtrade.persistence.key_value_store import _KeyValueStoreModel
 from freqtrade.persistence.migrations import check_migrate
 from freqtrade.persistence.pairlock import PairLock
 from freqtrade.persistence.trade_model import Order, Trade
@@ -76,6 +77,7 @@ def init_db(db_url: str) -> None:
         bind=engine, autoflush=False), scopefunc=get_request_or_thread_id)
     Order.session = Trade.session
     PairLock.session = Trade.session
+    _KeyValueStoreModel.session = Trade.session

     previous_tables = inspect(engine).get_table_names()
     ModelBase.metadata.create_all(engine)
@@ -9,13 +9,14 @@ from typing import Any, ClassVar, Dict, List, Optional, Sequence, cast

 from sqlalchemy import (Enum, Float, ForeignKey, Integer, ScalarResult, Select, String,
                         UniqueConstraint, desc, func, select)
-from sqlalchemy.orm import Mapped, lazyload, mapped_column, relationship
+from sqlalchemy.orm import Mapped, lazyload, mapped_column, relationship, validates

-from freqtrade.constants import (DATETIME_PRINT_FORMAT, MATH_CLOSE_PREC, NON_OPEN_EXCHANGE_STATES,
-                                 BuySell, LongShort)
+from freqtrade.constants import (CUSTOM_TAG_MAX_LENGTH, DATETIME_PRINT_FORMAT, MATH_CLOSE_PREC,
+                                 NON_OPEN_EXCHANGE_STATES, BuySell, LongShort)
 from freqtrade.enums import ExitType, TradingMode
 from freqtrade.exceptions import DependencyException, OperationalException
-from freqtrade.exchange import amount_to_contract_precision, price_to_precision
+from freqtrade.exchange import (ROUND_DOWN, ROUND_UP, amount_to_contract_precision,
+                                price_to_precision)
 from freqtrade.leverage import interest
 from freqtrade.persistence.base import ModelBase, SessionType
 from freqtrade.util import FtPrecise
@@ -560,6 +561,9 @@ class LocalTrade():
             'trading_mode': self.trading_mode,
             'funding_fees': self.funding_fees,
             'open_order_id': self.open_order_id,
+            'amount_precision': self.amount_precision,
+            'price_precision': self.price_precision,
+            'precision_mode': self.precision_mode,
             'orders': orders,
         }

@@ -594,7 +598,8 @@ class LocalTrade():
         """
         Method used internally to set self.stop_loss.
         """
-        stop_loss_norm = price_to_precision(stop_loss, self.price_precision, self.precision_mode)
+        stop_loss_norm = price_to_precision(stop_loss, self.price_precision, self.precision_mode,
+                                            rounding_mode=ROUND_DOWN if self.is_short else ROUND_UP)
         if not self.stop_loss:
             self.initial_stop_loss = stop_loss_norm
         self.stop_loss = stop_loss_norm
@@ -625,7 +630,8 @@ class LocalTrade():
         if self.initial_stop_loss_pct is None or refresh:
             self.__set_stop_loss(new_loss, stoploss)
             self.initial_stop_loss = price_to_precision(
-                new_loss, self.price_precision, self.precision_mode)
+                new_loss, self.price_precision, self.precision_mode,
+                rounding_mode=ROUND_DOWN if self.is_short else ROUND_UP)
             self.initial_stop_loss_pct = -1 * abs(stoploss)

         # evaluate if the stop loss needs to be updated
@@ -689,21 +695,24 @@ class LocalTrade():
             else:
                 logger.warning(
                     f'Got different open_order_id {self.open_order_id} != {order.order_id}')

-        elif order.ft_order_side == 'stoploss' and order.status not in ('open', ):
-            self.stoploss_order_id = None
-            self.close_rate_requested = self.stop_loss
-            self.exit_reason = ExitType.STOPLOSS_ON_EXCHANGE.value
-            if self.is_open:
-                logger.info(f'{order.order_type.upper()} is hit for {self}.')
-        else:
-            raise ValueError(f'Unknown order type: {order.order_type}')
-
         if order.ft_order_side != self.entry_side:
             amount_tr = amount_to_contract_precision(self.amount, self.amount_precision,
                                                      self.precision_mode, self.contract_size)
             if isclose(order.safe_amount_after_fee, amount_tr, abs_tol=MATH_CLOSE_PREC):
                 self.close(order.safe_price)
             else:
                 self.recalc_trade_from_orders()
+        elif order.ft_order_side == 'stoploss' and order.status not in ('canceled', 'open'):
+            self.stoploss_order_id = None
+            self.close_rate_requested = self.stop_loss
+            self.exit_reason = ExitType.STOPLOSS_ON_EXCHANGE.value
+            if self.is_open:
+                logger.info(f'{order.order_type.upper()} is hit for {self}.')
+                self.close(order.safe_price)
+        else:
+            raise ValueError(f'Unknown order type: {order.order_type}')

         Trade.commit()

     def close(self, rate: float, *, show_msg: bool = True) -> None:
@@ -1250,11 +1259,13 @@ class Trade(ModelBase, LocalTrade):
         Float(), nullable=True, default=0.0)  # type: ignore
     # Lowest price reached
     min_rate: Mapped[Optional[float]] = mapped_column(Float(), nullable=True)  # type: ignore
-    exit_reason: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)  # type: ignore
+    exit_reason: Mapped[Optional[str]] = mapped_column(
+        String(CUSTOM_TAG_MAX_LENGTH), nullable=True)  # type: ignore
     exit_order_status: Mapped[Optional[str]] = mapped_column(
         String(100), nullable=True)  # type: ignore
     strategy: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)  # type: ignore
-    enter_tag: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)  # type: ignore
+    enter_tag: Mapped[Optional[str]] = mapped_column(
+        String(CUSTOM_TAG_MAX_LENGTH), nullable=True)  # type: ignore
     timeframe: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)  # type: ignore

     trading_mode: Mapped[TradingMode] = mapped_column(
@@ -1284,6 +1295,13 @@ class Trade(ModelBase, LocalTrade):
         self.realized_profit = 0
         self.recalc_open_trade_value()

+    @validates('enter_tag', 'exit_reason')
+    def validate_string_len(self, key, value):
+        max_len = getattr(self.__class__, key).prop.columns[0].type.length
+        if value and len(value) > max_len:
+            return value[:max_len]
+        return value
+
     def delete(self) -> None:

         for order in self.orders:
@@ -1660,8 +1678,10 @@ class Trade(ModelBase, LocalTrade):
             stop_loss=data["stop_loss_abs"],
             stop_loss_pct=data["stop_loss_ratio"],
             stoploss_order_id=data["stoploss_order_id"],
-            stoploss_last_update=(datetime.fromtimestamp(data["stoploss_last_update"] // 1000,
-                                  tz=timezone.utc) if data["stoploss_last_update"] else None),
+            stoploss_last_update=(
+                datetime.fromtimestamp(data["stoploss_last_update_timestamp"] // 1000,
+                                       tz=timezone.utc)
+                if data["stoploss_last_update_timestamp"] else None),
             initial_stop_loss=data["initial_stop_loss_abs"],
             initial_stop_loss_pct=data["initial_stop_loss_ratio"],
             min_rate=data["min_rate"],
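The `@validates` hook added above means over-long tags are truncated on assignment instead of failing at the database layer. A self-contained sketch of the same pattern (the model name and column length are illustrative, not freqtrade's):

from sqlalchemy import String
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, validates


class Base(DeclarativeBase):
    pass


class Tagged(Base):
    __tablename__ = 'tagged'
    id: Mapped[int] = mapped_column(primary_key=True)
    enter_tag: Mapped[str] = mapped_column(String(10))

    @validates('enter_tag')
    def validate_string_len(self, key, value):
        # Truncate to the declared column length, as Trade does above.
        max_len = getattr(self.__class__, key).prop.columns[0].type.length
        if value and len(value) > max_len:
            return value[:max_len]
        return value


row = Tagged(enter_tag='x' * 30)
assert row.enter_tag == 'x' * 10  # truncated on assignment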
@@ -1,4 +1,5 @@
 import logging
+from datetime import datetime, timezone
 from pathlib import Path
 from typing import Dict, List, Optional

@@ -635,7 +636,7 @@ def load_and_plot_trades(config: Config):
     exchange = ExchangeResolver.load_exchange(config['exchange']['name'], config)
     IStrategy.dp = DataProvider(config, exchange)
     strategy.ft_bot_start()
-    strategy.bot_loop_start()
+    strategy.bot_loop_start(datetime.now(timezone.utc))
     plot_elements = init_plotscript(config, list(exchange.markets), strategy.startup_candle_count)
     timerange = plot_elements['timerange']
     trades = plot_elements['trades']
@@ -6,6 +6,7 @@ from typing import Any, Dict, Optional

 from freqtrade.constants import Config
 from freqtrade.exceptions import OperationalException
+from freqtrade.exchange import ROUND_UP
 from freqtrade.exchange.types import Ticker
 from freqtrade.plugins.pairlist.IPairList import IPairList

@@ -61,9 +62,10 @@ class PrecisionFilter(IPairList):
         stop_price = ticker['last'] * self._stoploss

         # Adjust stop-prices to precision
-        sp = self._exchange.price_to_precision(pair, stop_price)
+        sp = self._exchange.price_to_precision(pair, stop_price, rounding_mode=ROUND_UP)

-        stop_gap_price = self._exchange.price_to_precision(pair, stop_price * 0.99)
+        stop_gap_price = self._exchange.price_to_precision(pair, stop_price * 0.99,
+                                                           rounding_mode=ROUND_UP)
         logger.debug(f"{pair} - {sp} : {stop_gap_price}")

         if sp <= stop_gap_price:
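The recurring `rounding_mode` argument in these diffs encodes a direction convention: when a stop price is snapped to the exchange's tick size, it should move toward the current price (ROUND_UP for longs, whose stop sits below; ROUND_DOWN for shorts, whose stop sits above), so precision adjustment can only tighten the stop, never loosen it. A hedged arithmetic sketch with an illustrative stand-in for the exchange helper (tick size and prices are made up):

import math

def price_to_precision(price: float, tick: float, round_up: bool) -> float:
    """Illustrative stand-in for the exchange helper: snap price to tick size."""
    fn = math.ceil if round_up else math.floor
    return fn(price / tick) * tick

# 0.12345 snapped to a 0.001 tick moves up or down depending on the mode:
assert math.isclose(price_to_precision(0.12345, 0.001, round_up=True), 0.124)
assert math.isclose(price_to_precision(0.12345, 0.001, round_up=False), 0.123)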
@@ -108,6 +108,8 @@ class Profit(BaseModel):
     max_drawdown: float
     max_drawdown_abs: float
     trading_volume: Optional[float]
+    bot_start_timestamp: int
+    bot_start_date: str


 class SellReason(BaseModel):
@@ -276,6 +278,10 @@ class TradeSchema(BaseModel):
     funding_fees: Optional[float]
     trading_mode: Optional[TradingMode]

+    amount_precision: Optional[float]
+    price_precision: Optional[float]
+    precision_mode: Optional[int]
+

 class OpenTradeSchema(TradeSchema):
     stoploss_current_dist: Optional[float]
@@ -55,7 +55,7 @@ class UvicornServer(uvicorn.Server):

     @contextlib.contextmanager
     def run_in_thread(self):
-        self.thread = threading.Thread(target=self.run)
+        self.thread = threading.Thread(target=self.run, name='FTUvicorn')
         self.thread.start()
         while not self.started:
             time.sleep(1e-3)
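Note: naming the server thread costs nothing and makes it easy to spot in debuggers, py-spy dumps, or `threading.enumerate()` output. The surrounding context-manager pattern, sketched standalone (simplified; `run`/`should_exit` stand in for uvicorn's API):

```python
import contextlib
import threading
import time


class ToyServer:
    """Minimal stand-in for UvicornServer: run() flips `started`, honours `should_exit`."""

    def __init__(self) -> None:
        self.started = False
        self.should_exit = False

    def run(self) -> None:
        self.started = True
        while not self.should_exit:
            time.sleep(1e-3)

    @contextlib.contextmanager
    def run_in_thread(self):
        self.thread = threading.Thread(target=self.run, name='FTUvicorn')
        self.thread.start()
        while not self.started:          # block until the server is ready
            time.sleep(1e-3)
        try:
            yield
        finally:
            self.should_exit = True      # signal shutdown ...
            self.thread.join()           # ... and wait for the thread to exit


with ToyServer().run_in_thread():
    print([t.name for t in threading.enumerate()])  # includes 'FTUvicorn'
```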
@@ -13,6 +13,7 @@ from freqtrade.exceptions import OperationalException
 from freqtrade.rpc.api_server.uvicorn_threaded import UvicornServer
 from freqtrade.rpc.api_server.ws.message_stream import MessageStream
 from freqtrade.rpc.rpc import RPC, RPCException, RPCHandler
+from freqtrade.rpc.rpc_types import RPCSendMsg


 logger = logging.getLogger(__name__)
@@ -108,7 +109,7 @@ class ApiServer(RPCHandler):
         cls._has_rpc = False
         cls._rpc = None

-    def send_msg(self, msg: Dict[str, Any]) -> None:
+    def send_msg(self, msg: RPCSendMsg) -> None:
         """
         Publish the message to the message stream
         """
@@ -26,10 +26,11 @@ from freqtrade.exceptions import ExchangeError, PricingError
 from freqtrade.exchange import timeframe_to_minutes, timeframe_to_msecs
 from freqtrade.loggers import bufferHandler
 from freqtrade.misc import decimals_per_coin, shorten_date
-from freqtrade.persistence import Order, PairLocks, Trade
+from freqtrade.persistence import KeyStoreKeys, KeyValueStore, Order, PairLocks, Trade
 from freqtrade.persistence.models import PairLock
 from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
 from freqtrade.rpc.fiat_convert import CryptoToFiatConverter
+from freqtrade.rpc.rpc_types import RPCSendMsg
 from freqtrade.wallets import PositionWallet, Wallet


@@ -79,7 +80,7 @@ class RPCHandler:
         """ Cleanup pending module resources """

     @abstractmethod
-    def send_msg(self, msg: Dict[str, str]) -> None:
+    def send_msg(self, msg: RPCSendMsg) -> None:
         """ Sends a message to all registered rpc modules """


@@ -542,6 +543,7 @@ class RPC:
         first_date = trades[0].open_date if trades else None
         last_date = trades[-1].open_date if trades else None
         num = float(len(durations) or 1)
+        bot_start = KeyValueStore.get_datetime_value(KeyStoreKeys.BOT_START_TIME)
         return {
             'profit_closed_coin': profit_closed_coin_sum,
             'profit_closed_percent_mean': round(profit_closed_ratio_mean * 100, 2),
@@ -575,6 +577,8 @@ class RPC:
             'max_drawdown': max_drawdown,
             'max_drawdown_abs': max_drawdown_abs,
             'trading_volume': trading_volume,
+            'bot_start_timestamp': int(bot_start.timestamp() * 1000) if bot_start else 0,
+            'bot_start_date': bot_start.strftime(DATETIME_PRINT_FORMAT) if bot_start else '',
         }

     def _rpc_balance(self, stake_currency: str, fiat_display_currency: str) -> Dict:
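Note: the statistics endpoint now pulls the bot's start time from a persistent key-value store rather than from process state, so `bot_start_date` survives RPC restarts. Only `get_datetime_value` is visible in the diff; a minimal sketch of what such a store has to provide for this call site (illustrative stand-in, not the actual `KeyValueStore` implementation):

```python
from datetime import datetime, timezone
from typing import Dict, Optional


class MiniKeyValueStore:
    """Illustrative stand-in: datetimes are persisted as millisecond epochs."""
    _store: Dict[str, int] = {}

    @classmethod
    def store_datetime_value(cls, key: str, value: datetime) -> None:
        cls._store[key] = int(value.timestamp() * 1000)

    @classmethod
    def get_datetime_value(cls, key: str) -> Optional[datetime]:
        ts = cls._store.get(key)
        return datetime.fromtimestamp(ts / 1000, tz=timezone.utc) if ts is not None else None


MiniKeyValueStore.store_datetime_value('bot_start_time', datetime.now(timezone.utc))
bot_start = MiniKeyValueStore.get_datetime_value('bot_start_time')
# Mirrors the stats payload in the hunk above:
payload = {'bot_start_timestamp': int(bot_start.timestamp() * 1000) if bot_start else 0}
```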
@@ -1192,6 +1196,7 @@ class RPC:
         from freqtrade.resolvers.strategy_resolver import StrategyResolver
         strategy = StrategyResolver.load_strategy(config)
         strategy.dp = DataProvider(config, exchange=exchange, pairlists=None)
+        strategy.ft_bot_start()

         df_analyzed = strategy.analyze_ticker(_data[pair], {'pair': pair})

@@ -3,11 +3,12 @@ This module contains class to manage RPC communications (Telegram, API, ...)
 """
 import logging
 from collections import deque
-from typing import Any, Dict, List
+from typing import List

 from freqtrade.constants import Config
 from freqtrade.enums import NO_ECHO_MESSAGES, RPCMessageType
 from freqtrade.rpc import RPC, RPCHandler
+from freqtrade.rpc.rpc_types import RPCSendMsg


 logger = logging.getLogger(__name__)
@@ -58,7 +59,7 @@ class RPCManager:
             mod.cleanup()
             del mod

-    def send_msg(self, msg: Dict[str, Any]) -> None:
+    def send_msg(self, msg: RPCSendMsg) -> None:
         """
         Send given message to all registered rpc modules.
         A message consists of one or more key value pairs of strings.
@@ -69,10 +70,6 @@ class RPCManager:
         """
         if msg.get('type') not in NO_ECHO_MESSAGES:
             logger.info('Sending rpc message: %s', msg)
-        if 'pair' in msg:
-            msg.update({
-                'base_currency': self._rpc._freqtrade.exchange.get_pair_base_currency(msg['pair'])
-            })
         for mod in self.registered_modules:
             logger.debug('Forwarding message to rpc.%s', mod.name)
             try:
freqtrade/rpc/rpc_types.py (new file, 128 lines)
@@ -0,0 +1,128 @@
+from datetime import datetime
+from typing import Any, List, Literal, Optional, TypedDict, Union
+
+from freqtrade.constants import PairWithTimeframe
+from freqtrade.enums import RPCMessageType
+
+
+class RPCSendMsgBase(TypedDict):
+    pass
+    # ty1pe: Literal[RPCMessageType]
+
+
+class RPCStatusMsg(RPCSendMsgBase):
+    """Used for Status, Startup and Warning messages"""
+    type: Literal[RPCMessageType.STATUS, RPCMessageType.STARTUP, RPCMessageType.WARNING]
+    status: str
+
+
+class RPCStrategyMsg(RPCSendMsgBase):
+    """Used for Strategy messages"""
+    type: Literal[RPCMessageType.STRATEGY_MSG]
+    msg: str
+
+
+class RPCProtectionMsg(RPCSendMsgBase):
+    type: Literal[RPCMessageType.PROTECTION_TRIGGER, RPCMessageType.PROTECTION_TRIGGER_GLOBAL]
+    id: int
+    pair: str
+    base_currency: Optional[str]
+    lock_time: str
+    lock_timestamp: int
+    lock_end_time: str
+    lock_end_timestamp: int
+    reason: str
+    side: str
+    active: bool
+
+
+class RPCWhitelistMsg(RPCSendMsgBase):
+    type: Literal[RPCMessageType.WHITELIST]
+    data: List[str]
+
+
+class __RPCBuyMsgBase(RPCSendMsgBase):
+    trade_id: int
+    buy_tag: Optional[str]
+    enter_tag: Optional[str]
+    exchange: str
+    pair: str
+    base_currency: str
+    leverage: Optional[float]
+    direction: str
+    limit: float
+    open_rate: float
+    order_type: str
+    stake_amount: float
+    stake_currency: str
+    fiat_currency: Optional[str]
+    amount: float
+    open_date: datetime
+    current_rate: Optional[float]
+    sub_trade: bool
+
+
+class RPCBuyMsg(__RPCBuyMsgBase):
+    type: Literal[RPCMessageType.ENTRY, RPCMessageType.ENTRY_FILL]
+
+
+class RPCCancelMsg(__RPCBuyMsgBase):
+    type: Literal[RPCMessageType.ENTRY_CANCEL]
+    reason: str
+
+
+class RPCSellMsg(__RPCBuyMsgBase):
+    type: Literal[RPCMessageType.EXIT, RPCMessageType.EXIT_FILL]
+    cumulative_profit: float
+    gain: str  # Literal["profit", "loss"]
+    close_rate: float
+    profit_amount: float
+    profit_ratio: float
+    sell_reason: Optional[str]
+    exit_reason: Optional[str]
+    close_date: datetime
+    # current_rate: Optional[float]
+    order_rate: Optional[float]
+
+
+class RPCSellCancelMsg(__RPCBuyMsgBase):
+    type: Literal[RPCMessageType.EXIT_CANCEL]
+    reason: str
+    gain: str  # Literal["profit", "loss"]
+    profit_amount: float
+    profit_ratio: float
+    sell_reason: Optional[str]
+    exit_reason: Optional[str]
+    close_date: datetime
+
+
+class _AnalyzedDFData(TypedDict):
+    key: PairWithTimeframe
+    df: Any
+    la: datetime
+
+
+class RPCAnalyzedDFMsg(RPCSendMsgBase):
+    """New Analyzed dataframe message"""
+    type: Literal[RPCMessageType.ANALYZED_DF]
+    data: _AnalyzedDFData
+
+
+class RPCNewCandleMsg(RPCSendMsgBase):
+    """New candle ping message, issued once per new candle/pair"""
+    type: Literal[RPCMessageType.NEW_CANDLE]
+    data: PairWithTimeframe
+
+
+RPCSendMsg = Union[
+    RPCStatusMsg,
+    RPCStrategyMsg,
+    RPCProtectionMsg,
+    RPCWhitelistMsg,
+    RPCBuyMsg,
+    RPCCancelMsg,
+    RPCSellMsg,
+    RPCSellCancelMsg,
+    RPCAnalyzedDFMsg,
+    RPCNewCandleMsg
]
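Note: `RPCSendMsg` is a tagged union: every member carries a `Literal` `type` field, so a type checker can narrow the union by inspecting `msg['type']` directly. That is also why `telegram.py` below compares `msg['type']` instead of a local `msg_type` variable. A self-contained sketch of the narrowing behaviour (toy message types, not the freqtrade ones):

```python
from typing import Literal, TypedDict, Union


class StatusMsg(TypedDict):
    type: Literal['status']
    status: str


class ExitMsg(TypedDict):
    type: Literal['exit']
    pair: str
    profit_ratio: float


Msg = Union[StatusMsg, ExitMsg]


def handle(msg: Msg) -> str:
    if msg['type'] == 'status':
        # mypy narrows msg to StatusMsg here, so 'status' is known to exist
        return msg['status']
    # ... and to ExitMsg here
    return f"{msg['pair']}: {msg['profit_ratio']:.2%}"


assert handle({'type': 'exit', 'pair': 'XRP/USDT', 'profit_ratio': 0.05}) == 'XRP/USDT: 5.00%'
```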
@@ -30,6 +30,7 @@ from freqtrade.exceptions import OperationalException
 from freqtrade.misc import chunks, plural, round_coin_value
 from freqtrade.persistence import Trade
 from freqtrade.rpc import RPC, RPCException, RPCHandler
+from freqtrade.rpc.rpc_types import RPCSendMsg


 logger = logging.getLogger(__name__)
@@ -429,14 +430,14 @@ class Telegram(RPCHandler):
             return None
         return message

-    def send_msg(self, msg: Dict[str, Any]) -> None:
+    def send_msg(self, msg: RPCSendMsg) -> None:
         """ Send a message to telegram channel """

         default_noti = 'on'

         msg_type = msg['type']
         noti = ''
-        if msg_type == RPCMessageType.EXIT:
+        if msg['type'] == RPCMessageType.EXIT:
             sell_noti = self._config['telegram'] \
                 .get('notification_settings', {}).get(str(msg_type), {})
             # For backward compatibility sell still can be string
@@ -453,7 +454,7 @@ class Telegram(RPCHandler):
             # Notification disabled
             return

-        message = self.compose_message(deepcopy(msg), msg_type)
+        message = self.compose_message(deepcopy(msg), msg_type)  # type: ignore
         if message:
             self._send_msg(message, disable_notification=(noti == 'silent'))

@@ -818,7 +819,7 @@ class Telegram(RPCHandler):
         best_pair = stats['best_pair']
         best_pair_profit_ratio = stats['best_pair_profit_ratio']
         if stats['trade_count'] == 0:
-            markdown_msg = 'No trades yet.'
+            markdown_msg = f"No trades yet.\n*Bot started:* `{stats['bot_start_date']}`"
         else:
             # Message to display
             if stats['closed_trade_count'] > 0:
@@ -837,6 +838,7 @@ class Telegram(RPCHandler):
                 f"({profit_all_percent} \N{GREEK CAPITAL LETTER SIGMA}%)`\n"
                 f"∙ `{round_coin_value(profit_all_fiat, fiat_disp_cur)}`\n"
                 f"*Total Trade Count:* `{trade_count}`\n"
+                f"*Bot started:* `{stats['bot_start_date']}`\n"
                 f"*{'First Trade opened' if not timescale else 'Showing Profit since'}:* "
                 f"`{first_trade_date}`\n"
                 f"*Latest Trade opened:* `{latest_trade_date}`\n"
@@ -10,6 +10,7 @@ from requests import RequestException, post
 from freqtrade.constants import Config
 from freqtrade.enums import RPCMessageType
 from freqtrade.rpc import RPC, RPCHandler
+from freqtrade.rpc.rpc_types import RPCSendMsg


 logger = logging.getLogger(__name__)
@@ -41,7 +42,7 @@ class Webhook(RPCHandler):
         """
         pass

-    def _get_value_dict(self, msg: Dict[str, Any]) -> Optional[Dict[str, Any]]:
+    def _get_value_dict(self, msg: RPCSendMsg) -> Optional[Dict[str, Any]]:
         whconfig = self._config['webhook']
         # Deprecated 2022.10 - only keep generic method.
         if msg['type'] in [RPCMessageType.ENTRY]:
@@ -75,7 +76,7 @@ class Webhook(RPCHandler):
             return None
         return valuedict

-    def send_msg(self, msg: Dict[str, Any]) -> None:
+    def send_msg(self, msg: RPCSendMsg) -> None:
         """ Send a message to telegram channel """
         try:

@@ -8,7 +8,7 @@ from typing import Any, Dict, Iterator, List, Optional, Tuple, Type, Union

 from freqtrade.constants import Config
 from freqtrade.exceptions import OperationalException
-from freqtrade.misc import deep_merge_dicts, json_load
+from freqtrade.misc import deep_merge_dicts
 from freqtrade.optimize.hyperopt_tools import HyperoptTools
 from freqtrade.strategy.parameters import BaseParameter

@@ -124,8 +124,7 @@ class HyperStrategyMixin:
         if filename.is_file():
             logger.info(f"Loading parameters from file {filename}")
             try:
-                with filename.open('r') as f:
-                    params = json_load(f)
+                params = HyperoptTools.load_params(filename)
                 if params.get('strategy_name') != self.__class__.__name__:
                     raise OperationalException('Invalid parameter file provided.')
                 return params
@@ -10,7 +10,7 @@ from typing import Dict, List, Optional, Tuple, Union
 import arrow
 from pandas import DataFrame

-from freqtrade.constants import Config, IntOrInf, ListPairsWithTimeframes
+from freqtrade.constants import CUSTOM_TAG_MAX_LENGTH, Config, IntOrInf, ListPairsWithTimeframes
 from freqtrade.data.dataprovider import DataProvider
 from freqtrade.enums import (CandleType, ExitCheckTuple, ExitType, MarketDirection, RunMode,
                              SignalDirection, SignalTagType, SignalType, TradingMode)
@@ -27,7 +27,6 @@ from freqtrade.wallets import Wallets


 logger = logging.getLogger(__name__)
-CUSTOM_EXIT_MAX_LENGTH = 64


 class IStrategy(ABC, HyperStrategyMixin):
@@ -251,11 +250,12 @@ class IStrategy(ABC, HyperStrategyMixin):
         """
         pass

-    def bot_loop_start(self, **kwargs) -> None:
+    def bot_loop_start(self, current_time: datetime, **kwargs) -> None:
         """
         Called at the start of the bot iteration (one loop).
         Might be used to perform pair-independent tasks
         (e.g. gather some remote resource for comparison)
+        :param current_time: datetime object, containing the current datetime
         :param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
         """
         pass
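Note: with the new signature the per-loop callback receives the clock explicitly, which keeps backtesting honest: the backtester can pass simulated candle time instead of `datetime.now()`. A sketch of a strategy overriding the new hook (strategy name, attributes, and helper are illustrative):

```python
from datetime import datetime

from freqtrade.strategy import IStrategy


class MyStrategy(IStrategy):  # hypothetical strategy
    timeframe = '5m'
    minimal_roi = {"0": 0.1}
    stoploss = -0.05

    def bot_loop_start(self, current_time: datetime, **kwargs) -> None:
        # `current_time` is wall-clock time in live/dry runs and simulated time
        # in backtests, so time-based logic behaves identically in both modes.
        if current_time.minute == 0:
            self.refresh_remote_data()  # hypothetical helper

    def refresh_remote_data(self) -> None:
        ...
```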
@@ -1117,11 +1117,11 @@ class IStrategy(ABC, HyperStrategyMixin):
            exit_signal = ExitType.CUSTOM_EXIT
            if isinstance(reason_cust, str):
                custom_reason = reason_cust
-                if len(reason_cust) > CUSTOM_EXIT_MAX_LENGTH:
+                if len(reason_cust) > CUSTOM_TAG_MAX_LENGTH:
                    logger.warning(f'Custom exit reason returned from '
                                   f'custom_exit is too long and was trimmed'
-                                   f'to {CUSTOM_EXIT_MAX_LENGTH} characters.')
-                    custom_reason = reason_cust[:CUSTOM_EXIT_MAX_LENGTH]
+                                   f'to {CUSTOM_TAG_MAX_LENGTH} characters.')
+                    custom_reason = reason_cust[:CUSTOM_TAG_MAX_LENGTH]
            else:
                custom_reason = ''
            if (
@@ -223,6 +223,7 @@ class FreqaiExampleHybridStrategy(IStrategy):
        :param metadata: metadata of current pair
        usage example: dataframe["&-target"] = dataframe["close"].shift(-1) / dataframe["close"]
        """
+        self.freqai.class_names = ["down", "up"]
        dataframe['&s-up_or_down'] = np.where(dataframe["close"].shift(-50) >
                                              dataframe["close"], 'up', 'down')

@@ -1,5 +1,5 @@

-def bot_loop_start(self, **kwargs) -> None:
+def bot_loop_start(self, current_time: datetime, **kwargs) -> None:
    """
    Called at the start of the bot iteration (one loop).
    Might be used to perform pair-independent tasks
@@ -8,6 +8,7 @@ def bot_loop_start(self, **kwargs) -> None:
    For full documentation please go to https://www.freqtrade.io/en/latest/strategy-advanced/

    When not implemented by a strategy, this simply does nothing.
+    :param current_time: datetime object, containing the current datetime
    :param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
    """
    pass
@@ -7,10 +7,10 @@
 -r docs/requirements-docs.txt

 coveralls==3.3.1
-ruff==0.0.257
-mypy==1.1.1
-pre-commit==3.2.0
-pytest==7.2.2
+ruff==0.0.261
+mypy==1.2.0
+pre-commit==3.2.2
+pytest==7.3.0
 pytest-asyncio==0.21.0
 pytest-cov==4.0.0
 pytest-mock==3.10.0
@@ -22,11 +22,11 @@ time-machine==2.9.0
 httpx==0.23.3

 # Convert jupyter notebooks to markdown documents
-nbconvert==7.2.10
+nbconvert==7.3.1

 # mypy types
-types-cachetools==5.3.0.4
+types-cachetools==5.3.0.5
 types-filelock==3.2.7
-types-requests==2.28.11.15
-types-tabulate==0.9.0.1
-types-python-dateutil==2.8.19.10
+types-requests==2.28.11.17
+types-tabulate==0.9.0.2
+types-python-dateutil==2.8.19.12
@@ -5,7 +5,7 @@
 # Required for freqai
 scikit-learn==1.1.3
 joblib==1.2.0
-catboost==1.1.1; platform_machine != 'aarch64' and python_version < '3.11'
+catboost==1.1.1; platform_machine != 'aarch64' and 'arm' not in platform_machine and python_version < '3.11'
 lightgbm==3.3.5
-xgboost==1.7.4
-tensorboard==2.12.0
+xgboost==1.7.5
+tensorboard==2.12.1
@@ -5,5 +5,5 @@
 scipy==1.10.1
 scikit-learn==1.1.3
 scikit-optimize==0.9.0
-filelock==3.10.0
+filelock==3.11.0
 progressbar2==4.2.0
@@ -1,4 +1,4 @@
 # Include all requirements to run the bot.
 -r requirements.txt

-plotly==5.13.1
+plotly==5.14.1
@@ -2,10 +2,10 @@ numpy==1.24.2
 pandas==1.5.3
 pandas-ta==0.3.14b

-ccxt==3.0.23
-cryptography==39.0.2
+ccxt==3.0.59
+cryptography==40.0.1
 aiohttp==3.8.4
-SQLAlchemy==2.0.7
+SQLAlchemy==2.0.9
 python-telegram-bot==13.15
 arrow==1.2.3
 cachetools==4.2.2
@@ -28,14 +28,14 @@ py_find_1st==1.1.5
 # Load ticker files 30% faster
 python-rapidjson==1.10
 # Properly format api responses
-orjson==3.8.7
+orjson==3.8.10

 # Notify systemd
 sdnotify==0.3.2

 # API Server
 fastapi==0.95.0
-pydantic==1.10.6
+pydantic==1.10.7
 uvicorn==0.21.1
 pyjwt==2.6.0
 aiofiles==23.1.0
@@ -50,10 +50,10 @@ prompt-toolkit==3.0.38
 python-dateutil==2.8.2

 #Futures
-schedule==1.1.0
+schedule==1.2.0

 #WS Messages
-websockets==10.4
+websockets==11.0.1
 janus==1.0.0

 ast-comments==1.0.1
setup.py (2 changed lines)
@@ -59,7 +59,7 @@ setup(
     install_requires=[
         # from requirements.txt
         'ccxt>=2.6.26',
-        'SQLAlchemy',
+        'SQLAlchemy>=2.0.6',
         'python-telegram-bot>=13.4',
         'arrow>=0.17.0',
         'cachetools',
setup.sh (2 changed lines)
@@ -85,7 +85,7 @@ function updateenv() {
     if [[ $REPLY =~ ^[Yy]$ ]]
     then
         REQUIREMENTS_FREQAI="-r requirements-freqai.txt --use-pep517"
-        read -p "Do you also want dependencies for freqai-rl (~700mb additional space required) [y/N]? "
+        read -p "Do you also want dependencies for freqai-rl or PyTorch (~700mb additional space required) [y/N]? "
         if [[ $REPLY =~ ^[Yy]$ ]]
         then
             REQUIREMENTS_FREQAI="-r requirements-freqai-rl.txt"
@@ -252,7 +252,7 @@ def test_datahandler__check_empty_df(testdatadir, caplog):
     assert log_has_re(expected_text, caplog)


-@pytest.mark.parametrize('datahandler', ['feather', 'parquet'])
+@pytest.mark.parametrize('datahandler', ['parquet'])
 def test_datahandler_trades_not_supported(datahandler, testdatadir, ):
     dh = get_datahandler(testdatadir, datahandler)
     with pytest.raises(NotImplementedError):
@@ -496,6 +496,58 @@ def test_hdf5datahandler_ohlcv_purge(mocker, testdatadir):
     assert unlinkmock.call_count == 2


+def test_featherdatahandler_trades_load(testdatadir):
+    dh = get_datahandler(testdatadir, 'feather')
+    trades = dh.trades_load('XRP/ETH')
+    assert isinstance(trades, list)
+    assert trades[0][0] == 1570752011620
+    assert trades[-1][-1] == 0.1986231
+
+    trades1 = dh.trades_load('UNITTEST/NONEXIST')
+    assert trades1 == []
+
+
+def test_featherdatahandler_trades_store(testdatadir, tmpdir):
+    tmpdir1 = Path(tmpdir)
+    dh = get_datahandler(testdatadir, 'feather')
+    trades = dh.trades_load('XRP/ETH')
+
+    dh1 = get_datahandler(tmpdir1, 'feather')
+    dh1.trades_store('XRP/NEW', trades)
+    file = tmpdir1 / 'XRP_NEW-trades.feather'
+    assert file.is_file()
+    # Load trades back
+    trades_new = dh1.trades_load('XRP/NEW')
+
+    assert len(trades_new) == len(trades)
+    assert trades[0][0] == trades_new[0][0]
+    assert trades[0][1] == trades_new[0][1]
+    # assert trades[0][2] == trades_new[0][2]  # This is nan - so comparison does not make sense
+    assert trades[0][3] == trades_new[0][3]
+    assert trades[0][4] == trades_new[0][4]
+    assert trades[0][5] == trades_new[0][5]
+    assert trades[0][6] == trades_new[0][6]
+    assert trades[-1][0] == trades_new[-1][0]
+    assert trades[-1][1] == trades_new[-1][1]
+    # assert trades[-1][2] == trades_new[-1][2]  # This is nan - so comparison does not make sense
+    assert trades[-1][3] == trades_new[-1][3]
+    assert trades[-1][4] == trades_new[-1][4]
+    assert trades[-1][5] == trades_new[-1][5]
+    assert trades[-1][6] == trades_new[-1][6]
+
+
+def test_featherdatahandler_trades_purge(mocker, testdatadir):
+    mocker.patch.object(Path, "exists", MagicMock(return_value=False))
+    unlinkmock = mocker.patch.object(Path, "unlink", MagicMock())
+    dh = get_datahandler(testdatadir, 'feather')
+    assert not dh.trades_purge('UNITTEST/NONEXIST')
+    assert unlinkmock.call_count == 0
+
+    mocker.patch.object(Path, "exists", MagicMock(return_value=True))
+    assert dh.trades_purge('UNITTEST/NONEXIST')
+    assert unlinkmock.call_count == 1
+
+
 def test_gethandlerclass():
     cl = get_datahandlerclass('json')
     assert cl == JsonDataHandler
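Note: these tests exercise the newly supported feather trade storage. A trade is a flat list (millisecond timestamp first, cost last, as the asserted indices imply), and the handler round-trips a list of such rows through a `.feather` file. A minimal sketch of that round trip with pandas (the column names are assumptions for illustration; the real handler defines its own):

```python
import pandas as pd

# Assumed column layout, matching the indices asserted above:
columns = ['timestamp', 'id', 'type', 'side', 'price', 'amount', 'cost']
trades = [
    [1570752011620, 'abc1', None, 'buy', 0.00019, 1043.1, 0.1986231],
    [1570752016620, 'abc2', None, 'sell', 0.00019, 1045.0, 0.198550],
]

pd.DataFrame(trades, columns=columns).to_feather('XRP_NEW-trades.feather')

# Read back and convert to the list-of-lists form the handler returns:
trades_new = pd.read_feather('XRP_NEW-trades.feather').values.tolist()
assert trades_new[0][0] == 1570752011620
assert trades_new[-1][-1] == 0.198550
```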
@@ -15,8 +15,8 @@ from tests.exchange.test_exchange import ccxt_exceptionhandlers
     ('buy', 'limit', 'gtc', {'timeInForce': 'GTC'}),
     ('buy', 'limit', 'IOC', {'timeInForce': 'IOC'}),
     ('buy', 'market', 'IOC', {}),
-    ('buy', 'limit', 'PO', {'postOnly': True}),
-    ('sell', 'limit', 'PO', {'postOnly': True}),
+    ('buy', 'limit', 'PO', {'timeInForce': 'PO'}),
+    ('sell', 'limit', 'PO', {'timeInForce': 'PO'}),
     ('sell', 'market', 'PO', {}),
 ])
 def test__get_params_binance(default_conf, mocker, side, type, time_in_force, expected):
@@ -48,7 +48,7 @@ def test_create_stoploss_order_binance(default_conf, mocker, limitratio, expecte
     default_conf['margin_mode'] = MarginMode.ISOLATED
     default_conf['trading_mode'] = trademode
     mocker.patch(f'{EXMS}.amount_to_precision', lambda s, x, y: y)
-    mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y: y)
+    mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y, **kwargs: y)

     exchange = get_patched_exchange(mocker, default_conf, api_mock, 'binance')

@@ -127,7 +127,7 @@ def test_create_stoploss_order_dry_run_binance(default_conf, mocker):
     order_type = 'stop_loss_limit'
     default_conf['dry_run'] = True
     mocker.patch(f'{EXMS}.amount_to_precision', lambda s, x, y: y)
-    mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y: y)
+    mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y, **kwargs: y)

     exchange = get_patched_exchange(mocker, default_conf, api_mock, 'binance')

@@ -8,6 +8,7 @@ from unittest.mock import MagicMock, Mock, PropertyMock, patch
 import arrow
 import ccxt
 import pytest
+from ccxt import DECIMAL_PLACES, ROUND, ROUND_UP, TICK_SIZE, TRUNCATE
 from pandas import DataFrame

 from freqtrade.enums import CandleType, MarginMode, TradingMode
@@ -113,18 +114,21 @@ async def async_ccxt_exception(mocker, default_conf, api_mock, fun, mock_ccxt_fu
     exchange = get_patched_exchange(mocker, default_conf, api_mock)
     await getattr(exchange, fun)(**kwargs)
     assert api_mock.__dict__[mock_ccxt_fun].call_count == retries
+    exchange.close()

     with pytest.raises(TemporaryError):
         api_mock.__dict__[mock_ccxt_fun] = MagicMock(side_effect=ccxt.NetworkError("DeadBeef"))
         exchange = get_patched_exchange(mocker, default_conf, api_mock)
         await getattr(exchange, fun)(**kwargs)
     assert api_mock.__dict__[mock_ccxt_fun].call_count == retries
+    exchange.close()

     with pytest.raises(OperationalException):
         api_mock.__dict__[mock_ccxt_fun] = MagicMock(side_effect=ccxt.BaseError("DeadBeef"))
         exchange = get_patched_exchange(mocker, default_conf, api_mock)
         await getattr(exchange, fun)(**kwargs)
     assert api_mock.__dict__[mock_ccxt_fun].call_count == 1
+    exchange.close()


 def test_init(default_conf, mocker, caplog):
@@ -312,35 +316,54 @@ def test_amount_to_precision(amount, precision_mode, precision, expected,):
     assert amount_to_precision(amount, precision, precision_mode) == expected


-@pytest.mark.parametrize("price,precision_mode,precision,expected", [
-    (2.34559, 2, 4, 2.3456),
-    (2.34559, 2, 5, 2.34559),
-    (2.34559, 2, 3, 2.346),
-    (2.9999, 2, 3, 3.000),
-    (2.9909, 2, 3, 2.991),
-    # Tests for Tick_size
-    (2.34559, 4, 0.0001, 2.3456),
-    (2.34559, 4, 0.00001, 2.34559),
-    (2.34559, 4, 0.001, 2.346),
-    (2.9999, 4, 0.001, 3.000),
-    (2.9909, 4, 0.001, 2.991),
-    (2.9909, 4, 0.005, 2.995),
-    (2.9973, 4, 0.005, 3.0),
-    (2.9977, 4, 0.005, 3.0),
-    (234.43, 4, 0.5, 234.5),
-    (234.53, 4, 0.5, 235.0),
-    (0.891534, 4, 0.0001, 0.8916),
-    (64968.89, 4, 0.01, 64968.89),
-    (0.000000003483, 4, 1e-12, 0.000000003483),
-
+@pytest.mark.parametrize("price,precision_mode,precision,expected,rounding_mode", [
+    # Tests for DECIMAL_PLACES, ROUND_UP
+    (2.34559, 2, 4, 2.3456, ROUND_UP),
+    (2.34559, 2, 5, 2.34559, ROUND_UP),
+    (2.34559, 2, 3, 2.346, ROUND_UP),
+    (2.9999, 2, 3, 3.000, ROUND_UP),
+    (2.9909, 2, 3, 2.991, ROUND_UP),
+    # Tests for DECIMAL_PLACES, ROUND
+    (2.345600000000001, DECIMAL_PLACES, 4, 2.3456, ROUND),
+    (2.345551, DECIMAL_PLACES, 4, 2.3456, ROUND),
+    (2.49, DECIMAL_PLACES, 0, 2., ROUND),
+    (2.51, DECIMAL_PLACES, 0, 3., ROUND),
+    (5.1, DECIMAL_PLACES, -1, 10., ROUND),
+    (4.9, DECIMAL_PLACES, -1, 0., ROUND),
+    # Tests for TICK_SIZE, ROUND_UP
+    (2.34559, TICK_SIZE, 0.0001, 2.3456, ROUND_UP),
+    (2.34559, TICK_SIZE, 0.00001, 2.34559, ROUND_UP),
+    (2.34559, TICK_SIZE, 0.001, 2.346, ROUND_UP),
+    (2.9999, TICK_SIZE, 0.001, 3.000, ROUND_UP),
+    (2.9909, TICK_SIZE, 0.001, 2.991, ROUND_UP),
+    (2.9909, TICK_SIZE, 0.005, 2.995, ROUND_UP),
+    (2.9973, TICK_SIZE, 0.005, 3.0, ROUND_UP),
+    (2.9977, TICK_SIZE, 0.005, 3.0, ROUND_UP),
+    (234.43, TICK_SIZE, 0.5, 234.5, ROUND_UP),
+    (234.53, TICK_SIZE, 0.5, 235.0, ROUND_UP),
+    (0.891534, TICK_SIZE, 0.0001, 0.8916, ROUND_UP),
+    (64968.89, TICK_SIZE, 0.01, 64968.89, ROUND_UP),
+    (0.000000003483, TICK_SIZE, 1e-12, 0.000000003483, ROUND_UP),
+    # Tests for TICK_SIZE, ROUND
+    (2.49, TICK_SIZE, 1., 2., ROUND),
+    (2.51, TICK_SIZE, 1., 3., ROUND),
+    (2.000000051, TICK_SIZE, 0.0000001, 2.0000001, ROUND),
+    (2.000000049, TICK_SIZE, 0.0000001, 2., ROUND),
+    (2.9909, TICK_SIZE, 0.005, 2.990, ROUND),
+    (2.9973, TICK_SIZE, 0.005, 2.995, ROUND),
+    (2.9977, TICK_SIZE, 0.005, 3.0, ROUND),
+    (234.24, TICK_SIZE, 0.5, 234., ROUND),
+    (234.26, TICK_SIZE, 0.5, 234.5, ROUND),
+    # Tests for TRUNCATE
+    (2.34559, 2, 4, 2.3455, TRUNCATE),
+    (2.34559, 2, 5, 2.34559, TRUNCATE),
+    (2.34559, 2, 3, 2.345, TRUNCATE),
+    (2.9999, 2, 3, 2.999, TRUNCATE),
+    (2.9909, 2, 3, 2.990, TRUNCATE),
 ])
-def test_price_to_precision(price, precision_mode, precision, expected):
-    # digits counting mode
-    # DECIMAL_PLACES = 2
-    # SIGNIFICANT_DIGITS = 3
-    # TICK_SIZE = 4
-
-    assert price_to_precision(price, precision, precision_mode) == expected
+def test_price_to_precision(price, precision_mode, precision, expected, rounding_mode):
+    assert price_to_precision(
+        price, precision, precision_mode, rounding_mode=rounding_mode) == expected


 @pytest.mark.parametrize("price,precision_mode,precision,expected", [
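Note: `price_to_precision` now takes an explicit `rounding_mode`, and the table above doubles as its spec. A sketch of the tick-size branch that reproduces those expectations (illustrative, not freqtrade's implementation; ccxt's `decimal_to_precision` handles the general case):

```python
import math

# Stand-ins for the ccxt rounding-mode constants used above:
ROUND_UP, ROUND, TRUNCATE = 'round_up', 'round', 'truncate'


def tick_price_to_precision(price: float, tick_size: float, rounding_mode: str) -> float:
    ticks = price / tick_size
    if rounding_mode == ROUND_UP:
        ticks = math.ceil(ticks)        # never round a stop below the intended price
    elif rounding_mode == ROUND:
        ticks = round(ticks)            # nearest tick
    else:  # TRUNCATE
        ticks = math.floor(ticks)
    # Re-round to clean up float noise from the multiplication:
    return round(ticks * tick_size, 14)


assert tick_price_to_precision(2.9909, 0.005, ROUND_UP) == 2.995
assert tick_price_to_precision(2.9909, 0.005, ROUND) == 2.99
assert tick_price_to_precision(234.26, 0.5, ROUND) == 234.5
```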
@@ -414,7 +437,7 @@ def test__get_stake_amount_limit(mocker, default_conf) -> None:
     }
     mocker.patch(f'{EXMS}.markets', PropertyMock(return_value=markets))
     result = exchange.get_min_pair_stake_amount('ETH/BTC', 2, stoploss)
-    expected_result = 2 * 2 * (1 + 0.05) / (1 - abs(stoploss))
+    expected_result = 2 * 2 * (1 + 0.05)
     assert pytest.approx(result) == expected_result
     # With Leverage
     result = exchange.get_min_pair_stake_amount('ETH/BTC', 2, stoploss, 5.0)
@@ -423,14 +446,14 @@ def test__get_stake_amount_limit(mocker, default_conf) -> None:
     result = exchange.get_max_pair_stake_amount('ETH/BTC', 2)
     assert result == 20000

-    # min amount and cost are set (cost is minimal)
+    # min amount and cost are set (cost is minimal and therefore ignored)
     markets["ETH/BTC"]["limits"] = {
         'cost': {'min': 2, 'max': None},
         'amount': {'min': 2, 'max': None},
     }
     mocker.patch(f'{EXMS}.markets', PropertyMock(return_value=markets))
     result = exchange.get_min_pair_stake_amount('ETH/BTC', 2, stoploss)
-    expected_result = max(2, 2 * 2) * (1 + 0.05) / (1 - abs(stoploss))
+    expected_result = max(2, 2 * 2) * (1 + 0.05)
     assert pytest.approx(result) == expected_result
     # With Leverage
     result = exchange.get_min_pair_stake_amount('ETH/BTC', 2, stoploss, 10)
@@ -473,6 +496,9 @@ def test__get_stake_amount_limit(mocker, default_conf) -> None:
     result = exchange.get_max_pair_stake_amount('ETH/BTC', 2)
     assert result == 1000

+    result = exchange.get_max_pair_stake_amount('ETH/BTC', 2, 12.0)
+    assert result == 1000 / 12
+
     markets["ETH/BTC"]["contractSize"] = '0.01'
     default_conf['trading_mode'] = 'futures'
     default_conf['margin_mode'] = 'isolated'
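Note: the new assertion pins down how leverage scales the cap: the exchange limits the position's notional cost, so the stake the bot may commit is that cap divided by the leverage. In sketch form (illustrative helper, not the real method):

```python
def max_stake_for_leverage(max_cost: float, leverage: float = 1.0) -> float:
    # A 12x position of stake S has notional S * 12, so S may be at most max_cost / 12.
    return max_cost / leverage


assert max_stake_for_leverage(1000) == 1000
assert max_stake_for_leverage(1000, 12.0) == 1000 / 12
```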
@@ -1436,7 +1462,10 @@ def test_buy_prod(default_conf, mocker, exchange_name):
     assert api_mock.create_order.call_args[0][1] == order_type
     assert api_mock.create_order.call_args[0][2] == 'buy'
     assert api_mock.create_order.call_args[0][3] == 1
-    assert api_mock.create_order.call_args[0][4] is None
+    if exchange._order_needs_price(order_type):
+        assert api_mock.create_order.call_args[0][4] == 200
+    else:
+        assert api_mock.create_order.call_args[0][4] is None

     api_mock.create_order.reset_mock()
     order_type = 'limit'
@@ -1541,7 +1570,10 @@ def test_buy_considers_time_in_force(default_conf, mocker, exchange_name):
     assert api_mock.create_order.call_args[0][1] == order_type
     assert api_mock.create_order.call_args[0][2] == 'buy'
     assert api_mock.create_order.call_args[0][3] == 1
-    assert api_mock.create_order.call_args[0][4] is None
+    if exchange._order_needs_price(order_type):
+        assert api_mock.create_order.call_args[0][4] == 200
+    else:
+        assert api_mock.create_order.call_args[0][4] is None
     # Market orders should not send timeInForce!!
     assert "timeInForce" not in api_mock.create_order.call_args[0][5]

@@ -1585,7 +1617,10 @@ def test_sell_prod(default_conf, mocker, exchange_name):
     assert api_mock.create_order.call_args[0][1] == order_type
     assert api_mock.create_order.call_args[0][2] == 'sell'
     assert api_mock.create_order.call_args[0][3] == 1
-    assert api_mock.create_order.call_args[0][4] is None
+    if exchange._order_needs_price(order_type):
+        assert api_mock.create_order.call_args[0][4] == 200
+    else:
+        assert api_mock.create_order.call_args[0][4] is None

     api_mock.create_order.reset_mock()
     order_type = 'limit'
@@ -1679,7 +1714,10 @@ def test_sell_considers_time_in_force(default_conf, mocker, exchange_name):
     assert api_mock.create_order.call_args[0][1] == order_type
     assert api_mock.create_order.call_args[0][2] == 'sell'
     assert api_mock.create_order.call_args[0][3] == 1
-    assert api_mock.create_order.call_args[0][4] is None
+    if exchange._order_needs_price(order_type):
+        assert api_mock.create_order.call_args[0][4] == 200
+    else:
+        assert api_mock.create_order.call_args[0][4] is None
     # Market orders should not send timeInForce!!
     assert "timeInForce" not in api_mock.create_order.call_args[0][5]
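Note: the tests stop hard-coding `None` for the price argument because whether a market order carries a price now depends on the exchange: some venues quote market buys in cost and need a price to convert. A sketch of the kind of predicate the tests consult (`_order_needs_price` is taken from the diff; the body here is a plausible reading, not the verified implementation):

```python
def order_needs_price(order_type: str, requires_price_for_market: bool) -> bool:
    # Limit orders always need a price; market orders only on exchanges
    # that convert amount to cost via the price.
    return order_type != 'market' or requires_price_for_market


assert order_needs_price('limit', requires_price_for_market=False)
assert not order_needs_price('market', requires_price_for_market=False)
assert order_needs_price('market', requires_price_for_market=True)
```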
@@ -2248,7 +2286,6 @@ def test_refresh_latest_ohlcv_cache(mocker, default_conf, candle_type, time_mach
     assert res[pair2].at[0, 'open']


-@pytest.mark.asyncio
 @pytest.mark.parametrize("exchange_name", EXCHANGES)
 async def test__async_get_candle_history(default_conf, mocker, caplog, exchange_name):
     ohlcv = [
@@ -2277,7 +2314,7 @@ async def test__async_get_candle_history(default_conf, mocker, caplog, exchange_
     assert res[3] == ohlcv
     assert exchange._api_async.fetch_ohlcv.call_count == 1
     assert not log_has(f"Using cached candle (OHLCV) data for {pair} ...", caplog)
-
+    exchange.close()
     # exchange = Exchange(default_conf)
     await async_ccxt_exception(mocker, default_conf, MagicMock(),
                                "_async_get_candle_history", "fetch_ohlcv",
@@ -2292,15 +2329,17 @@ async def test__async_get_candle_history(default_conf, mocker, caplog, exchange_
         await exchange._async_get_candle_history(pair, "5m", CandleType.SPOT,
                                                  (arrow.utcnow().int_timestamp - 2000) * 1000)
+    exchange.close()

     with pytest.raises(OperationalException, match=r'Exchange.* does not support fetching '
                                                    r'historical candle \(OHLCV\) data\..*'):
         api_mock.fetch_ohlcv = MagicMock(side_effect=ccxt.NotSupported("Not supported"))
         exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
         await exchange._async_get_candle_history(pair, "5m", CandleType.SPOT,
                                                  (arrow.utcnow().int_timestamp - 2000) * 1000)
+    exchange.close()


 @pytest.mark.asyncio
 async def test__async_kucoin_get_candle_history(default_conf, mocker, caplog):
     from freqtrade.exchange.common import _reset_logging_mixin
     _reset_logging_mixin()
@@ -2341,9 +2380,9 @@ async def test__async_kucoin_get_candle_history(default_conf, mocker, caplog):
     # Expect the "returned exception" message 12 times (4 retries * 3 (loop))
     assert num_log_has_re(msg, caplog) == 12
     assert num_log_has_re(msg2, caplog) == 9
+    exchange.close()


-@pytest.mark.asyncio
 async def test__async_get_candle_history_empty(default_conf, mocker, caplog):
     """ Test empty exchange result """
     ohlcv = []
@@ -2363,6 +2402,7 @@ async def test__async_get_candle_history_empty(default_conf, mocker, caplog):
     assert res[2] == CandleType.SPOT
     assert res[3] == ohlcv
     assert exchange._api_async.fetch_ohlcv.call_count == 1
+    exchange.close()


 def test_refresh_latest_ohlcv_inv_result(default_conf, mocker, caplog):
@@ -2757,7 +2797,6 @@ async def test___async_get_candle_history_sort(default_conf, mocker, exchange_na
     assert res_ohlcv[9][5] == 2.31452783


-@pytest.mark.asyncio
 @pytest.mark.parametrize("exchange_name", EXCHANGES)
 async def test__async_fetch_trades(default_conf, mocker, caplog, exchange_name,
                                    fetch_trades_result):
@@ -2785,8 +2824,8 @@ async def test__async_fetch_trades(default_conf, mocker, caplog, exchange_name,
     assert exchange._api_async.fetch_trades.call_args[1]['limit'] == 1000
     assert exchange._api_async.fetch_trades.call_args[1]['params'] == {'from': '123'}
     assert log_has_re(f"Fetching trades for pair {pair}, params: .*", caplog)
+    exchange.close()

-    exchange = Exchange(default_conf)
     await async_ccxt_exception(mocker, default_conf, MagicMock(),
                                "_async_fetch_trades", "fetch_trades",
                                pair='ABCD/BTC', since=None)
@@ -2796,15 +2835,16 @@ async def test__async_fetch_trades(default_conf, mocker, caplog, exchange_name,
     api_mock.fetch_trades = MagicMock(side_effect=ccxt.BaseError("Unknown error"))
     exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
     await exchange._async_fetch_trades(pair, since=(arrow.utcnow().int_timestamp - 2000) * 1000)
+    exchange.close()

     with pytest.raises(OperationalException, match=r'Exchange.* does not support fetching '
                                                    r'historical trade data\..*'):
         api_mock.fetch_trades = MagicMock(side_effect=ccxt.NotSupported("Not supported"))
         exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
         await exchange._async_fetch_trades(pair, since=(arrow.utcnow().int_timestamp - 2000) * 1000)
+    exchange.close()


-@pytest.mark.asyncio
 @pytest.mark.parametrize("exchange_name", EXCHANGES)
 async def test__async_fetch_trades_contract_size(default_conf, mocker, caplog, exchange_name,
                                                  fetch_trades_result):
@@ -2839,6 +2879,7 @@ async def test__async_fetch_trades_contract_size(default_conf, mocker, caplog, e
     pair = 'ETH/USDT:USDT'
     res = await exchange._async_fetch_trades(pair, since=None, params=None)
     assert res[0][5] == 300
+    exchange.close()


 @pytest.mark.asyncio
@@ -4807,7 +4848,6 @@ def test_load_leverage_tiers(mocker, default_conf, leverage_tiers, exchange_name
     )


-@pytest.mark.asyncio
 @pytest.mark.parametrize('exchange_name', EXCHANGES)
 async def test_get_market_leverage_tiers(mocker, default_conf, exchange_name):
     default_conf['exchange']['name'] = exchange_name
@@ -5264,7 +5304,7 @@ def test_stoploss_contract_size(mocker, default_conf, contract_size, order_amoun
     })
     default_conf['dry_run'] = False
     mocker.patch(f'{EXMS}.amount_to_precision', lambda s, x, y: y)
-    mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y: y)
+    mocker.patch(f'{EXMS}.price_to_precision', lambda s, x, y, **kwargs: y)

     exchange = get_patched_exchange(mocker, default_conf, api_mock)
     exchange.get_contract_size = MagicMock(return_value=contract_size)
@@ -5284,3 +5324,10 @@ def test_stoploss_contract_size(mocker, default_conf, contract_size, order_amoun
     assert order['cost'] == 100
     assert order['filled'] == 100
     assert order['remaining'] == 100
+
+
+def test_price_to_precision_with_default_conf(default_conf, mocker):
+    conf = copy.deepcopy(default_conf)
+    patched_ex = get_patched_exchange(mocker, conf)
+    prec_price = patched_ex.price_to_precision("XRP/USDT", 1.0000000101)
+    assert prec_price == 1.00000001
@@ -4,42 +4,9 @@ from unittest.mock import MagicMock

 import pytest

 from freqtrade.enums import MarginMode, TradingMode
-from freqtrade.exceptions import OperationalException
 from freqtrade.exchange import Gate
-from freqtrade.resolvers.exchange_resolver import ExchangeResolver
 from tests.conftest import EXMS, get_patched_exchange


-def test_validate_order_types_gate(default_conf, mocker):
-    default_conf['exchange']['name'] = 'gate'
-    mocker.patch(f'{EXMS}._init_ccxt')
-    mocker.patch(f'{EXMS}._load_markets', return_value={})
-    mocker.patch(f'{EXMS}.validate_pairs')
-    mocker.patch(f'{EXMS}.validate_timeframes')
-    mocker.patch(f'{EXMS}.validate_stakecurrency')
-    mocker.patch(f'{EXMS}.validate_pricing')
-    mocker.patch(f'{EXMS}.name', 'Gate')
-    exch = ExchangeResolver.load_exchange('gate', default_conf, True)
-    assert isinstance(exch, Gate)
-
-    default_conf['order_types'] = {
-        'entry': 'market',
-        'exit': 'limit',
-        'stoploss': 'market',
-        'stoploss_on_exchange': False
-    }
-
-    with pytest.raises(OperationalException,
-                       match=r'Exchange .* does not support market orders.'):
-        ExchangeResolver.load_exchange('gate', default_conf, True)
-
-    # market-orders supported on futures markets.
-    default_conf['trading_mode'] = 'futures'
-    default_conf['margin_mode'] = 'isolated'
-    ex = ExchangeResolver.load_exchange('gate', default_conf, True)
-    assert ex
-
-
 @pytest.mark.usefixtures("init_persistence")
 def test_fetch_stoploss_order_gate(default_conf, mocker):
     exchange = get_patched_exchange(mocker, default_conf, id='gate')
Some files were not shown because too many files have changed in this diff.