Merge branch 'develop' into remove-redundant-dependencies

ASU 2023-02-26 04:29:30 +02:00
commit 7e7ae144a9
17 changed files with 92 additions and 38 deletions

View File

@@ -90,14 +90,14 @@ jobs:
freqtrade create-userdir --userdir user_data
freqtrade hyperopt --datadir tests/testdata -e 6 --strategy SampleStrategy --hyperopt-loss SharpeHyperOptLossDaily --print-all
- name: Flake8
run: |
flake8
- name: Sort imports (isort)
run: |
isort --check .
- name: Run Ruff
run: |
ruff check --format=github .
- name: Mypy
run: |
mypy freqtrade scripts tests
@@ -186,14 +186,14 @@ jobs:
freqtrade create-userdir --userdir user_data
freqtrade hyperopt --datadir tests/testdata -e 5 --strategy SampleStrategy --hyperopt-loss SharpeHyperOptLossDaily --print-all
- name: Flake8
run: |
flake8
- name: Sort imports (isort)
run: |
isort --check .
- name: Run Ruff
run: |
ruff check --format=github .
- name: Mypy
run: |
mypy freqtrade scripts
@@ -248,9 +248,9 @@ jobs:
freqtrade create-userdir --userdir user_data
freqtrade hyperopt --datadir tests/testdata -e 5 --strategy SampleStrategy --hyperopt-loss SharpeHyperOptLossDaily --print-all
- name: Flake8
- name: Run Ruff
run: |
flake8
ruff check --format=github .
- name: Mypy
run: |

View File

@@ -27,6 +27,12 @@ repos:
name: isort (python)
# stages: [push]
- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: 'v0.0.251'
hooks:
- id: ruff
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
hooks:

View File

@@ -45,16 +45,17 @@ pytest tests/test_<file_name>.py::test_<method_name>
### 2. Test if your code is PEP8 compliant
#### Run Flake8
#### Run Ruff
```bash
flake8 freqtrade tests scripts
ruff .
```
We receive a lot of code that fails the `flake8` checks.
We receive a lot of code that fails the `ruff` checks.
To help with that, we encourage you to install the git pre-commit
hook that will warn you when you try to commit code that fails these checks.
Guide for installing them is [here](http://flake8.pycqa.org/en/latest/user/using-hooks.html).
You can manually run pre-commit with `pre-commit run -a`.
##### Additional styles applied

View File

@@ -24,7 +24,7 @@ This will spin up a local server (usually on port 8000) so you can see if everyt
To configure a development environment, you can either use the provided [DevContainer](#devcontainer-setup), or use the `setup.sh` script and answer "y" when asked "Do you want to install dependencies for dev [y/N]? ".
Alternatively (e.g. if your system is not supported by the setup.sh script), follow the manual installation process and run `pip3 install -e .[all]`.
This will install all required tools for development, including `pytest`, `flake8`, `mypy`, and `coveralls`.
This will install all required tools for development, including `pytest`, `ruff`, `mypy`, and `coveralls`.
Then install the git hook scripts by running `pre-commit install`, so your changes will be verified locally before committing.
This avoids a lot of waiting for CI already, as some basic formatting checks are done locally on your machine.

environment.yml Normal file
View File

View File

@@ -1,5 +1,5 @@
""" Freqtrade bot """
__version__ = '2023.2.dev'
__version__ = '2023.3.dev'
if 'dev' in __version__:
from pathlib import Path

View File

@@ -5,7 +5,7 @@ from datetime import datetime, timedelta
from typing import Any, Dict, List
from freqtrade.configuration import TimeRange, setup_utils_configuration
from freqtrade.constants import DATETIME_PRINT_FORMAT
from freqtrade.constants import DATETIME_PRINT_FORMAT, Config
from freqtrade.data.converter import convert_ohlcv_format, convert_trades_format
from freqtrade.data.history import (convert_trades_to_ohlcv, refresh_backtest_ohlcv_data,
refresh_backtest_trades_data)
@@ -20,15 +20,24 @@ from freqtrade.util.binance_mig import migrate_binance_futures_data
logger = logging.getLogger(__name__)
def _data_download_sanity(config: Config) -> None:
if 'days' in config and 'timerange' in config:
raise OperationalException("--days and --timerange are mutually exclusive. "
"You can only specify one or the other.")
if 'pairs' not in config:
raise OperationalException(
"Downloading data requires a list of pairs. "
"Please check the documentation on how to configure this.")
def start_download_data(args: Dict[str, Any]) -> None:
"""
Download data (former download_backtest_data.py script)
"""
config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE)
if 'days' in config and 'timerange' in config:
raise OperationalException("--days and --timerange are mutually exclusive. "
"You can only specify one or the other.")
_data_download_sanity(config)
timerange = TimeRange()
if 'days' in config:
time_since = (datetime.now() - timedelta(days=config['days'])).strftime("%Y%m%d")
@@ -40,11 +49,6 @@ def start_download_data(args: Dict[str, Any]) -> None:
# Remove stake-currency to skip checks which are not relevant for data download
config['stake_currency'] = ''
if 'pairs' not in config:
raise OperationalException(
"Downloading data requires a list of pairs. "
"Please check the documentation on how to configure this.")
pairs_not_available: List[str] = []
# Init exchange
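For context, a minimal sketch of how the extracted `_data_download_sanity` check behaves, assuming freqtrade is installed and that this hunk belongs to `freqtrade/commands/data_commands.py` (so the helper is importable from there); the config dicts are made up for illustration:

```python
from freqtrade.commands.data_commands import _data_download_sanity
from freqtrade.exceptions import OperationalException

# Both --days and --timerange resolved into the config -> should raise
try:
    _data_download_sanity({"days": 30, "timerange": "20220101-", "pairs": ["BTC/USDT"]})
except OperationalException as err:
    print(err)  # --days and --timerange are mutually exclusive. ...

# No pair list configured -> should also raise
try:
    _data_download_sanity({"days": 30})
except OperationalException as err:
    print(err)  # Downloading data requires a list of pairs. ...
```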

View File

@@ -13,6 +13,9 @@ class CandleType(str, Enum):
FUNDING_RATE = "funding_rate"
# BORROW_RATE = "borrow_rate" # * unimplemented
def __str__(self):
return f"{self.name.lower()}"
@staticmethod
def from_string(value: str) -> 'CandleType':
if not value:
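A quick illustration of what the new `__str__` buys, assuming freqtrade is installed and `CandleType` is exported from `freqtrade.enums` as in the upstream package; the same pattern is applied to the RPC and signal enums in the hunks below:

```python
from freqtrade.enums import CandleType

# Previously, str() on a member produced the qualified name ("CandleType.FUNDING_RATE");
# with the override it returns the lowercased member name instead.
print(str(CandleType.FUNDING_RATE))  # -> funding_rate
print(f"{CandleType.FUNDING_RATE}")  # f-strings / log messages now yield the same short form
```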

View File

@@ -37,5 +37,8 @@ class RPCRequestType(str, Enum):
WHITELIST = 'whitelist'
ANALYZED_DF = 'analyzed_df'
def __str__(self):
return self.value
NO_ECHO_MESSAGES = (RPCMessageType.ANALYZED_DF, RPCMessageType.WHITELIST, RPCMessageType.NEW_CANDLE)

View File

@@ -10,6 +10,9 @@ class SignalType(Enum):
ENTER_SHORT = "enter_short"
EXIT_SHORT = "exit_short"
def __str__(self):
return f"{self.name.lower()}"
class SignalTagType(Enum):
"""
@@ -18,7 +21,13 @@ class SignalTagType(Enum):
ENTER_TAG = "enter_tag"
EXIT_TAG = "exit_tag"
def __str__(self):
return f"{self.name.lower()}"
class SignalDirection(str, Enum):
LONG = 'long'
SHORT = 'short'
def __str__(self):
return f"{self.name.lower()}"

View File

@@ -1961,7 +1961,8 @@ class Exchange:
cache: bool, drop_incomplete: bool) -> DataFrame:
# keeping last candle time as last refreshed time of the pair
if ticks and cache:
self._pairs_last_refresh_time[(pair, timeframe, c_type)] = ticks[-1][0] // 1000
idx = -2 if drop_incomplete and len(ticks) > 1 else -1
self._pairs_last_refresh_time[(pair, timeframe, c_type)] = ticks[idx][0] // 1000
# keeping parsed dataframe in cache
ohlcv_df = ohlcv_to_dataframe(ticks, timeframe, pair=pair, fill_missing=True,
drop_incomplete=drop_incomplete)
@@ -2034,7 +2035,9 @@
# Timeframe in seconds
interval_in_sec = timeframe_to_seconds(timeframe)
plr = self._pairs_last_refresh_time.get((pair, timeframe, candle_type), 0) + interval_in_sec
return plr < arrow.utcnow().int_timestamp
# current, active candle open date
now = int(timeframe_to_prev_date(timeframe).timestamp())
return plr < now
@retrier_async
async def _async_get_candle_history(
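To make the index arithmetic in this hunk concrete, here is a small standalone sketch (plain Python, not freqtrade code) of how the cached refresh timestamp is chosen, assuming millisecond candle timestamps as returned by ccxt:

```python
# Candles as [timestamp_ms, open, high, low, close, volume]; the last one is still open.
ticks = [
    [1_677_400_000_000, 1.00, 1.10, 0.90, 1.05, 10.0],
    [1_677_403_600_000, 1.05, 1.20, 1.00, 1.15, 12.0],
    [1_677_407_200_000, 1.15, 1.20, 1.10, 1.18, 3.0],  # incomplete, currently open candle
]

drop_incomplete = True
# When the incomplete candle is dropped from the cached dataframe, the refresh marker
# has to point at the last *kept* candle, i.e. the second-to-last element.
idx = -2 if drop_incomplete and len(ticks) > 1 else -1
last_refresh = ticks[idx][0] // 1000  # seconds, matching _pairs_last_refresh_time
print(last_refresh)  # 1677403600
```

The second half of the hunk follows the same idea: instead of comparing against `utcnow()`, a pair only needs a refresh once the open date of the current candle (`timeframe_to_prev_date(timeframe)`) has moved past the stored timestamp plus one interval.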

View File

@@ -19,5 +19,4 @@ class Hitbtc(Exchange):
_ft_has: Dict = {
"ohlcv_candle_limit": 1000,
"ohlcv_params": {"sort": "DESC"}
}

View File

@@ -56,3 +56,24 @@ exclude = [
"build_helpers/*.py",
]
ignore = ["freqtrade/vendor/**"]
[tool.ruff]
line-length = 100
extend-exclude = [".env"]
target-version = "py38"
extend-select = [
"C90", # mccabe
# "N", # pep8-naming
# "UP", # pyupgrade
"TID", # flake8-tidy-imports
# "EXE", # flake8-executable
"YTT", # flake8-2020
# "DTZ", # flake8-datetimez
# "RSE", # flake8-raise
# "TCH", # flake8-type-checking
# "PTH", # flake8-use-pathlib
]
[tool.ruff.mccabe]
max-complexity = 12
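For context on the new `C90` selection: with `max-complexity = 12`, ruff's mccabe rule (C901) is expected to flag any function whose cyclomatic complexity exceeds 12. A contrived sketch of the kind of function it would reject:

```python
def describe(code: int) -> str:
    # One `if` plus eleven `elif` branches gives a cyclomatic complexity of 13,
    # just above the configured limit of 12, so `ruff check` should report C901
    # ("describe is too complex") for this function.
    if code == 1:
        return "one"
    elif code == 2:
        return "two"
    elif code == 3:
        return "three"
    elif code == 4:
        return "four"
    elif code == 5:
        return "five"
    elif code == 6:
        return "six"
    elif code == 7:
        return "seven"
    elif code == 8:
        return "eight"
    elif code == 9:
        return "nine"
    elif code == 10:
        return "ten"
    elif code == 11:
        return "eleven"
    elif code == 12:
        return "twelve"
    return "other"
```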

View File

@@ -7,8 +7,7 @@
-r docs/requirements-docs.txt
coveralls==3.3.1
flake8==6.0.0
flake8-tidy-imports==4.8.0
ruff==0.0.251
mypy==1.0.1
pre-commit==3.0.4
pytest==7.2.1

View File

@@ -32,8 +32,6 @@ hdf5 = [
develop = [
'coveralls',
'flake8',
'flake8-tidy-imports',
'mypy',
'pytest',
'pytest-asyncio',

View File

@@ -2215,7 +2215,7 @@ def test_refresh_latest_ohlcv_cache(mocker, default_conf, candle_type, time_mach
assert len(res[pair1]) == 99
assert len(res[pair2]) == 99
assert exchange._klines
assert exchange._pairs_last_refresh_time[pair1] == ohlcv[-1][0] // 1000
assert exchange._pairs_last_refresh_time[pair1] == ohlcv[-2][0] // 1000
exchange._api_async.fetch_ohlcv.reset_mock()
# Returned from cache
@@ -2224,7 +2224,7 @@ def test_refresh_latest_ohlcv_cache(mocker, default_conf, candle_type, time_mach
assert len(res) == 2
assert len(res[pair1]) == 99
assert len(res[pair2]) == 99
assert exchange._pairs_last_refresh_time[pair1] == ohlcv[-1][0] // 1000
assert exchange._pairs_last_refresh_time[pair1] == ohlcv[-2][0] // 1000
# Move time 1 candle further but result didn't change yet
time_machine.move_to(start + timedelta(hours=101))
@@ -2234,7 +2234,7 @@ def test_refresh_latest_ohlcv_cache(mocker, default_conf, candle_type, time_mach
assert len(res[pair1]) == 99
assert len(res[pair2]) == 99
assert res[pair2].at[0, 'open']
assert exchange._pairs_last_refresh_time[pair1] == ohlcv[-1][0] // 1000
assert exchange._pairs_last_refresh_time[pair1] == ohlcv[-2][0] // 1000
refresh_pior = exchange._pairs_last_refresh_time[pair1]
# New candle on exchange - return 100 candles - but skip one candle so we actually get 2 candles
@@ -2252,8 +2252,8 @@ def test_refresh_latest_ohlcv_cache(mocker, default_conf, candle_type, time_mach
assert res[pair2].at[0, 'open']
assert refresh_pior != exchange._pairs_last_refresh_time[pair1]
assert exchange._pairs_last_refresh_time[pair1] == ohlcv[-1][0] // 1000
assert exchange._pairs_last_refresh_time[pair2] == ohlcv[-1][0] // 1000
assert exchange._pairs_last_refresh_time[pair1] == ohlcv[-2][0] // 1000
assert exchange._pairs_last_refresh_time[pair2] == ohlcv[-2][0] // 1000
exchange._api_async.fetch_ohlcv.reset_mock()
# Retry same call - from cache

View File

@@ -356,6 +356,14 @@ def test_exception_send_msg(default_conf, mocker, caplog):
}
webhook.send_msg(msg)
# Test no failure for not implemented but known messagetypes
for e in RPCMessageType:
msg = {
'type': e,
'status': 'whatever'
}
webhook.send_msg(msg)
def test__send_msg(default_conf, mocker, caplog):
default_conf["webhook"] = get_webhook_dict()