Merge branch 'develop' of github.com:lolongcovas/freqtrade into strategies
Commit 145d9fd633

.github/workflows/ci.yml (vendored)
@@ -410,7 +410,7 @@ jobs:
         python setup.py sdist bdist_wheel

     - name: Publish to PyPI (Test)
-      uses: pypa/gh-action-pypi-publish@v1.5.1
+      uses: pypa/gh-action-pypi-publish@v1.6.1
      if: (github.event_name == 'release')
       with:
         user: __token__
@@ -418,7 +418,7 @@ jobs:
         repository_url: https://test.pypi.org/legacy/

     - name: Publish to PyPI
-      uses: pypa/gh-action-pypi-publish@v1.5.1
+      uses: pypa/gh-action-pypi-publish@v1.6.1
       if: (github.event_name == 'release')
       with:
         user: __token__
@@ -37,7 +37,7 @@ Mandatory parameters are marked as **Required** and have to be set in one of the
 | `indicator_max_period_candles` | **No longer used (#7325)**. Replaced by `startup_candle_count` which is set in the [strategy](freqai-configuration.md#building-a-freqai-strategy). `startup_candle_count` is timeframe independent and defines the maximum *period* used in `populate_any_indicators()` for indicator creation. FreqAI uses this parameter together with the maximum timeframe in `include_time_frames` to calculate how many data points to download such that the first data point does not include a NaN. <br> **Datatype:** Positive integer.
 | `indicator_periods_candles` | Time periods to calculate indicators for. The indicators are added to the base indicator dataset. <br> **Datatype:** List of positive integers.
 | `principal_component_analysis` | Automatically reduce the dimensionality of the data set using Principal Component Analysis. See details about how it works [here](#reducing-data-dimensionality-with-principal-component-analysis) <br> **Datatype:** Boolean. <br> Default: `False`.
-| `plot_feature_importances` | Create a feature importance plot for each model for the top/bottom `plot_feature_importances` number of features. <br> **Datatype:** Integer. <br> Default: `0`.
+| `plot_feature_importances` | Create a feature importance plot for each model for the top/bottom `plot_feature_importances` number of features. Plot is stored in `user_data/models/<identifier>/sub-train-<COIN>_<timestamp>.html`. <br> **Datatype:** Integer. <br> Default: `0`.
 | `DI_threshold` | Activates the use of the Dissimilarity Index for outlier detection when set to > 0. See details about how it works [here](freqai-feature-engineering.md#identifying-outliers-with-the-dissimilarity-index-di). <br> **Datatype:** Positive float (typically < 1).
 | `use_SVM_to_remove_outliers` | Train a support vector machine to detect and remove outliers from the training dataset, as well as from incoming data points. See details about how it works [here](freqai-feature-engineering.md#identifying-outliers-using-a-support-vector-machine-svm). <br> **Datatype:** Boolean.
 | `svm_params` | All parameters available in Sklearn's `SGDOneClassSVM()`. See details about some select parameters [here](freqai-feature-engineering.md#identifying-outliers-using-a-support-vector-machine-svm). <br> **Datatype:** Dictionary.
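For orientation, a minimal `freqai` config excerpt exercising the parameters in the table above might look like the sketch below. The values are illustrative, not recommendations; the keys under `feature_parameters` come from the table, while the surrounding structure is assumed from the FreqAI configuration docs.

```json
{
    "freqai": {
        "identifier": "unique-id",
        "feature_parameters": {
            "indicator_periods_candles": [10, 20],
            "principal_component_analysis": false,
            "plot_feature_importances": 10,
            "DI_threshold": 0.9,
            "use_SVM_to_remove_outliers": true,
            "svm_params": {"shuffle": false, "nu": 0.1}
        }
    }
}
```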
@@ -243,7 +243,7 @@ cd freqtrade
 tensorboard --logdir user_data/models/unique-id
 ```

-where `unique-id` is the `identifier` set in the `freqai` configuration file. This command must be run in a separate shell to view the output in their browser at 127.0.0.1:6060 (6060 is the default port used by Tensorboard).
+where `unique-id` is the `identifier` set in the `freqai` configuration file. This command must be run in a separate shell to view the output in your browser at 127.0.0.1:6006 (6006 is the default port used by Tensorboard).
@@ -1,6 +1,6 @@
 markdown==3.3.7
 mkdocs==1.4.2
-mkdocs-material==8.5.10
+mkdocs-material==8.5.11
 mdx_truly_sane_lists==1.3
-pymdown-extensions==9.8
+pymdown-extensions==9.9
 jinja2==3.1.2
@@ -104,13 +104,15 @@ class DataProvider:
     def _emit_df(
         self,
         pair_key: PairWithTimeframe,
-        dataframe: DataFrame
+        dataframe: DataFrame,
+        new_candle: bool
     ) -> None:
         """
         Send this dataframe as an ANALYZED_DF message to RPC

         :param pair_key: PairWithTimeframe tuple
-        :param data: Tuple containing the DataFrame and the datetime it was cached
+        :param dataframe: Dataframe to emit
+        :param new_candle: This is a new candle
         """
         if self.__rpc:
             self.__rpc.send_msg(
@@ -123,6 +125,11 @@ class DataProvider:
                     }
                 }
             )
+            if new_candle:
+                self.__rpc.send_msg({
+                    'type': RPCMessageType.NEW_CANDLE,
+                    'data': pair_key,
+                })

     def _add_external_df(
         self,
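Taken together, the two hunks above mean `_emit_df` still broadcasts the analyzed dataframe on every call and, when the caller flags a fresh candle, follows up with a lightweight NEW_CANDLE message carrying only the pair key. A simplified sketch of the resulting flow (the real ANALYZED_DF payload carries more fields than shown; the actual code lives in `DataProvider._emit_df`):

```python
# Simplified sketch of the message flow after this change.
from freqtrade.enums import RPCMessageType

def emit(rpc, pair_key, dataframe, new_candle: bool) -> None:
    # Broadcast the analyzed dataframe on every call, as before.
    rpc.send_msg({
        'type': RPCMessageType.ANALYZED_DF,
        'data': {'key': pair_key, 'df': dataframe},
    })
    # On a fresh candle, additionally announce just the pair key, so
    # consumers can react without diffing dataframes.
    if new_candle:
        rpc.send_msg({
            'type': RPCMessageType.NEW_CANDLE,
            'data': pair_key,
        })
```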
@@ -6,7 +6,7 @@ from freqtrade.enums.exittype import ExitType
 from freqtrade.enums.hyperoptstate import HyperoptState
 from freqtrade.enums.marginmode import MarginMode
 from freqtrade.enums.ordertypevalue import OrderTypeValues
-from freqtrade.enums.rpcmessagetype import RPCMessageType, RPCRequestType
+from freqtrade.enums.rpcmessagetype import NO_ECHO_MESSAGES, RPCMessageType, RPCRequestType
 from freqtrade.enums.runmode import NON_UTIL_MODES, OPTIMIZE_MODES, TRADING_MODES, RunMode
 from freqtrade.enums.signaltype import SignalDirection, SignalTagType, SignalType
 from freqtrade.enums.state import State
@@ -21,6 +21,7 @@ class RPCMessageType(str, Enum):

     WHITELIST = 'whitelist'
     ANALYZED_DF = 'analyzed_df'
+    NEW_CANDLE = 'new_candle'

     def __repr__(self):
         return self.value
@@ -35,3 +36,6 @@ class RPCRequestType(str, Enum):

     WHITELIST = 'whitelist'
     ANALYZED_DF = 'analyzed_df'
+
+
+NO_ECHO_MESSAGES = (RPCMessageType.ANALYZED_DF, RPCMessageType.WHITELIST, RPCMessageType.NEW_CANDLE)
@@ -462,10 +462,10 @@ class FreqaiDataKitchen:
         :param df: Dataframe containing all candles to run the entire backtest. Here
                    it is sliced down to just the present training period.
         """
-        df = df.loc[df["date"] >= timerange.startdt, :]
         if not self.live:
-            df = df.loc[df["date"] < timerange.stopdt, :]
+            df = df.loc[(df["date"] >= timerange.startdt) & (df["date"] < timerange.stopdt), :]
+        else:
+            df = df.loc[df["date"] >= timerange.startdt, :]

         return df
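The rewrite above makes the backtest branch bound the slice on both ends with a single boolean mask, while live mode keeps only the lower bound (new candles keep arriving). A standalone pandas illustration of the same pattern (not freqtrade code):

```python
# Standalone illustration of the slicing pattern used in slice_dataframe.
import pandas as pd

dates = pd.date_range("2022-01-01", periods=6, freq="D", tz="UTC")
df = pd.DataFrame({"date": dates, "close": range(6)})

startdt, stopdt = dates[1], dates[4]

# Backtesting: bounded on both ends, stopdt exclusive.
backtest_slice = df.loc[(df["date"] >= startdt) & (df["date"] < stopdt), :]
# Live: only a lower bound is needed.
live_slice = df.loc[df["date"] >= startdt, :]

assert len(backtest_slice) == 3   # rows 1, 2, 3
assert len(live_slice) == 5       # rows 1 through 5
```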
@@ -282,10 +282,10 @@ class IFreqaiModel(ABC):
             train_it += 1
             total_trains = len(dk.backtesting_timeranges)
             self.training_timerange = tr_train
-            dataframe_train = dk.slice_dataframe(tr_train, dataframe)
-            dataframe_backtest = dk.slice_dataframe(tr_backtest, dataframe)
+            len_backtest_df = len(dataframe.loc[(dataframe["date"] >= tr_backtest.startdt) & (
+                dataframe["date"] < tr_backtest.stopdt), :])

-            if not self.ensure_data_exists(dataframe_backtest, tr_backtest, pair):
+            if not self.ensure_data_exists(len_backtest_df, tr_backtest, pair):
                 continue

             self.log_backtesting_progress(tr_train, pair, train_it, total_trains)
@@ -298,13 +298,15 @@ class IFreqaiModel(ABC):

             dk.set_new_model_names(pair, timestamp_model_id)

-            if dk.check_if_backtest_prediction_is_valid(len(dataframe_backtest)):
+            if dk.check_if_backtest_prediction_is_valid(len_backtest_df):
                 self.dd.load_metadata(dk)
-                dk.find_features(dataframe_train)
+                dk.find_features(dataframe)
                 self.check_if_feature_list_matches_strategy(dk)
                 append_df = dk.get_backtesting_prediction()
                 dk.append_predictions(append_df)
             else:
+                dataframe_train = dk.slice_dataframe(tr_train, dataframe)
+                dataframe_backtest = dk.slice_dataframe(tr_backtest, dataframe)
                 if not self.model_exists(dk):
                     dk.find_features(dataframe_train)
                     dk.find_labels(dataframe_train)
@@ -804,16 +806,16 @@ class IFreqaiModel(ABC):
         self.pair_it = 1
         self.current_candle = self.dd.current_candle

-    def ensure_data_exists(self, dataframe_backtest: DataFrame,
+    def ensure_data_exists(self, len_dataframe_backtest: int,
                            tr_backtest: TimeRange, pair: str) -> bool:
         """
         Check if the dataframe is empty, if not, report useful information to user.
-        :param dataframe_backtest: the backtesting dataframe, maybe empty.
+        :param len_dataframe_backtest: the len of backtesting dataframe
         :param tr_backtest: current backtesting timerange.
         :param pair: current pair
         :return: if the data exists or not
         """
-        if self.config.get("freqai_backtest_live_models", False) and len(dataframe_backtest) == 0:
+        if self.config.get("freqai_backtest_live_models", False) and len_dataframe_backtest == 0:
             logger.info(f"No data found for pair {pair} from "
                         f"from { tr_backtest.start_fmt} to {tr_backtest.stop_fmt}. "
                         "Probably more than one training within the same candle period.")
@@ -37,7 +37,8 @@ logger = logging.getLogger(__name__)
 # 2.16: Additional daily metrics
 # 2.17: Forceentry - leverage, partial force_exit
 # 2.20: Add websocket endpoints
-API_VERSION = 2.20
+# 2.21: Add new_candle messagetype
+API_VERSION = 2.21

 # Public API, requires no auth.
 router_public = APIRouter()
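Since 2.21 only adds a message type, clients can keep a loose upper bound on the version, which is exactly what the test change near the end of this diff does. A hypothetical client-side check follows; the endpoint name comes from `test_api_show_config`, but the URL, port, credentials, and use of basic auth are all assumptions to adapt to your `api_server` configuration (the live API may instead require a JWT obtained via the login endpoint):

```python
# Hypothetical check of the bumped REST API version (2.20 -> 2.21).
import requests

resp = requests.get(
    "http://127.0.0.1:8080/api/v1/show_config",
    auth=("freqtrader", "SuperSecret"),  # assumed credentials
)
resp.raise_for_status()
api_version = resp.json()["api_version"]

# 2.21 is additive (new_candle message type), so any 2.x client keeps working.
assert 2.1 <= api_version < 3.0
```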
@@ -6,7 +6,7 @@ from collections import deque
 from typing import Any, Dict, List

 from freqtrade.constants import Config
-from freqtrade.enums import RPCMessageType
+from freqtrade.enums import NO_ECHO_MESSAGES, RPCMessageType
 from freqtrade.rpc import RPC, RPCHandler
@@ -67,7 +67,7 @@ class RPCManager:
             'status': 'stopping bot'
         }
         """
-        if msg.get('type') not in (RPCMessageType.ANALYZED_DF, RPCMessageType.WHITELIST):
+        if msg.get('type') not in NO_ECHO_MESSAGES:
             logger.info('Sending rpc message: %s', msg)
         if 'pair' in msg:
             msg.update({
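Grouping the chatty message types into `NO_ECHO_MESSAGES` keeps the log filter in one place, so adding NEW_CANDLE did not require touching the `RPCManager` logic beyond the import. A minimal sketch of the filtering behaviour, mirroring the hunk above (illustrative, not the real class):

```python
# Sketch of the echo filter; mirrors RPCManager.send_msg above.
import logging

from freqtrade.enums import NO_ECHO_MESSAGES, RPCMessageType

logger = logging.getLogger(__name__)

def send_msg(msg: dict) -> None:
    # High-frequency types (whitelist, analyzed_df, new_candle) would flood
    # the log on every candle, so they are not echoed; everything else is.
    if msg.get('type') not in NO_ECHO_MESSAGES:
        logger.info('Sending rpc message: %s', msg)
    # ... then hand the message to the registered RPC handlers ...

send_msg({'type': RPCMessageType.NEW_CANDLE, 'data': ('BTC/USDT', '5m', 'spot')})  # silent
send_msg({'type': RPCMessageType.STATUS, 'status': 'running'})  # echoed
```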
@@ -68,6 +68,7 @@ class Webhook(RPCHandler):
                               RPCMessageType.PROTECTION_TRIGGER_GLOBAL,
                               RPCMessageType.WHITELIST,
                               RPCMessageType.ANALYZED_DF,
+                              RPCMessageType.NEW_CANDLE,
                               RPCMessageType.STRATEGY_MSG):
             # Don't fail for non-implemented types
             return None
@@ -739,10 +739,10 @@ class IStrategy(ABC, HyperStrategyMixin):
         """
         pair = str(metadata.get('pair'))

+        new_candle = self._last_candle_seen_per_pair.get(pair, None) != dataframe.iloc[-1]['date']
         # Test if seen this pair and last candle before.
         # always run if process_only_new_candles is set to false
-        if (not self.process_only_new_candles or
-                self._last_candle_seen_per_pair.get(pair, None) != dataframe.iloc[-1]['date']):
+        if not self.process_only_new_candles or new_candle:

             # Defs that only make change on new candle data.
             dataframe = self.analyze_ticker(dataframe, metadata)
@@ -751,7 +751,7 @@ class IStrategy(ABC, HyperStrategyMixin):

             candle_type = self.config.get('candle_type_def', CandleType.SPOT)
             self.dp._set_cached_df(pair, self.timeframe, dataframe, candle_type=candle_type)
-            self.dp._emit_df((pair, self.timeframe, candle_type), dataframe)
+            self.dp._emit_df((pair, self.timeframe, candle_type), dataframe, new_candle)

         else:
             logger.debug("Skipping TA Analysis for already analyzed candle")
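These two strategy hunks hoist the candle-freshness test into a `new_candle` flag that both gates re-analysis and is forwarded to `_emit_df`, so the DataProvider no longer has to infer freshness itself. A standalone sketch of the per-pair detection pattern (illustrative only; the real state lives on the strategy instance):

```python
# Standalone sketch of per-pair new-candle detection (illustrative only).
from datetime import datetime, timezone

_last_candle_seen_per_pair: dict = {}

def is_new_candle(pair: str, last_candle_date: datetime) -> bool:
    # A candle is "new" the first time its date differs from the one
    # remembered for this pair; remember it so the next call returns False.
    new_candle = _last_candle_seen_per_pair.get(pair) != last_candle_date
    if new_candle:
        _last_candle_seen_per_pair[pair] = last_candle_date
    return new_candle

d = datetime(2022, 12, 1, tzinfo=timezone.utc)
assert is_new_candle('BTC/USDT', d) is True
assert is_new_candle('BTC/USDT', d) is False
```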
@@ -15,7 +15,7 @@ pytest==7.2.0
 pytest-asyncio==0.20.2
 pytest-cov==4.0.0
 pytest-mock==3.10.0
-pytest-random-order==1.0.4
+pytest-random-order==1.1.0
 isort==5.10.1
 # For datetime mocking
 time-machine==2.8.2
@@ -1,8 +1,8 @@
 numpy==1.23.5
-pandas==1.5.1
+pandas==1.5.2
 pandas-ta==0.3.14b

-ccxt==2.2.36
+ccxt==2.2.67
 # Pin cryptography for now due to rust build errors with piwheels
 cryptography==38.0.1; platform_machine == 'armv7l'
 cryptography==38.0.4; platform_machine != 'armv7l'
@@ -13,7 +13,7 @@ arrow==1.2.3
 cachetools==4.2.2
 requests==2.28.1
 urllib3==1.26.13
-jsonschema==4.17.1
+jsonschema==4.17.3
 TA-Lib==0.4.25
 technical==1.3.0
 tabulate==0.9.0
@@ -30,13 +30,13 @@ py_find_1st==1.1.5
 # Load ticker files 30% faster
 python-rapidjson==1.9
 # Properly format api responses
-orjson==3.8.2
+orjson==3.8.3

 # Notify systemd
 sdnotify==0.3.2

 # API Server
-fastapi==0.87.0
+fastapi==0.88.0
 pydantic==1.10.2
 uvicorn==0.20.0
 pyjwt==2.6.0
@@ -207,12 +207,18 @@ def test_emit_df(mocker, default_conf, ohlcv_history):
     assert send_mock.call_count == 0

     # Rpc is added, we call emit, should call send_msg
-    dataprovider._emit_df(pair, ohlcv_history)
+    dataprovider._emit_df(pair, ohlcv_history, False)
     assert send_mock.call_count == 1

+    send_mock.reset_mock()
+    dataprovider._emit_df(pair, ohlcv_history, True)
+    assert send_mock.call_count == 2
+
+    send_mock.reset_mock()
+
     # No rpc added, emit called, should not call send_msg
-    dataprovider_no_rpc._emit_df(pair, ohlcv_history)
-    assert send_mock.call_count == 1
+    dataprovider_no_rpc._emit_df(pair, ohlcv_history, False)
+    assert send_mock.call_count == 0


 def test_refresh(mocker, default_conf, ohlcv_history):
@@ -588,7 +588,7 @@ def test_api_show_config(botclient):
     assert 'unfilledtimeout' in response
     assert 'version' in response
     assert 'api_version' in response
-    assert 2.1 <= response['api_version'] <= 2.2
+    assert 2.1 <= response['api_version'] < 3.0


 def test_api_daily(botclient, mocker, ticker, fee, markets):