from datetime import datetime, timedelta, timezone
from functools import reduce
import logging
from typing import Dict, List, Optional, Tuple, Union

import numpy as np
import pandas as pd
import talib.abstract as ta
from pandas import DataFrame
from pandas_ta.trend import adx
from scipy.signal import argrelextrema
from sklearn.metrics import precision_recall_curve
from technical import qtpylib
from technical.util import resample_to_interval, resampled_merge

from freqtrade.exchange import timeframe_to_prev_date
from freqtrade.persistence import Trade
from freqtrade.strategy import DecimalParameter, IntParameter, merge_informative_pair
from freqtrade.strategy.interface import IStrategy

logger = logging.getLogger(__name__)


def find_support_levels(df: DataFrame) -> pd.Series:
    """
    Mark fractal support candles, i.e. rows where:
        cond1 = df['low'][i] < df['low'][i-1]
        cond2 = df['low'][i] < df['low'][i+1]
        cond3 = df['low'][i+1] < df['low'][i+2]
        cond4 = df['low'][i-1] < df['low'][i-2]
    """
    cond1 = df["low"] < df["low"].shift(1)
    cond2 = df["low"] < df["low"].shift(-1)
    cond3 = df["low"].shift(-1) < df["low"].shift(-2)
    cond4 = df["low"].shift(1) < df["low"].shift(2)
    return cond1 & cond2 & cond3 & cond4


def get_max_labels(df: DataFrame, alpha: float = 0.5) -> np.ndarray:
    """Label candles around local price maxima whose DI+ is above a threshold."""
    price = (df["high"] + df["low"] + df["close"]) / 3
    max_peaks = argrelextrema(price.values, np.greater, order=12)[0]
    # pandas_ta's adx() takes `length` (default 14), hence the *_14 column names below;
    # `window` is not a pandas_ta parameter.
    out = adx(df["high"], df["low"], df["close"], window=12)
    diplus = out["DMP_14"]
    di_thr = diplus[max_peaks].mean() + diplus[max_peaks].std() * alpha

    nn = 2
    labels = np.zeros(len(df), dtype=np.int32)
    for mp in max_peaks:
        ref_close = price.iloc[mp]
        start = max(0, mp - nn)
        end = min(df.shape[0], mp + nn + 1)
        # candles within 0.5% of the peak price belong to the same extreme
        pct = np.abs(price[start:end] / ref_close - 1)
        is_close = np.where(pct <= 0.005)[0]
        left_idx = is_close[0]
        right_idx = is_close[-1]
        # locality labeling
        if diplus[mp - nn + left_idx:mp - nn + right_idx].mean() >= di_thr:
            labels[mp - nn + left_idx:mp - nn + right_idx] = 1

    if labels.max() == 0:
        # if no positive label is found, force the strongest peak to be one
        idx = np.nanargmax(diplus[max_peaks])
        labels[max_peaks[idx]] = 1
    return labels


def get_min_labels(df: DataFrame, alpha: float = 0.5) -> np.ndarray:
    """Label candles around local price minima whose DI- is above a threshold."""
    price = (df["high"] + df["low"] + df["close"]) / 3
    min_peaks = argrelextrema(price.values, np.less, order=12)[0]
    # as in get_max_labels(), the default adx length of 14 is used (*_14 columns)
    out = adx(df["high"], df["low"], df["close"], window=12)
    diminus = out["DMN_14"]
    di_thr = diminus[min_peaks].mean() + diminus[min_peaks].std() * alpha

    nn = 2
    labels = np.zeros(len(df), dtype=np.int32)
    for mp in min_peaks:
        ref_close = price.iloc[mp]
        start = max(0, mp - nn)
        end = min(df.shape[0], mp + nn + 1)
        pct = np.abs(price[start:end] / ref_close - 1)
        is_close = np.where(pct <= 0.005)[0]
        left_idx = is_close[0]
        right_idx = is_close[-1]
        # locality labeling
        if diminus[mp - nn + left_idx:mp - nn + right_idx].mean() >= di_thr:
            labels[mp - nn + left_idx:mp - nn + right_idx] = 1

    if labels.max() == 0:
        # if no positive label is found, force the strongest trough to be one
        idx = np.nanargmax(diminus[min_peaks])
        labels[min_peaks[idx]] = 1
    return labels


def expand_labels(df: DataFrame, peaks: List[int]) -> np.ndarray:
    """Expand each peak index into the neighbouring candles within 0.5% of its price."""
    nn = 2
    labels = np.zeros(len(df), dtype=np.int32)
    price = (df["high"] + df["low"] + df["close"]) / 3
    for p in peaks:
        ref_price = price[p]
        start = max(0, p - nn)
        end = min(df.shape[0], p + nn + 1)
        pct = np.abs(price[start:end] / ref_price - 1)
        is_close = np.where(pct <= 0.005)[0]
        left_idx = is_close[0]
        right_idx = is_close[-1]
        # locality labeling
        labels[p - nn + left_idx:p - nn + right_idx] = 1
    return labels
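
# Illustrative only: a minimal, self-contained check of `find_support_levels` on a toy
# series. The helper below is not used by the strategy and its name is hypothetical.
# With lows 5, 4, 3, 2, 3, 4, 5 only the candle at index 3 satisfies all four fractal
# conditions (lower than both neighbours, with the neighbours themselves descending
# towards it from each side).
def _demo_find_support_levels() -> None:
    toy = DataFrame({"low": [5.0, 4.0, 3.0, 2.0, 3.0, 4.0, 5.0]})
    supports = find_support_levels(toy)
    # expected: True only at index 3
    assert supports.tolist() == [False, False, False, True, False, False, False]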

def find_labels(df: DataFrame, alpha: float = 0.1) -> Tuple[np.ndarray, np.ndarray]:
    """Find local minima/maxima and return their expanded (min, max) label arrays."""
    max_peaks = get_max_labels(df, alpha=alpha).nonzero()[0]
    min_peaks = get_min_labels(df, alpha=alpha).nonzero()[0]
    price = (df["high"] + df["low"] + df["close"]) / 3

    peaks = sorted(set(min_peaks).union(set(max_peaks)))
    updown = None
    max_peaks2 = []
    min_peaks2 = []
    for idx in peaks:
        if idx in min_peaks and idx in max_peaks:
            # one peak cannot be on both sides
            continue
        if idx in min_peaks:
            if updown is None or updown is True:
                updown = False
                min_peaks2.append(idx)
            else:
                # consecutive minima: keep only the lowest one
                if price[min_peaks2[-1]] < price[idx]:
                    continue
                else:
                    min_peaks2[-1] = idx
        elif idx in max_peaks:
            if updown is None or updown is False:
                updown = True
                max_peaks2.append(idx)
            else:
                # consecutive maxima: keep only the highest one
                if price[max_peaks2[-1]] > price[idx]:
                    continue
                else:
                    max_peaks2[-1] = idx

    min_peaks = expand_labels(df, min_peaks2)
    max_peaks = expand_labels(df, max_peaks2)
    return min_peaks, max_peaks


class FreqaiBinaryClassStrategy_v4(IStrategy):
    """
    Example strategy showing how the user connects their own IFreqaiModel to the
    strategy. Namely, the user uses:
        self.model = CustomModel(self.config)
        self.model.bridge.start(dataframe, metadata)
    to make predictions on their data. The feature_engineering_*() and
    set_freqai_targets() callbacks automatically generate the variety of features and
    labels indicated by the user in the canonical freqtrade configuration file under
    config['freqai'].
    """

    minimal_roi = {"0": 0.1, "240": -1}

    plot_config = {
        "main_plot": {},
        "subplots": {
            "do_predict": {
                "do_predict": {"color": "brown"},
            },
            "DI_values": {
                "DI_values": {"color": "#8115a9", "type": "line"},
            },
            "GTs": {
                "tp_max": {"color": "#69796a", "type": "bar"},
                "tp_min": {"color": "#e2517f", "type": "bar"},
                "max": {"color": "#69796a", "type": "line"},
                "min": {"color": "#e2517f", "type": "line"},
                "neutral": {"color": "#ffffff", "type": "line"},
            },
            "di+-": {
                "di-": {"color": "#e5e95b", "type": "line"},
                "di+": {"color": "#cda122", "type": "line"},
            },
        },
    }

    position_adjustment_enable = False
    process_only_new_candles = True
    stoploss = -0.05
    use_exit_signal = True
    startup_candle_count: int = 300
    can_short = True

    panic_market = False

    linear_roi_offset = DecimalParameter(
        0.00, 0.02, default=0.005, space="sell", optimize=False, load=True
    )
    # max roi time long in minutes
    # max_roi_time_long = IntParameter(0, 800, default=341, space="sell", optimize=True, load=True)  # BTC
    max_roi_time_long = IntParameter(0, 800, default=400, space="sell", optimize=True, load=False)
    entry_thr = DecimalParameter(0.5, 1.0, default=0.7, space="buy", optimize=True, load=False)
    exit_thr = DecimalParameter(0.5, 1.0, default=0.7, space="sell", optimize=True, load=False)

    def informative_pairs(self):
        whitelist_pairs = self.dp.current_whitelist()
        corr_pairs = self.config["freqai"]["feature_parameters"]["include_corr_pairlist"]
        informative_pairs = []
        for tf in self.config["freqai"]["feature_parameters"]["include_timeframes"]:
            for pair in whitelist_pairs:
                informative_pairs.append((pair, tf))
            for pair in corr_pairs:
                if pair in whitelist_pairs:
                    continue  # avoid duplication
                informative_pairs.append((pair, tf))
        return informative_pairs
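
    # Illustrative only: the callbacks above and below rely on the `freqai` section of
    # the configuration. The keys are real FreqAI options referenced in this file, but
    # the values here are example settings, not a recommendation:
    #
    #   "freqai": {
    #       "feature_parameters": {
    #           "include_timeframes": ["5m", "1h", "4h"],
    #           "include_corr_pairlist": ["BTC/USDT", "ETH/USDT"],
    #           "include_shifted_candles": 2,
    #           "indicator_periods_candles": [10, 20]
    #       }
    #   }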

    def feature_engineering_expand_all(self, dataframe: DataFrame, period: int, **kwargs):
        dataframe["%-rsi-period"] = ta.RSI(dataframe, timeperiod=period)
        dataframe["%-mfi-period"] = ta.MFI(dataframe, timeperiod=period)
        # pandas_ta's adx() takes `length` (default 14), hence the *_14 column names
        out = adx(dataframe["high"], dataframe["low"], dataframe["close"], window=period)
        dataframe["%-adx-period"] = out["ADX_14"]
        dataframe["%-diplus-period"] = out["DMP_14"]
        dataframe["%-diminus-period"] = out["DMN_14"]
        dataframe["%-sma-period"] = ta.SMA(dataframe, timeperiod=period)
        dataframe["%-ema-period"] = ta.EMA(dataframe, timeperiod=period)
        dataframe["%-close_over_20sma-period"] = dataframe["close"] / dataframe["%-sma-period"]

        bollinger = qtpylib.bollinger_bands(
            qtpylib.typical_price(dataframe), window=period, stds=2.2
        )
        dataframe["bb_lowerband-period"] = bollinger["lower"]
        dataframe["bb_middleband-period"] = bollinger["mid"]
        dataframe["bb_upperband-period"] = bollinger["upper"]
        dataframe["%-bb_width-period"] = (
            dataframe["bb_upperband-period"] - dataframe["bb_lowerband-period"]
        ) / dataframe["bb_middleband-period"]
        dataframe["%-close-bb_lower-period"] = (
            dataframe["close"] / dataframe["bb_lowerband-period"]
        )

        dataframe["%-roc-period"] = ta.ROC(dataframe, timeperiod=period)
        macd = ta.MACD(dataframe, timeperiod=period)
        dataframe["%-macd-period"] = macd["macd"]
        dataframe["%-relative_volume-period"] = (
            dataframe["volume"] / dataframe["volume"].rolling(period).mean()
        )
        return dataframe

    def feature_engineering_expand_basic(self, dataframe: DataFrame, **kwargs):
        dataframe["%-pct-change"] = dataframe["close"].pct_change()
        dataframe["%-raw_volume"] = dataframe["volume"]
        dataframe["%-raw_price"] = dataframe["close"]
        return dataframe

    def feature_engineering_standard(self, dataframe: DataFrame, **kwargs):
        dataframe["%-day_of_week"] = dataframe["date"].dt.dayofweek
        dataframe["%-hour_of_day"] = dataframe["date"].dt.hour
        return dataframe

    def set_freqai_targets(self, dataframe: DataFrame, **kwargs):
        # categorical target: each candle is labelled "min", "max" or "neutral"
        minmax = np.array(["neutral"] * len(dataframe))
        min_labels, max_labels = find_labels(dataframe, alpha=-0.5)
        minmax[min_labels == 1] = "min"
        minmax[max_labels == 1] = "max"
        dataframe["&s-minmax"] = np.array([str(x) for x in minmax]).astype(object)
        return dataframe
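
    # Note (assumption based on FreqAI's classifier convention, not verified here):
    # for a categorical target such as "&s-minmax", self.freqai.start() is expected to
    # append one probability column per class ("min", "max", "neutral") in addition to
    # "do_predict" and "DI_values", which is why populate_entry_trend() and
    # populate_exit_trend() below read df["min"] and df["max"].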
# """ # coin = pair.split('/')[0] # if informative is None: # informative = self.dp.get_pair_dataframe(pair, tf) # # first loop is automatically duplicating indicators for time periods # for t in self.freqai_info["feature_parameters"]["indicator_periods_candles"]: # t = int(t) # informative[f"%-{pair}rsi-period_{t}"] = ta.RSI(informative, timeperiod=t) # informative[f"%-{pair}mfi-period_{t}"] = ta.MFI(informative, timeperiod=t) # out = adx(informative["high"], informative["low"], informative["close"], window=t) # informative[f"%-{pair}adx-period_{t}"] = out["ADX_14"] # informative[f"%-{pair}diplus-period_{t}"] = out["DMP_14"] # informative[f"%-{pair}diminus-period_{t}"] = out["DMN_14"] # informative[f"{pair}20sma-period_{t}"] = ta.SMA(informative, timeperiod=t) # #informative[f"{pair}21ema-period_{t}"] = ta.EMA(informative, timeperiod=t) # informative[f"%-{pair}close_over_20sma-period_{t}"] = ( # informative["close"] / informative[f"{pair}20sma-period_{t}"] # ) # bollinger = qtpylib.bollinger_bands( # qtpylib.typical_price(informative), window=t, stds=2.2 # ) # informative[f"{pair}bb_lowerband-period_{t}"] = bollinger["lower"] # informative[f"{pair}bb_middleband-period_{t}"] = bollinger["mid"] # informative[f"{pair}bb_upperband-period_{t}"] = bollinger["upper"] # informative[f"%-{pair}bb_width-period_{t}"] = ( # informative[f"{pair}bb_upperband-period_{t}"] # - informative[f"{pair}bb_lowerband-period_{t}"] # ) / informative[f"{pair}bb_middleband-period_{t}"] # informative[f"%-{pair}close-bb_lower-period_{t}"] = ( # informative["close"] / informative[f"{pair}bb_lowerband-period_{t}"] # ) # informative[f"%-{pair}roc-period_{t}"] = ta.ROC(informative, timeperiod=t) # macd = ta.MACD(informative, timeperiod=t) # informative[f"%-{pair}macd-period_{t}"] = macd["macd"] # informative[f"%-{pair}relative_volume-period_{t}"] = ( # informative["volume"] / informative["volume"].rolling(t).mean() # ) # informative[f"%-{pair}pct-change"] = informative["close"].pct_change() # informative[f"%-{pair}raw_volume"] = informative["volume"] # informative[f"%-{pair}raw_price"] = informative["close"] # indicators = [col for col in informative if col.startswith("%")] # # This loop duplicates and shifts all indicators to add a sense of recency to data # for n in range(self.freqai_info["feature_parameters"]["include_shifted_candles"] + 1): # if n == 0: # continue # informative_shift = informative[indicators].shift(n) # informative_shift = informative_shift.add_suffix("_shift-" + str(n)) # informative = pd.concat((informative, informative_shift), axis=1) # # find support levels # if tf == self.freqai_info["feature_parameters"]["include_timeframes"][-1]: # informative_6h = resample_to_interval(informative, "6h") # informative_6h["support_levels"] = find_support_levels(informative_6h) # df = merge_informative_pair(df, informative_6h, self.config["timeframe"], "6h", ffill=True) # df = merge_informative_pair(df, informative, self.config["timeframe"], tf, ffill=True) # skip_columns = [ # (s + "_" + tf) for s in ["date", "open", "high", "low", "close", "volume"] # ] # df = df.drop(columns=skip_columns) # # Add generalized indicators here (because in live, it will call this # # function to populate indicators during training). 
    #     if set_generalized_indicators:
    #         df["%-day_of_week"] = (df["date"].dt.dayofweek + 1) / 7
    #         df["%-hour_of_day"] = (df["date"].dt.hour + 1) / 25
    #
    #         # user adds targets here by prepending them with &- (see convention below)
    #         # If user wishes to use multiple targets, a multioutput prediction model
    #         # needs to be used such as templates/CatboostPredictionMultiModel.py
    #         # df["&s-minima"] = FreqaiBinaryClassStrategy.get_min_labels(df)
    #         # df["&s-maxima"] = FreqaiBinaryClassStrategy.get_max_labels(df)
    #         minmax = np.array(["neutral"] * len(df))
    #         min_labels, max_labels = find_labels(df, alpha=-0.5)
    #         minmax[min_labels == 1] = "min"
    #         minmax[max_labels == 1] = "max"
    #         df["&s-minmax"] = np.array([str(x) for x in minmax]).astype(np.object0)
    #
    #     return df

    def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        # All model features are populated by the feature_engineering_*() callbacks.
        # self.freqai.start() returns the dataframe with the labels created in
        # set_freqai_targets() (and the appended predictions), an indication of whether
        # the prediction should be accepted ("do_predict"), and the target mean/std
        # values for each label for each training period.
        dataframe = self.freqai.start(dataframe, metadata, self)

        # dataframe["&s-minima"] = dataframe["&s-minima"].astype(np.float32)
        # dataframe["&s-maxima"] = dataframe["&s-maxima"].astype(np.float32)

        min_labels, max_labels = find_labels(dataframe, alpha=-0.5)
        self.maxima_threshold = 0.7  # dataframe["max"][dataframe["&s-minmax"] == "max"].mean()
        self.minima_threshold = 0.7  # dataframe["min"][dataframe["&s-minmax"] == "min"].mean()

        dataframe["tp_max"] = max_labels.astype(np.float32)
        dataframe["tp_min"] = min_labels.astype(np.float32)

        dataframe["di-"] = ta.MINUS_DI(dataframe, timeperiod=12)
        dataframe["di+"] = ta.PLUS_DI(dataframe, timeperiod=12)

        return dataframe

    def populate_entry_trend(self, df: DataFrame, metadata: dict) -> DataFrame:
        last_candles_are_stable = True
        if self.panic_market:
            hours_candle_stability = 4
            # re-enter the market only if the last `hours_candle_stability` hours were
            # stable (assumes a 5m timeframe, i.e. 12 candles per hour)
            last_candles_are_stable = (
                df["do_predict"].rolling(12 * hours_candle_stability).sum().iloc[-1]
                == 12 * hours_candle_stability
            )

        if last_candles_are_stable:
            self.panic_market = False
            enter_long_conditions = [df["do_predict"] == 1, df["min"] >= self.entry_thr.value]
            if enter_long_conditions:
                df.loc[
                    reduce(lambda x, y: x & y, enter_long_conditions),
                    ["enter_long", "enter_tag"],
                ] = (1, "long")

            if self.can_short:
                enter_short_conditions = [df["do_predict"] == 1, df["max"] >= self.exit_thr.value]
                if enter_short_conditions:
                    df.loc[
                        reduce(lambda x, y: x & y, enter_short_conditions),
                        ["enter_short", "enter_tag"],
                    ] = (1, "short")
        else:
            df["enter_long"] = np.zeros(df.shape[0])

        return df

    def populate_exit_trend(self, df: DataFrame, metadata: dict) -> DataFrame:
        exit_long_conditions = [df["do_predict"] == 1, df["max"] >= self.exit_thr.value]
        if exit_long_conditions:
            df.loc[
                reduce(lambda x, y: x & y, exit_long_conditions), ["exit_long", "exit_tag"]
            ] = (1, "exit signal")

        if self.can_short:
            exit_short_conditions = [df["do_predict"] == 1, df["min"] >= self.entry_thr.value]
            if exit_short_conditions:
                df.loc[
                    reduce(lambda x, y: x & y, exit_short_conditions), ["exit_short", "exit_tag"]
                ] = (1, "exit signal")

        return df

    def get_ticker_indicator(self):
        return int(self.config["timeframe"][:-1])
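
    # Worked example for the time-based expiry in custom_exit() below: with
    # max_roi_time_long = 400 and time_alpha fixed to 1.0, an open trade is force-exited
    # after 400 minutes. The dynamic time_alpha (computed but immediately overridden to
    # 1.0) would scale that window by
    # 1 + current_profit / (minimal_roi["0"] - stoploss) = 1 + current_profit / 0.15,
    # e.g. a trade sitting at -3% profit would expire after 400 * 0.8 = 320 minutes.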
    def custom_exit(
        self, pair: str, trade: Trade, current_time, current_rate, current_profit, **kwargs
    ):
        dataframe, _ = self.dp.get_analyzed_dataframe(pair=pair, timeframe=self.timeframe)
        if dataframe.empty:
            return None

        trade_date = timeframe_to_prev_date(self.config["timeframe"], trade.open_date_utc)
        trade_candle = dataframe.loc[(dataframe["date"] == trade_date)]
        if trade_candle.empty:
            return None
        trade_candle = trade_candle.squeeze()

        if dataframe["do_predict"].iloc[-1] != 1:
            # latest candle is out-of-distribution: exit and pause new entries
            self.panic_market = True
            return f"OOD_{trade.enter_tag}_Exit"

        time_alpha = 1 + current_profit / (self.minimal_roi["0"] - self.stoploss)
        time_alpha = 1.0  # dynamic scaling disabled for now
        if (current_time - trade.open_date_utc).total_seconds() > (
            self.max_roi_time_long.value * 60 * time_alpha
        ):
            return f"{trade.enter_tag}_Expired"

    def confirm_trade_exit(
        self,
        pair: str,
        trade: Trade,
        order_type: str,
        amount: float,
        rate: float,
        time_in_force: str,
        exit_reason: str,
        current_time,
        **kwargs,
    ) -> bool:
        entry_tag = trade.enter_tag
        follow_mode = self.config.get("freqai", {}).get("follow_mode", False)

        if not follow_mode:
            pair_dict = self.freqai.dd.pair_dict
        else:
            pair_dict = self.freqai.dd.follower_dict

        pair_dict[pair]["prediction" + entry_tag] = 0
        if not follow_mode:
            self.freqai.dd.save_drawer_to_disk()
        else:
            self.freqai.dd.save_follower_dict_to_disk()

        return True

    def confirm_trade_entry(
        self,
        pair: str,
        order_type: str,
        amount: float,
        rate: float,
        time_in_force: str,
        current_time,
        entry_tag,
        side: str,
        **kwargs,
    ) -> bool:
        df, _ = self.dp.get_analyzed_dataframe(pair, self.timeframe)
        last_candle = df.iloc[-1].squeeze()

        # reject entries that slipped more than 0.25% away from the last close
        if side == "long":
            if rate > (last_candle["close"] * (1 + 0.0025)):
                return False
        else:
            if rate < (last_candle["close"] * (1 - 0.0025)):
                return False

        return True

    def adjust_trade_position(self, trade: Trade, current_time: datetime,
                              current_rate: float, current_profit: float,
                              min_stake: Optional[float], max_stake: float,
                              **kwargs) -> Optional[float]:
        """
        Custom trade adjustment logic, returning the stake amount that a trade should be
        increased. This means extra buy orders with additional fees.
        Only called when `position_adjustment_enable` is set to True.

        For full documentation please go to https://www.freqtrade.io/en/latest/strategy-advanced/

        When not implemented by a strategy, returns None

        :param trade: trade object.
        :param current_time: datetime object, containing the current datetime
        :param current_rate: Current buy rate.
        :param current_profit: Current profit (as ratio), calculated based on current_rate.
        :param min_stake: Minimal stake size allowed by exchange.
        :param max_stake: Balance available for trading.
        :param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
        :return float: Stake amount to adjust your trade
        """
        if not trade.is_short:
            if current_profit < -0.02:
                df, _ = self.dp.get_analyzed_dataframe(trade.pair, self.timeframe)
                try:
                    # NOTE: "&s-minima" belongs to the older single-target labelling; with
                    # the current "&s-minmax" target this lookup fails and is swallowed below.
                    new_local_minima = [
                        df["&s-minima"] > self.minima_threshold,
                        (df["close"] / current_rate - 1) < 1e-3,
                    ]
                    if df.shape[0] - df.loc[reduce(lambda x, y: x & y, new_local_minima)].index[-1] <= 10:
                        return 20
                except Exception:
                    pass
        return None
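
# Illustrative only (assumed file and config names): this strategy is intended to run
# with a FreqAI classifier model, e.g.
#   freqtrade trade --strategy FreqaiBinaryClassStrategy_v4 \
#       --freqaimodel CatboostClassifier --config config_freqai.json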