From b898f86364787b3c1ba0686281a96254eb213579 Mon Sep 17 00:00:00 2001 From: theluxaz Date: Wed, 13 Oct 2021 00:02:28 +0300 Subject: [PATCH] Added sell_tag and buy/sell telegram performance functions --- freqtrade/data/btanalysis.py | 2 +- freqtrade/enums/signaltype.py | 1 + freqtrade/freqtradebot.py | 16 +- freqtrade/optimize/backtesting.py | 13 +- freqtrade/optimize/optimize_reports.py | 138 +- freqtrade/persistence/migrations.py | 6 +- freqtrade/persistence/models.py | 121 +- freqtrade/rpc/rpc.py | 31 + freqtrade/rpc/telegram.py | 152 +++ freqtrade/strategy/interface.py | 8 +- .../hyperopts/RuleNOTANDoptimizer.py | 1203 +++++++++++++++++ 11 files changed, 1673 insertions(+), 18 deletions(-) create mode 100644 freqtrade/user_data/hyperopts/RuleNOTANDoptimizer.py diff --git a/freqtrade/data/btanalysis.py b/freqtrade/data/btanalysis.py index 7d97661c4..82b2bb3a9 100644 --- a/freqtrade/data/btanalysis.py +++ b/freqtrade/data/btanalysis.py @@ -30,7 +30,7 @@ BT_DATA_COLUMNS = ['pair', 'stake_amount', 'amount', 'open_date', 'close_date', 'fee_open', 'fee_close', 'trade_duration', 'profit_ratio', 'profit_abs', 'sell_reason', 'initial_stop_loss_abs', 'initial_stop_loss_ratio', 'stop_loss_abs', - 'stop_loss_ratio', 'min_rate', 'max_rate', 'is_open', 'buy_tag'] + 'stop_loss_ratio', 'min_rate', 'max_rate', 'is_open', 'buy_tag', 'sell_tag'] def get_latest_optimize_filename(directory: Union[Path, str], variant: str) -> str: diff --git a/freqtrade/enums/signaltype.py b/freqtrade/enums/signaltype.py index d2995d57a..32ac19ba4 100644 --- a/freqtrade/enums/signaltype.py +++ b/freqtrade/enums/signaltype.py @@ -14,3 +14,4 @@ class SignalTagType(Enum): Enum for signal columns """ BUY_TAG = "buy_tag" + SELL_TAG = "sell_tag" \ No newline at end of file diff --git a/freqtrade/freqtradebot.py b/freqtrade/freqtradebot.py index 259270483..55828f763 100644 --- a/freqtrade/freqtradebot.py +++ b/freqtrade/freqtradebot.py @@ -420,7 +420,7 @@ class FreqtradeBot(LoggingMixin): return False # 
running get_signal on historical data fetched - (buy, sell, buy_tag) = self.strategy.get_signal( + (buy, sell, buy_tag,sell_tag) = self.strategy.get_signal( pair, self.strategy.timeframe, analyzed_df @@ -706,7 +706,7 @@ class FreqtradeBot(LoggingMixin): analyzed_df, _ = self.dataprovider.get_analyzed_dataframe(trade.pair, self.strategy.timeframe) - (buy, sell, _) = self.strategy.get_signal( + (buy, sell, buy_tag, sell_tag) = self.strategy.get_signal( trade.pair, self.strategy.timeframe, analyzed_df @@ -714,7 +714,7 @@ class FreqtradeBot(LoggingMixin): logger.debug('checking sell') sell_rate = self.exchange.get_rate(trade.pair, refresh=True, side="sell") - if self._check_and_execute_sell(trade, sell_rate, buy, sell): + if self._check_and_execute_sell(trade, sell_rate, buy, sell, sell_tag): return True logger.debug('Found no sell signal for %s.', trade) @@ -852,18 +852,19 @@ class FreqtradeBot(LoggingMixin): f"for pair {trade.pair}.") def _check_and_execute_sell(self, trade: Trade, sell_rate: float, - buy: bool, sell: bool) -> bool: + buy: bool, sell: bool, sell_tag: Optional[str]) -> bool: """ Check and execute sell """ + print(str(sell_tag)+"1") should_sell = self.strategy.should_sell( trade, sell_rate, datetime.now(timezone.utc), buy, sell, force_stoploss=self.edge.stoploss(trade.pair) if self.edge else 0 ) if should_sell.sell_flag: - logger.info(f'Executing Sell for {trade.pair}. Reason: {should_sell.sell_type}') - self.execute_trade_exit(trade, sell_rate, should_sell) + logger.info(f'Executing Sell for {trade.pair}. Reason: {should_sell.sell_type}. Tag: {sell_tag}') + self.execute_trade_exit(trade, sell_rate, should_sell,sell_tag) return True return False @@ -1064,7 +1065,7 @@ class FreqtradeBot(LoggingMixin): raise DependencyException( f"Not enough amount to sell. 
Trade-amount: {amount}, Wallet: {wallet_amount}") - def execute_trade_exit(self, trade: Trade, limit: float, sell_reason: SellCheckTuple) -> bool: + def execute_trade_exit(self, trade: Trade, limit: float, sell_reason: SellCheckTuple, sell_tag: Optional[str] = None) -> bool: """ Executes a trade exit for the given trade and limit :param trade: Trade instance @@ -1141,6 +1142,7 @@ class FreqtradeBot(LoggingMixin): trade.sell_order_status = '' trade.close_rate_requested = limit trade.sell_reason = sell_reason.sell_reason + trade.sell_tag = sell_tag # In case of market sell orders the order can be closed immediately if order.get('status', 'unknown') in ('closed', 'expired'): self.update_trade_state(trade, trade.open_order_id, order) diff --git a/freqtrade/optimize/backtesting.py b/freqtrade/optimize/backtesting.py index eecc7af54..3bed3c540 100644 --- a/freqtrade/optimize/backtesting.py +++ b/freqtrade/optimize/backtesting.py @@ -44,7 +44,7 @@ SELL_IDX = 4 LOW_IDX = 5 HIGH_IDX = 6 BUY_TAG_IDX = 7 - +SELL_TAG_IDX = 8 class Backtesting: """ @@ -218,7 +218,7 @@ class Backtesting: """ # Every change to this headers list must evaluate further usages of the resulting tuple # and eventually change the constants for indexes at the top - headers = ['date', 'buy', 'open', 'close', 'sell', 'low', 'high', 'buy_tag'] + headers = ['date', 'buy', 'open', 'close', 'sell', 'low', 'high', 'buy_tag', 'sell_tag'] data: Dict = {} self.progress.init_step(BacktestState.CONVERT, len(processed)) @@ -230,6 +230,7 @@ class Backtesting: pair_data.loc[:, 'buy'] = 0 # cleanup if buy_signal is exist pair_data.loc[:, 'sell'] = 0 # cleanup if sell_signal is exist pair_data.loc[:, 'buy_tag'] = None # cleanup if buy_tag is exist + pair_data.loc[:, 'sell_tag'] = None # cleanup if sell_tag is exist df_analyzed = self.strategy.advise_sell( self.strategy.advise_buy(pair_data, {'pair': pair}), {'pair': pair}).copy() @@ -241,6 +242,7 @@ class Backtesting: df_analyzed.loc[:, 'buy'] = df_analyzed.loc[:, 
'buy'].shift(1) df_analyzed.loc[:, 'sell'] = df_analyzed.loc[:, 'sell'].shift(1) df_analyzed.loc[:, 'buy_tag'] = df_analyzed.loc[:, 'buy_tag'].shift(1) + df_analyzed.loc[:, 'sell_tag'] = df_analyzed.loc[:, 'sell_tag'].shift(1) # Update dataprovider cache self.dataprovider._set_cached_df(pair, self.timeframe, df_analyzed) @@ -319,6 +321,9 @@ class Backtesting: return sell_row[OPEN_IDX] def _get_sell_trade_entry(self, trade: LocalTrade, sell_row: Tuple) -> Optional[LocalTrade]: + + + sell_candle_time = sell_row[DATE_IDX].to_pydatetime() sell = self.strategy.should_sell(trade, sell_row[OPEN_IDX], # type: ignore sell_candle_time, sell_row[BUY_IDX], @@ -327,6 +332,8 @@ class Backtesting: if sell.sell_flag: trade.close_date = sell_candle_time + if(sell_row[SELL_TAG_IDX] is not None): + trade.sell_tag = sell_row[SELL_TAG_IDX] trade.sell_reason = sell.sell_reason trade_dur = int((trade.close_date_utc - trade.open_date_utc).total_seconds() // 60) closerate = self._get_close_rate(sell_row, trade, sell, trade_dur) @@ -375,6 +382,7 @@ class Backtesting: if stake_amount and (not min_stake_amount or stake_amount > min_stake_amount): # Enter trade has_buy_tag = len(row) >= BUY_TAG_IDX + 1 + has_sell_tag = len(row) >= SELL_TAG_IDX + 1 trade = LocalTrade( pair=pair, open_rate=row[OPEN_IDX], @@ -385,6 +393,7 @@ class Backtesting: fee_close=self.fee, is_open=True, buy_tag=row[BUY_TAG_IDX] if has_buy_tag else None, + sell_tag=row[SELL_TAG_IDX] if has_sell_tag else None, exchange='backtesting', ) return trade diff --git a/freqtrade/optimize/optimize_reports.py b/freqtrade/optimize/optimize_reports.py index 7bb60228a..fcead07ba 100644 --- a/freqtrade/optimize/optimize_reports.py +++ b/freqtrade/optimize/optimize_reports.py @@ -82,7 +82,7 @@ def _generate_result_line(result: DataFrame, starting_balance: int, first_column 'profit_sum_pct': round(profit_sum * 100.0, 2), 'profit_total_abs': result['profit_abs'].sum(), 'profit_total': profit_total, - 'profit_total_pct': round(profit_total * 
100.0, 2), + 'profit_total_pct': round(profit_sum * 100.0, 2), 'duration_avg': str(timedelta( minutes=round(result['trade_duration'].mean())) ) if not result.empty else '0:00', @@ -126,6 +126,92 @@ def generate_pair_metrics(data: Dict[str, Dict], stake_currency: str, starting_b tabular_data.append(_generate_result_line(results, starting_balance, 'TOTAL')) return tabular_data +def generate_tag_metrics(tag_type:str, data: Dict[str, Dict], stake_currency: str, starting_balance: int, + results: DataFrame, skip_nan: bool = False) -> List[Dict]: + """ + Generates and returns a list of metrics for the given tag trades and the results dataframe + :param data: Dict of containing data that was used during backtesting. + :param stake_currency: stake-currency - used to correctly name headers + :param starting_balance: Starting balance + :param results: Dataframe containing the backtest results + :param skip_nan: Print "left open" open trades + :return: List of Dicts containing the metrics per pair + """ + + tabular_data = [] + + # for tag, count in results[tag_type].value_counts().iteritems(): + # result = results.loc[results[tag_type] == tag] + # + # profit_mean = result['profit_ratio'].mean() + # profit_sum = result['profit_ratio'].sum() + # profit_total = profit_sum / max_open_trades + # + # tabular_data.append( + # { + # 'sell_reason': tag, + # 'trades': count, + # 'wins': len(result[result['profit_abs'] > 0]), + # 'draws': len(result[result['profit_abs'] == 0]), + # 'losses': len(result[result['profit_abs'] < 0]), + # 'profit_mean': profit_mean, + # 'profit_mean_pct': round(profit_mean * 100, 2), + # 'profit_sum': profit_sum, + # 'profit_sum_pct': round(profit_sum * 100, 2), + # 'profit_total_abs': result['profit_abs'].sum(), + # 'profit_total': profit_total, + # 'profit_total_pct': round(profit_total * 100, 2), + # } + # ) + # + # tabular_data = [] + + for tag, count in results[tag_type].value_counts().iteritems(): + result = results[results[tag_type] == tag] + if 
skip_nan and result['profit_abs'].isnull().all(): + continue + + tabular_data.append(_generate_tag_result_line(result, starting_balance, tag)) + + # Sort by total profit %: + tabular_data = sorted(tabular_data, key=lambda k: k['profit_total_abs'], reverse=True) + + # Append Total + tabular_data.append(_generate_result_line(results, starting_balance, 'TOTAL')) + return tabular_data + +def _generate_tag_result_line(result: DataFrame, starting_balance: int, first_column: str) -> Dict: + """ + Generate one result dict, with "first_column" as key. + """ + profit_sum = result['profit_ratio'].sum() + # (end-capital - starting capital) / starting capital + profit_total = result['profit_abs'].sum() / starting_balance + + return { + 'key': first_column, + 'trades': len(result), + 'profit_mean': result['profit_ratio'].mean() if len(result) > 0 else 0.0, + 'profit_mean_pct': result['profit_ratio'].mean() * 100.0 if len(result) > 0 else 0.0, + 'profit_sum': profit_sum, + 'profit_sum_pct': round(profit_sum * 100.0, 2), + 'profit_total_abs': result['profit_abs'].sum(), + 'profit_total': profit_total, + 'profit_total_pct': round(profit_total * 100.0, 2), + 'duration_avg': str(timedelta( + minutes=round(result['trade_duration'].mean())) + ) if not result.empty else '0:00', + # 'duration_max': str(timedelta( + # minutes=round(result['trade_duration'].max())) + # ) if not result.empty else '0:00', + # 'duration_min': str(timedelta( + # minutes=round(result['trade_duration'].min())) + # ) if not result.empty else '0:00', + 'wins': len(result[result['profit_abs'] > 0]), + 'draws': len(result[result['profit_abs'] == 0]), + 'losses': len(result[result['profit_abs'] < 0]), + } + def generate_sell_reason_stats(max_open_trades: int, results: DataFrame) -> List[Dict]: """ @@ -313,6 +399,13 @@ def generate_strategy_stats(btdata: Dict[str, DataFrame], pair_results = generate_pair_metrics(btdata, stake_currency=stake_currency, starting_balance=starting_balance, results=results, skip_nan=False) 
+ buy_tag_results = generate_tag_metrics("buy_tag",btdata, stake_currency=stake_currency, + starting_balance=starting_balance, + results=results, skip_nan=False) + sell_tag_results = generate_tag_metrics("sell_tag",btdata, stake_currency=stake_currency, + starting_balance=starting_balance, + results=results, skip_nan=False) + sell_reason_stats = generate_sell_reason_stats(max_open_trades=max_open_trades, results=results) left_open_results = generate_pair_metrics(btdata, stake_currency=stake_currency, @@ -336,6 +429,8 @@ def generate_strategy_stats(btdata: Dict[str, DataFrame], 'best_pair': best_pair, 'worst_pair': worst_pair, 'results_per_pair': pair_results, + 'results_per_buy_tag': buy_tag_results, + 'results_per_sell_tag': sell_tag_results, 'sell_reason_summary': sell_reason_stats, 'left_open_trades': left_open_results, 'total_trades': len(results), @@ -504,6 +599,27 @@ def text_table_sell_reason(sell_reason_stats: List[Dict[str, Any]], stake_curren ] for t in sell_reason_stats] return tabulate(output, headers=headers, tablefmt="orgtbl", stralign="right") +def text_table_tags(tag_type:str, tag_results: List[Dict[str, Any]], stake_currency: str) -> str: + """ + Generates and returns a text table for the given backtest data and the results dataframe + :param pair_results: List of Dictionaries - one entry per pair + final TOTAL row + :param stake_currency: stake-currency - used to correctly name headers + :return: pretty printed table with tabulate as string + """ + + headers = _get_line_header("TAG", stake_currency) + floatfmt = _get_line_floatfmt(stake_currency) + output = [[ + t['key'], t['trades'], t['profit_mean_pct'], t['profit_sum_pct'], t['profit_total_abs'], + t['profit_total_pct'], t['duration_avg'], + _generate_wins_draws_losses(t['wins'], t['draws'], t['losses']) + ] for t in tag_results] + # Ignore type as floatfmt does allow tuples but mypy does not know that + return tabulate(output, headers=headers, + floatfmt=floatfmt, tablefmt="orgtbl", 
stralign="right") + + + def text_table_strategy(strategy_results, stake_currency: str) -> str: """ @@ -624,12 +740,24 @@ def show_backtest_result(strategy: str, results: Dict[str, Any], stake_currency: print(' BACKTESTING REPORT '.center(len(table.splitlines()[0]), '=')) print(table) + + table = text_table_tags("buy_tag", results['results_per_buy_tag'], stake_currency=stake_currency) + + if isinstance(table, str) and len(table) > 0: + print(' BUY TAG STATS '.center(len(table.splitlines()[0]), '=')) + print(table) + + table = text_table_sell_reason(sell_reason_stats=results['sell_reason_summary'], stake_currency=stake_currency) if isinstance(table, str) and len(table) > 0: print(' SELL REASON STATS '.center(len(table.splitlines()[0]), '=')) print(table) + + + + table = text_table_bt_results(results['left_open_trades'], stake_currency=stake_currency) if isinstance(table, str) and len(table) > 0: print(' LEFT OPEN TRADES REPORT '.center(len(table.splitlines()[0]), '=')) @@ -640,8 +768,16 @@ def show_backtest_result(strategy: str, results: Dict[str, Any], stake_currency: print(' SUMMARY METRICS '.center(len(table.splitlines()[0]), '=')) print(table) + table = text_table_tags("sell_tag",results['results_per_sell_tag'], stake_currency=stake_currency) + + if isinstance(table, str) and len(table) > 0: + print(' SELL TAG STATS '.center(len(table.splitlines()[0]), '=')) + print(table) + if isinstance(table, str) and len(table) > 0: print('=' * len(table.splitlines()[0])) + + print() diff --git a/freqtrade/persistence/migrations.py b/freqtrade/persistence/migrations.py index 1839c4130..db93cf8b0 100644 --- a/freqtrade/persistence/migrations.py +++ b/freqtrade/persistence/migrations.py @@ -82,7 +82,7 @@ def migrate_trades_table(decl_base, inspector, engine, table_back_name: str, col stake_amount, amount, amount_requested, open_date, close_date, open_order_id, stop_loss, stop_loss_pct, initial_stop_loss, initial_stop_loss_pct, stoploss_order_id, stoploss_last_update, - 
max_rate, min_rate, sell_reason, sell_order_status, strategy, buy_tag, + max_rate, min_rate, sell_reason, sell_order_status, strategy, buy_tag, sell_tag, timeframe, open_trade_value, close_profit_abs ) select id, lower(exchange), pair, @@ -98,7 +98,7 @@ def migrate_trades_table(decl_base, inspector, engine, table_back_name: str, col {stoploss_order_id} stoploss_order_id, {stoploss_last_update} stoploss_last_update, {max_rate} max_rate, {min_rate} min_rate, {sell_reason} sell_reason, {sell_order_status} sell_order_status, - {strategy} strategy, {buy_tag} buy_tag, {timeframe} timeframe, + {strategy} strategy, {buy_tag} buy_tag, {sell_tag} sell_tag, {timeframe} timeframe, {open_trade_value} open_trade_value, {close_profit_abs} close_profit_abs from {table_back_name} """)) @@ -157,7 +157,7 @@ def check_migrate(engine, decl_base, previous_tables) -> None: table_back_name = get_backup_name(tabs, 'trades_bak') # Check for latest column - if not has_column(cols, 'buy_tag'): + if not has_column(cols, 'sell_tag'): logger.info(f'Running database migration for trades - backup: {table_back_name}') migrate_trades_table(decl_base, inspector, engine, table_back_name, cols) # Reread columns - the above recreated the table! 
diff --git a/freqtrade/persistence/models.py b/freqtrade/persistence/models.py index 8c8c1e0a9..b06386810 100644 --- a/freqtrade/persistence/models.py +++ b/freqtrade/persistence/models.py @@ -258,6 +258,7 @@ class LocalTrade(): sell_order_status: str = '' strategy: str = '' buy_tag: Optional[str] = None + sell_tag: Optional[str] = None timeframe: Optional[int] = None def __init__(self, **kwargs): @@ -324,7 +325,8 @@ class LocalTrade(): 'profit_pct': round(self.close_profit * 100, 2) if self.close_profit else None, 'profit_abs': self.close_profit_abs, - 'sell_reason': self.sell_reason, + 'sell_reason': (f' ({self.sell_reason})' if self.sell_reason else ''), #+str(self.sell_reason) ## CHANGE TO BUY TAG IF NEEDED + 'sell_tag': self.sell_tag, 'sell_order_status': self.sell_order_status, 'stop_loss_abs': self.stop_loss, 'stop_loss_ratio': self.stop_loss_pct if self.stop_loss_pct else None, @@ -706,6 +708,7 @@ class Trade(_DECL_BASE, LocalTrade): sell_order_status = Column(String(100), nullable=True) strategy = Column(String(100), nullable=True) buy_tag = Column(String(100), nullable=True) + sell_tag = Column(String(100), nullable=True) timeframe = Column(Integer, nullable=True) def __init__(self, **kwargs): @@ -856,6 +859,122 @@ class Trade(_DECL_BASE, LocalTrade): for pair, profit, profit_abs, count in pair_rates ] + @staticmethod + def get_buy_tag_performance(pair: str) -> List[Dict[str, Any]]: + """ + Returns List of dicts containing all Trades, based on buy tag performance + Can either be average for all pairs or a specific pair provided + NOTE: Not supported in Backtesting. 
+ """ + + if(pair is not None): + tag_perf = Trade.query.with_entities( + Trade.buy_tag, + func.sum(Trade.close_profit).label('profit_sum'), + func.sum(Trade.close_profit_abs).label('profit_sum_abs'), + func.count(Trade.pair).label('count') + ).filter(Trade.is_open.is_(False))\ + .filter(Trade.pair.lower() == pair.lower()) \ + .order_by(desc('profit_sum_abs')) \ + .all() + else: + tag_perf = Trade.query.with_entities( + Trade.buy_tag, + func.sum(Trade.close_profit).label('profit_sum'), + func.sum(Trade.close_profit_abs).label('profit_sum_abs'), + func.count(Trade.pair).label('count') + ).filter(Trade.is_open.is_(False))\ + .group_by(Trade.pair) \ + .order_by(desc('profit_sum_abs')) \ + .all() + + return [ + { + 'buy_tag': buy_tag, + 'profit': profit, + 'profit_abs': profit_abs, + 'count': count + } + for buy_tag, profit, profit_abs, count in tag_perf + ] + + @staticmethod + def get_sell_tag_performance(pair: str) -> List[Dict[str, Any]]: + """ + Returns List of dicts containing all Trades, based on sell tag performance + Can either be average for all pairs or a specific pair provided + NOTE: Not supported in Backtesting. 
+ """ + if(pair is not None): + tag_perf = Trade.query.with_entities( + Trade.sell_tag, + func.sum(Trade.close_profit).label('profit_sum'), + func.sum(Trade.close_profit_abs).label('profit_sum_abs'), + func.count(Trade.pair).label('count') + ).filter(Trade.is_open.is_(False))\ + .filter(Trade.pair.lower() == pair.lower()) \ + .order_by(desc('profit_sum_abs')) \ + .all() + else: + tag_perf = Trade.query.with_entities( + Trade.sell_tag, + func.sum(Trade.close_profit).label('profit_sum'), + func.sum(Trade.close_profit_abs).label('profit_sum_abs'), + func.count(Trade.pair).label('count') + ).filter(Trade.is_open.is_(False))\ + .group_by(Trade.pair) \ + .order_by(desc('profit_sum_abs')) \ + .all() + + return [ + { + 'sell_tag': sell_tag, + 'profit': profit, + 'profit_abs': profit_abs, + 'count': count + } + for sell_tag, profit, profit_abs, count in tag_perf + ] + + @staticmethod + def get_mix_tag_performance(pair: str) -> List[Dict[str, Any]]: + """ + Returns List of dicts containing all Trades, based on buy_tag + sell_tag performance + Can either be average for all pairs or a specific pair provided + NOTE: Not supported in Backtesting. 
+ """ + if(pair is not None): + tag_perf = Trade.query.with_entities( + Trade.buy_tag, + Trade.sell_tag, + func.sum(Trade.close_profit).label('profit_sum'), + func.sum(Trade.close_profit_abs).label('profit_sum_abs'), + func.count(Trade.pair).label('count') + ).filter(Trade.is_open.is_(False))\ + .filter(Trade.pair.lower() == pair.lower()) \ + .order_by(desc('profit_sum_abs')) \ + .all() + else: + tag_perf = Trade.query.with_entities( + Trade.buy_tag, + Trade.sell_tag, + func.sum(Trade.close_profit).label('profit_sum'), + func.sum(Trade.close_profit_abs).label('profit_sum_abs'), + func.count(Trade.pair).label('count') + ).filter(Trade.is_open.is_(False))\ + .group_by(Trade.pair) \ + .order_by(desc('profit_sum_abs')) \ + .all() + + return [ + { 'mix_tag': str(buy_tag) + " " +str(sell_tag), + 'profit': profit, + 'profit_abs': profit_abs, + 'count': count + } + for buy_tag, sell_tag, profit, profit_abs, count in tag_perf + ] + @staticmethod def get_best_pair(start_date: datetime = datetime.fromtimestamp(0)): """ diff --git a/freqtrade/rpc/rpc.py b/freqtrade/rpc/rpc.py index 95a37452b..a53ce2150 100644 --- a/freqtrade/rpc/rpc.py +++ b/freqtrade/rpc/rpc.py @@ -669,6 +669,37 @@ class RPC: [x.update({'profit': round(x['profit'] * 100, 2)}) for x in pair_rates] return pair_rates + def _rpc_buy_tag_performance(self, pair: str) -> List[Dict[str, Any]]: + """ + Handler for buy tag performance. + Shows a performance statistic from finished trades + """ + buy_tags = Trade.get_buy_tag_performance(pair) + # Round and convert to % + [x.update({'profit': round(x['profit'] * 100, 2)}) for x in buy_tags] + return buy_tags + + + def _rpc_sell_tag_performance(self, pair: str) -> List[Dict[str, Any]]: + """ + Handler for sell tag performance. 
+ Shows a performance statistic from finished trades + """ + sell_tags = Trade.get_sell_tag_performance(pair) + # Round and convert to % + [x.update({'profit': round(x['profit'] * 100, 2)}) for x in sell_tags] + return sell_tags + + def _rpc_mix_tag_performance(self, pair: str) -> List[Dict[str, Any]]: + """ + Handler for mix tag performance. + Shows a performance statistic from finished trades + """ + mix_tags = Trade.get_mix_tag_performance(pair) + # Round and convert to % + [x.update({'profit': round(x['profit'] * 100, 2)}) for x in mix_tags] + return mix_tags + def _rpc_count(self) -> Dict[str, float]: """ Returns the number of trades running """ if self._freqtrade.state != State.RUNNING: diff --git a/freqtrade/rpc/telegram.py b/freqtrade/rpc/telegram.py index a988d2b60..1834abd64 100644 --- a/freqtrade/rpc/telegram.py +++ b/freqtrade/rpc/telegram.py @@ -108,6 +108,7 @@ class Telegram(RPCHandler): r'/trades$', r'/performance$', r'/daily$', r'/daily \d+$', r'/profit$', r'/profit \d+', r'/stats$', r'/count$', r'/locks$', r'/balance$', + r'/buys',r'/sells',r'/mix_tags', r'/stopbuy$', r'/reload_config$', r'/show_config$', r'/logs$', r'/whitelist$', r'/blacklist$', r'/edge$', r'/forcebuy$', r'/help$', r'/version$'] @@ -152,6 +153,9 @@ class Telegram(RPCHandler): CommandHandler('trades', self._trades), CommandHandler('delete', self._delete_trade), CommandHandler('performance', self._performance), + CommandHandler('buys', self._buy_tag_performance), + CommandHandler('sells', self._sell_tag_performance), + CommandHandler('mix_tags', self._mix_tag_performance), CommandHandler('stats', self._stats), CommandHandler('daily', self._daily), CommandHandler('count', self._count), @@ -173,6 +177,9 @@ class Telegram(RPCHandler): CallbackQueryHandler(self._profit, pattern='update_profit'), CallbackQueryHandler(self._balance, pattern='update_balance'), CallbackQueryHandler(self._performance, pattern='update_performance'), + CallbackQueryHandler(self._performance, 
pattern='update_buy_tag_performance'), + CallbackQueryHandler(self._performance, pattern='update_sell_tag_performance'), + CallbackQueryHandler(self._performance, pattern='update_mix_tag_performance'), CallbackQueryHandler(self._count, pattern='update_count'), CallbackQueryHandler(self._forcebuy_inline), ] @@ -258,6 +265,42 @@ class Telegram(RPCHandler): "*Current Rate:* `{current_rate:.8f}`\n" "*Close Rate:* `{limit:.8f}`").format(**msg) + sell_tag = msg['sell_tag'] + buy_tag = msg['buy_tag'] + + if sell_tag is not None and buy_tag is not None: + message = ("{emoji} *{exchange}:* Selling {pair} (#{trade_id})\n" + "*Profit:* `{profit_percent:.2f}%{profit_extra}`\n" + "*Buy Tag:* `{buy_tag}`\n" + "*Sell Tag:* `{sell_tag}`\n" + "*Sell Reason:* `{sell_reason}`\n" + "*Duration:* `{duration} ({duration_min:.1f} min)`\n" + "*Amount:* `{amount:.8f}`\n" + "*Open Rate:* `{open_rate:.8f}`\n" + "*Current Rate:* `{current_rate:.8f}`\n" + "*Close Rate:* `{limit:.8f}`").format(**msg) + elif sell_tag is None and buy_tag is not None: + message = ("{emoji} *{exchange}:* Selling {pair} (#{trade_id})\n" + "*Profit:* `{profit_percent:.2f}%{profit_extra}`\n" + "*Buy Tag:* `{buy_tag}`\n" + "*Sell Reason:* `{sell_reason}`\n" + "*Duration:* `{duration} ({duration_min:.1f} min)`\n" + "*Amount:* `{amount:.8f}`\n" + "*Open Rate:* `{open_rate:.8f}`\n" + "*Current Rate:* `{current_rate:.8f}`\n" + "*Close Rate:* `{limit:.8f}`").format(**msg) + elif sell_tag is not None and buy_tag is None: + message = ("{emoji} *{exchange}:* Selling {pair} (#{trade_id})\n" + "*Profit:* `{profit_percent:.2f}%{profit_extra}`\n" + "*Sell Tag:* `{sell_tag}`\n" + "*Sell Reason:* `{sell_reason}`\n" + "*Duration:* `{duration} ({duration_min:.1f} min)`\n" + "*Amount:* `{amount:.8f}`\n" + "*Open Rate:* `{open_rate:.8f}`\n" + "*Current Rate:* `{current_rate:.8f}`\n" + "*Close Rate:* `{limit:.8f}`").format(**msg) + + return message def send_msg(self, msg: Dict[str, Any]) -> None: @@ -364,6 +407,7 @@ class 
Telegram(RPCHandler): "*Current Pair:* {pair}", "*Amount:* `{amount} ({stake_amount} {base_currency})`", "*Buy Tag:* `{buy_tag}`" if r['buy_tag'] else "", + "*Sell Tag:* `{sell_tag}`" if r['sell_tag'] else "", "*Open Rate:* `{open_rate:.8f}`", "*Close Rate:* `{close_rate}`" if r['close_rate'] else "", "*Current Rate:* `{current_rate:.8f}`", @@ -845,6 +889,111 @@ class Telegram(RPCHandler): except RPCException as e: self._send_msg(str(e)) + @authorized_only + def _buy_tag_performance(self, update: Update, context: CallbackContext) -> None: + """ + Handler for /buys PAIR . + Shows a performance statistic from finished trades + :param bot: telegram bot + :param update: message update + :return: None + """ + try: + pair=None + if context.args: + pair = context.args[0] + + trades = self._rpc._rpc_buy_tag_performance(pair) + output = "Performance:\n" + for i, trade in enumerate(trades): + stat_line = ( + f"{i+1}.\t {trade['buy_tag']}\t" + f"{round_coin_value(trade['profit_abs'], self._config['stake_currency'])} " + f"({trade['profit']:.2f}%) " + f"({trade['count']})\n") + + if len(output + stat_line) >= MAX_TELEGRAM_MESSAGE_LENGTH: + self._send_msg(output, parse_mode=ParseMode.HTML) + output = stat_line + else: + output += stat_line + + self._send_msg(output, parse_mode=ParseMode.HTML, + reload_able=True, callback_path="update_buy_tag_performance", + query=update.callback_query) + except RPCException as e: + self._send_msg(str(e)) + + @authorized_only + def _sell_tag_performance(self, update: Update, context: CallbackContext) -> None: + """ + Handler for /sells. 
+ Shows a performance statistic from finished trades + :param bot: telegram bot + :param update: message update + :return: None + """ + try: + pair=None + if context.args: + pair = context.args[0] + + trades = self._rpc._rpc_sell_tag_performance(pair) + output = "Performance:\n" + for i, trade in enumerate(trades): + stat_line = ( + f"{i+1}.\t {trade['sell_tag']}\t" + f"{round_coin_value(trade['profit_abs'], self._config['stake_currency'])} " + f"({trade['profit']:.2f}%) " + f"({trade['count']})\n") + + if len(output + stat_line) >= MAX_TELEGRAM_MESSAGE_LENGTH: + self._send_msg(output, parse_mode=ParseMode.HTML) + output = stat_line + else: + output += stat_line + + self._send_msg(output, parse_mode=ParseMode.HTML, + reload_able=True, callback_path="update_sell_tag_performance", + query=update.callback_query) + except RPCException as e: + self._send_msg(str(e)) + + @authorized_only + def _mix_tag_performance(self, update: Update, context: CallbackContext) -> None: + """ + Handler for /mix_tags. 
+ Shows a performance statistic from finished trades + :param bot: telegram bot + :param update: message update + :return: None + """ + try: + pair=None + if context.args: + pair = context.args[0] + + trades = self._rpc._rpc_mix_tag_performance(pair) + output = "Performance:\n" + for i, trade in enumerate(trades): + stat_line = ( + f"{i+1}.\t {trade['mix_tag']}\t" + f"{round_coin_value(trade['profit_abs'], self._config['stake_currency'])} " + f"({trade['profit']:.2f}%) " + f"({trade['count']})\n") + + if len(output + stat_line) >= MAX_TELEGRAM_MESSAGE_LENGTH: + self._send_msg(output, parse_mode=ParseMode.HTML) + output = stat_line + else: + output += stat_line + + self._send_msg(output, parse_mode=ParseMode.HTML, + reload_able=True, callback_path="update_mix_tag_performance", + query=update.callback_query) + except RPCException as e: + self._send_msg(str(e)) + @authorized_only def _count(self, update: Update, context: CallbackContext) -> None: """ @@ -1020,6 +1169,9 @@ class Telegram(RPCHandler): " *table :* `will display trades in a table`\n" " `pending buy orders are marked with an asterisk (*)`\n" " `pending sell orders are marked with a double asterisk (**)`\n" + "*/buys :* `Shows the buy_tag performance`\n" + "*/sells :* `Shows the sell reason performance`\n" + "*/mix_tag :* `Shows combined buy tag + sell reason performance`\n" "*/trades [limit]:* `Lists last closed trades (limited to 10 by default)`\n" "*/profit []:* `Lists cumulative profit from all finished trades, " "over the last n days`\n" diff --git a/freqtrade/strategy/interface.py b/freqtrade/strategy/interface.py index c51860011..68b65b293 100644 --- a/freqtrade/strategy/interface.py +++ b/freqtrade/strategy/interface.py @@ -460,6 +460,7 @@ class IStrategy(ABC, HyperStrategyMixin): dataframe['buy'] = 0 dataframe['sell'] = 0 dataframe['buy_tag'] = None + dataframe['sell_tag'] = None # Other Defs in strategy that want to be called every loop here # twitter_sell = self.watch_twitter_feed(dataframe, 
metadata) @@ -537,7 +538,7 @@ class IStrategy(ABC, HyperStrategyMixin): pair: str, timeframe: str, dataframe: DataFrame - ) -> Tuple[bool, bool, Optional[str]]: + ) -> Tuple[bool, bool, Optional[str], Optional[str]]: """ Calculates current signal based based on the buy / sell columns of the dataframe. Used by Bot to get the signal to buy or sell @@ -572,6 +573,7 @@ class IStrategy(ABC, HyperStrategyMixin): sell = latest[SignalType.SELL.value] == 1 buy_tag = latest.get(SignalTagType.BUY_TAG.value, None) + sell_tag = latest.get(SignalTagType.SELL_TAG.value, None) logger.debug('trigger: %s (pair=%s) buy=%s sell=%s', latest['date'], pair, str(buy), str(sell)) @@ -580,8 +582,8 @@ class IStrategy(ABC, HyperStrategyMixin): current_time=datetime.now(timezone.utc), timeframe_seconds=timeframe_seconds, buy=buy): - return False, sell, buy_tag - return buy, sell, buy_tag + return False, sell, buy_tag, sell_tag + return buy, sell, buy_tag, sell_tag def ignore_expired_candle(self, latest_date: datetime, current_time: datetime, timeframe_seconds: int, buy: bool): diff --git a/freqtrade/user_data/hyperopts/RuleNOTANDoptimizer.py b/freqtrade/user_data/hyperopts/RuleNOTANDoptimizer.py new file mode 100644 index 000000000..f720b59ca --- /dev/null +++ b/freqtrade/user_data/hyperopts/RuleNOTANDoptimizer.py @@ -0,0 +1,1203 @@ +# pragma pylint: disable=missing-docstring, invalid-name, pointless-string-statement +# isort: skip_file +# --- Do not remove these libs --- +from functools import reduce +from typing import Any, Callable, Dict, List + +import numpy as np # noqa +import pandas as pd # noqa +from pandas import DataFrame +from skopt.space import Categorical, Dimension,Integer , Real # noqa +from freqtrade.optimize.space import SKDecimal +from freqtrade.optimize.hyperopt_interface import IHyperOpt + +# -------------------------------- +# Add your lib to import here +import talib.abstract as ta # noqa +import freqtrade.vendor.qtpylib.indicators as qtpylib + +##PYCHARM +import sys 
+sys.path.append(r"/freqtrade/user_data/strategies") + + +# ##HYPEROPT +# import sys,os +# file_dir = os.path.dirname(__file__) +# sys.path.append(file_dir) + + +from z_buyer_mid_volatility import mid_volatility_buyer +from z_seller_mid_volatility import mid_volatility_seller +from z_COMMON_FUNCTIONS import MID_VOLATILITY + + + + +class RuleOptimizer15min(IHyperOpt): + """ + This is a sample hyperopt to inspire you. + Feel free to customize it. + + More information in the documentation: https://www.freqtrade.io/en/latest/hyperopt/ + + You should: + - Rename the class name to some unique name. + - Add any methods you want to build your hyperopt. + - Add any lib you need to build your hyperopt. + + You must keep: + - The prototypes for the methods: populate_indicators, indicator_space, buy_strategy_generator. + + The methods roi_space, generate_roi_table and stoploss_space are not required + and are provided by default. + However, you may override them if you need the + 'roi' and the 'stoploss' spaces that differ from the defaults offered by Freqtrade. + + This sample illustrates how to override these methods. 
+ """ + + + @staticmethod + def buy_strategy_generator(params: Dict[str, Any]) -> Callable: + """ + Define the buy strategy parameters to be used by hyperopt + """ + def populate_buy_trend(dataframe: DataFrame, metadata: dict) -> DataFrame: + """ + Buy strategy Hyperopt will build and use + """ + conditions = [] + + + +#--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- +#--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + + + ##MAIN SELECTORS + +#-------------------- + + ##VOLATILITY + + conditions.append(dataframe['vol_mid'] > 0 ) + + # conditions.append((dataframe['vol_low'] > 0) |(dataframe['vol_mid'] > 0) ) + + # conditions.append((dataframe['vol_high'] > 0) |(dataframe['vol_mid'] > 0) ) + + +#-------------------- + + + ##PICKS TREND COMBO + + conditions.append( + + (dataframe['downtrend'] >= params['main_1_trend_strength']) + |#OR & + (dataframe['downtrendsmall'] >= params['main_2_trend_strength']) + + ) + + ##UPTREND + #conditions.append(dataframe['uptrend'] >= params['main_1_trend_strength']) + ##DOWNTREND + #conditions.append(dataframe['downtrend'] >= params['main_1_trend_strength']) + ##NOTREND + #conditions.append((dataframe['uptrend'] <1)&(dataframe['downtrend'] <1)) + +#------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- +#------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + + ##ABOVE / BELOW THRESHOLDS + + #RSI ABOVE + if 'include_sell_ab_9_rsi_above_value' in params and params['include_sell_ab_9_rsi_above_value']: + 
conditions.append(dataframe['rsi'] > params['sell_ab_9_rsi_above_value']) + #RSI RECENT PIT 5 + if 'include_sell_ab_10_rsi_recent_pit_2_value' in params and params['include_sell_ab_10_rsi_recent_pit_2_value']: + conditions.append(dataframe['rsi'].rolling(2).min() < params['sell_ab_10_rsi_recent_pit_2_value']) + #RSI RECENT PIT 12 + if 'include_sell_ab_11_rsi_recent_pit_4_value' in params and params['include_sell_ab_11_rsi_recent_pit_4_value']: + conditions.append(dataframe['rsi'].rolling(4).min() < params['sell_ab_11_rsi_recent_pit_4_value']) + #RSI5 BELOW + if 'include_sell_ab_12_rsi5_above_value' in params and params['include_sell_ab_12_rsi5_above_value']: + conditions.append(dataframe['rsi5'] > params['sell_ab_12_rsi5_above_value']) + #RSI50 BELOW + if 'include_sell_ab_13_rsi50_above_value' in params and params['include_sell_ab_13_rsi50_above_value']: + conditions.append(dataframe['rsi50'] > params['sell_ab_13_rsi50_above_value']) + +#----------------------- + + #ROC BELOW + if 'include_sell_ab_14_roc_above_value' in params and params['include_sell_ab_14_roc_above_value']: + conditions.append(dataframe['roc'] > (params['sell_ab_14_roc_above_value']/2)) + #ROC50 BELOW + if 'include_sell_ab_15_roc50_above_value' in params and params['include_sell_ab_15_roc50_above_value']: + conditions.append(dataframe['roc50'] > (params['sell_ab_15_roc50_above_value'])) + #ROC2 BELOW + if 'include_sell_ab_16_roc2_above_value' in params and params['include_sell_ab_16_roc2_above_value']: + conditions.append(dataframe['roc2'] > (params['sell_ab_16_roc2_above_value']/2)) + +#----------------------- + + #PPO5 BELOW + if 'include_sell_ab_17_ppo5_above_value' in params and params['include_sell_ab_17_ppo5_above_value']: + conditions.append(dataframe['ppo5'] > (params['sell_ab_17_ppo5_above_value']/2)) + #PPO10 BELOW + if 'include_sell_ab_18_ppo10_above_value' in params and params['include_sell_ab_18_ppo10_above_value']: + conditions.append(dataframe['ppo10'] > 
(params['sell_ab_18_ppo10_above_value']/2)) + #PPO25 BELOW + if 'include_sell_ab_19_ppo25_above_value' in params and params['include_sell_ab_19_ppo25_above_value']: + conditions.append(dataframe['ppo25'] > (params['sell_ab_19_ppo25_above_value']/2)) + + #PPO50 BELOW + if 'include_sell_ab_20_ppo50_above_value' in params and params['include_sell_ab_20_ppo50_above_value']: + conditions.append(dataframe['ppo50'] > (params['sell_ab_20_ppo50_above_value']/2)) + #PPO100 BELOW + if 'include_sell_ab_21_ppo100_above_value' in params and params['include_sell_ab_21_ppo100_above_value']: + conditions.append(dataframe['ppo100'] > (params['sell_ab_21_ppo100_above_value'])) + #PPO200 BELOW + if 'include_sell_ab_22_ppo200_above_value' in params and params['include_sell_ab_22_ppo200_above_value']: + conditions.append(dataframe['ppo200'] > (params['sell_ab_22_ppo200_above_value'])) + #PPO500 BELOW + if 'include_sell_ab_23_ppo500_above_value' in params and params['include_sell_ab_23_ppo500_above_value']: + conditions.append(dataframe['ppo500'] > (params['sell_ab_23_ppo500_above_value']*2)) + + ##USE AT A LATER STEP + + #convsmall BELOW + if 'include_sell_ab_24_convsmall_above_value' in params and params['include_sell_ab_24_convsmall_above_value']: + conditions.append(dataframe['convsmall'] > (params['sell_ab_24_convsmall_above_value']/2)) + #convmedium BELOW + if 'include_sell_ab_25_convmedium_above_value' in params and params['include_sell_ab_25_convmedium_above_value']: + conditions.append(dataframe['convmedium'] >(params['sell_ab_25_convmedium_above_value'])) + #convlarge BELOW + if 'include_sell_ab_26_convlarge_above_value' in params and params['include_sell_ab_26_convlarge_above_value']: + conditions.append(dataframe['convlarge'] > (params['sell_ab_26_convlarge_above_value'])) + #convultra BELOW + if 'include_sell_ab_27_convultra_above_value' in params and params['include_sell_ab_27_convultra_above_value']: + conditions.append(dataframe['convultra'] > 
(params['sell_ab_27_convultra_above_value']/2)) + #convdist BELOW + if 'include_sell_ab_28_convdist_above_value' in params and params['include_sell_ab_28_convdist_above_value']: + conditions.append(dataframe['convdist'] > (params['sell_ab_28_convdist_above_value'])) + + +#------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- +#------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + + ##SMA'S GOING DOWN + + if 'sell_down_0a_sma3' in params and params['sell_down_0a_sma3']: + conditions.append((dataframe['sma3'].shift(1) >dataframe['sma3'])) + if 'sell_down_0b_sma5' in params and params['sell_down_0b_sma5']: + conditions.append((dataframe['sma5'].shift(1) >dataframe['sma5'])) + if 'sell_down_1_sma10' in params and params['sell_down_1_sma10']: + conditions.append((dataframe['sma10'].shift(1) >dataframe['sma10'])) + if 'sell_down_2_sma25' in params and params['sell_down_2_sma25']: + conditions.append((dataframe['sma25'].shift(1) >dataframe['sma25'])) + if 'sell_down_3_sma50' in params and params['sell_down_3_sma50']: + conditions.append((dataframe['sma50'].shift(2) >dataframe['sma50'])) + if 'sell_down_4_sma100' in params and params['sell_down_4_sma100']: + conditions.append((dataframe['sma100'].shift(3) >dataframe['sma100'])) + if 'sell_down_5_sma200' in params and params['sell_down_5_sma200']: + conditions.append((dataframe['sma200'].shift(4) >dataframe['sma200'])) + + if 'sell_down_6_sma400' in params and params['sell_down_6_sma400']: + conditions.append((dataframe['sma400'].shift(4) >dataframe['sma400'])) + if 'sell_down_7_sma10k' in params and params['sell_down_7_sma10k']: + conditions.append((dataframe['sma10k'].shift(5) >dataframe['sma10k'])) + # if 'sell_down_8_sma20k' in params and 
params['sell_down_8_sma20k']: + # conditions.append((dataframe['sma20k'].shift(5) >dataframe['sma20k'])) + # if 'sell_down_9_sma30k' in params and params['sell_down_9_sma30k']: + # conditions.append((dataframe['sma30k'].shift(5) >dataframe['sma30k'])) + + if 'sell_down_10_convsmall' in params and params['sell_down_10_convsmall']: + conditions.append((dataframe['convsmall'].shift(2) >dataframe['convsmall'])) + if 'sell_down_11_convmedium' in params and params['sell_down_11_convmedium']: + conditions.append((dataframe['convmedium'].shift(3) >dataframe['convmedium'])) + if 'sell_down_12_convlarge' in params and params['sell_down_12_convlarge']: + conditions.append((dataframe['convlarge'].shift(4) >dataframe['convlarge'])) + if 'sell_down_13_convultra' in params and params['sell_down_13_convultra']: + conditions.append((dataframe['convultra'].shift(4) >dataframe['convultra'])) + if 'sell_down_14_convdist' in params and params['sell_down_14_convdist']: + conditions.append((dataframe['convdist'].shift(4) >dataframe['convdist'])) + + if 'sell_down_15_vol50' in params and params['sell_down_15_vol50']: + conditions.append((dataframe['vol50'].shift(2) >dataframe['vol50'])) + if 'sell_down_16_vol100' in params and params['sell_down_16_vol100']: + conditions.append((dataframe['vol100'].shift(3) >dataframe['vol100'])) + if 'sell_down_17_vol175' in params and params['sell_down_17_vol175']: + conditions.append((dataframe['vol175'].shift(4) >dataframe['vol175'])) + if 'sell_down_18_vol250' in params and params['sell_down_18_vol250']: + conditions.append((dataframe['vol250'].shift(4) >dataframe['vol250'])) + if 'sell_down_19_vol500' in params and params['sell_down_19_vol500']: + conditions.append((dataframe['vol500'].shift(4) >dataframe['vol500'])) + + if 'sell_down_20_vol1000' in params and params['sell_down_20_vol1000']: + conditions.append((dataframe['vol1000'].shift(4) >dataframe['vol1000'])) + if 'sell_down_21_vol100mean' in params and params['sell_down_21_vol100mean']: + 
conditions.append((dataframe['vol100mean'].shift(4) >dataframe['vol100mean'])) + if 'sell_down_22_vol250mean' in params and params['sell_down_22_vol250mean']: + conditions.append((dataframe['vol250mean'].shift(4) >dataframe['vol250mean'])) + + if 'up_20_conv3' in params and params['up_20_conv3']: + conditions.append(((dataframe['conv3'].shift(25) < dataframe['conv3'])&(dataframe['conv3'].shift(50) < dataframe['conv3']))) + if 'up_21_vol5' in params and params['up_21_vol5']: + conditions.append(((dataframe['vol5'].shift(25) < dataframe['vol5'])&(dataframe['vol5'].shift(50) < dataframe['vol5']))) + if 'up_22_vol5ultra' in params and params['up_22_vol5ultra']: + conditions.append(((dataframe['vol5ultra'].shift(25) < dataframe['vol5ultra'])&(dataframe['vol5ultra'].shift(50) < dataframe['vol5ultra']))) + if 'up_23_vol1ultra' in params and params['up_23_vol1ultra']: + conditions.append(((dataframe['vol1ultra'].shift(25) < dataframe['vol1ultra'])& (dataframe['vol1ultra'].shift(50) < dataframe['vol1ultra']))) + if 'up_24_vol1' in params and params['up_24_vol1']: + conditions.append(((dataframe['vol1'].shift(30) < dataframe['vol1'])&(dataframe['vol1'].shift(10) < dataframe['vol1']))) + if 'up_25_vol5inc24' in params and params['up_25_vol5inc24']: + conditions.append((dataframe['vol5inc24'].shift(50) < dataframe['vol5inc24'])) + + +#------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + + ##ABOVE / BELOW SMAS 1 above/ 0 None / -1 below + + #SMA10 + conditions.append((dataframe['close'] > dataframe['sma10'])|(0.5 > params['ab_1_sma10'])) + conditions.append((dataframe['close'] < dataframe['sma10'])|(-0.5 < params['ab_1_sma10'])) + #SMA25 + conditions.append((dataframe['close'] > dataframe['sma25'])|(0.5 > params['ab_2_sma25'])) + conditions.append((dataframe['close'] < dataframe['sma25'])|(-0.5 < params['ab_2_sma25'])) + #SMA50 + 
conditions.append((dataframe['close'] > dataframe['sma50'])|(0.5 > params['ab_3_sma50']))
+            conditions.append((dataframe['close'] < dataframe['sma50'])|(-0.5 < params['ab_3_sma50']))
+
+
+            #SMA100
+            conditions.append((dataframe['close'] > dataframe['sma100'])|(0.5 > params['ab_4_sma100']))
+            conditions.append((dataframe['close'] < dataframe['sma100'])|(-0.5 < params['ab_4_sma100']))
+            #SMA100
+            conditions.append((dataframe['close'] > dataframe['sma200'])|(0.5 > params['ab_5_sma200']))
+            conditions.append((dataframe['close'] < dataframe['sma200'])|(-0.5 < params['ab_5_sma200']))
+            #SMA400
+            conditions.append((dataframe['close'] > dataframe['sma400'])|(0.5 > params['ab_6_sma400']))
+            conditions.append((dataframe['close'] < dataframe['sma400'])|(-0.5 < params['ab_6_sma400']))
+            #SMA10k
+            conditions.append((dataframe['close'] > dataframe['sma10k'])|(0.5 > params['ab_7_sma10k']))
+            conditions.append((dataframe['close'] < dataframe['sma10k'])|(-0.5 < params['ab_7_sma10k']))
+
+#-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+
+            ##DOWNSWINGS / UPSWINGS PPO'S
+
+            #ppo5 UP OR DOWN (1 UP, 0 NOTHING, -1 DOWN)
+            conditions.append((dataframe['ppo5'].shift(2) <dataframe['ppo5'])|(0.5 > params['sell_swings_1_ppo5_up_or_down_bool']))
+            conditions.append((dataframe['ppo5'].shift(2) >dataframe['ppo5'])|(-0.5 < params['sell_swings_1_ppo5_up_or_down_bool']))
+            #ppo10
+            conditions.append((dataframe['ppo10'].shift(3) <dataframe['ppo10'])|(0.5 > params['sell_swings_2_ppo10_up_or_down_bool']))
+            conditions.append((dataframe['ppo10'].shift(3) >dataframe['ppo10'])|(-0.5 < params['sell_swings_2_ppo10_up_or_down_bool']))
+            #ppo25
+            #conditions.append((dataframe['ppo25'].shift(3) <dataframe['ppo25'])|(0.5 > params['sell_swings_3_ppo25_up_or_down_bool']))
+            conditions.append((dataframe['ppo25'].shift(3) >dataframe['ppo25'])|(-0.5 < params['sell_swings_3_ppo25_up_or_down_bool']))
+
+            #ppo50
+            #conditions.append((dataframe['ppo50'].shift(3) <dataframe['ppo50'])|(0.5 > params['sell_swings_4_ppo50_up_or_down_bool']))
+            conditions.append((dataframe['ppo50'].shift(3) >dataframe['ppo50'])|(-0.5 < params['sell_swings_4_ppo50_up_or_down_bool']))
+            #ppo100
+            #conditions.append((dataframe['ppo100'].shift(4) <dataframe['ppo100'])|(0.5 > params['sell_swings_5_ppo100_up_or_down_bool']))
+            conditions.append((dataframe['ppo100'].shift(4) >dataframe['ppo100'])|(-0.5 < params['sell_swings_5_ppo100_up_or_down_bool']))
+            #ppo200
+            #conditions.append((dataframe['ppo200'].shift(4) <dataframe['ppo200'])|(0.5 > params['sell_swings_6_ppo200_up_or_down_bool']))
+            conditions.append((dataframe['ppo200'].shift(4) >dataframe['ppo200'])|(-0.5 < params['sell_swings_6_ppo200_up_or_down_bool']))
+
+            #ppo500
+            #conditions.append((dataframe['ppo500'].shift(5) <dataframe['ppo500'])|(0.5 > params['sell_swings_7_ppo500_up_or_down_bool']))
+            conditions.append((dataframe['ppo500'].shift(5) >dataframe['ppo500'])|(-0.5 < params['sell_swings_7_ppo500_up_or_down_bool']))
+
+            #roc50
+            #conditions.append((dataframe['roc50'].shift(3) <dataframe['roc50'])|(0.5 > params['sell_swings_8_roc50_up_or_down_bool']))
+            conditions.append((dataframe['roc50'].shift(3) >dataframe['roc50'])|(-0.5 < params['sell_swings_8_roc50_up_or_down_bool']))
+            #roc10
+            #conditions.append((dataframe['roc10'].shift(2) <dataframe['roc10'])|(0.5 > params['sell_swings_9_roc10_up_or_down_bool']))
+            conditions.append((dataframe['roc10'].shift(2) >dataframe['roc10'])|(-0.5 < params['sell_swings_9_roc10_up_or_down_bool']))
+
+
+#-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+#-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+
+
+            ##DISTANCES/ROC
+
+            ##FOR MORE TOP SELLERS
+            #dist50 MORE THAN
+            if 'include_sell_dist_1_dist50_more_value' in params and params['include_sell_dist_1_dist50_more_value']:
+                conditions.append(dataframe['dist50'] >
(params['sell_dist_1_dist50_more_value'])) + #dist200 MORE THAN + if 'include_sell_dist_2_dist200_more_value' in params and params['include_sell_dist_2_dist200_more_value']: + conditions.append(dataframe['dist200'] > (params['sell_dist_2_dist200_more_value'])) + + #dist400 MORE THAN + if 'include_sell_dist_3_dist400_more_value' in params and params['include_sell_dist_3_dist400_more_value']: + conditions.append(dataframe['dist400'] > (params['sell_dist_3_dist400_more_value'])) + #dist10k MORE THAN + if 'include_sell_dist_4_dist10k_more_value' in params and params['include_sell_dist_4_dist10k_more_value']: + conditions.append(dataframe['dist10k'] > (params['sell_dist_4_dist10k_more_value'])) + + ##FOR MORE TOP SELLERS + #more =further from top bol up + #dist_upbol50 MORE THAN + if 'include_sell_dist_5_dist_upbol50_more_value' in params and params['include_sell_dist_5_dist_upbol50_more_value']: + conditions.append(dataframe['dist_upbol50'] > (params['sell_dist_5_dist_upbol50_more_value']/2)) + #dist_upbol100 MORE THAN + if 'include_sell_dist_6_dist_upbol100_more_value' in params and params['include_sell_dist_6_dist_upbol100_more_value']: + conditions.append(dataframe['dist_upbol100'] > (params['sell_dist_6_dist_upbol100_more_value']/2)) + + + ##for bot bol prevent seller + # #less =closer to bot bol + #dist_upbol50 LESS THAN. 
+ #if 'include_sell_dist_7_dist_lowbol50_more_value' in params and params['include_sell_dist_7_dist_lowbol50_more_value']: + # conditions.append(dataframe['dist_lowbol50'] > (params['sell_dist_7_dist_lowbol50_more_value']/2)) + #dist_upbol100 LESS THAN + # if 'include_sell_dist_8_dist_lowbol100_more_value' in params and params['include_sell_dist_8_dist_lowbol100_more_value']: + # conditions.append(dataframe['dist_lowbol100'] > (params['sell_dist_8_dist_lowbol100_more_value']/2)) + + + + ##others + #roc50sma LESS THAN + if 'include_sell_dist_7_roc50sma_less_value' in params and params['include_sell_dist_7_roc50sma_less_value']: + conditions.append(dataframe['roc50sma'] < (params['sell_dist_7_roc50sma_less_value'])*2) + #roc200sma LESS THAN + if 'include_sell_dist_8_roc200sma_less_value' in params and params['include_sell_dist_8_roc200sma_less_value']: + conditions.append(dataframe['roc200sma'] < (params['sell_dist_8_roc200sma_less_value'])*2) + + ##ENABLE TO BUY AWAY FROM HIGH + # #HIGH500 TO CLOSE MORE THAN + #if 'include_sell_dist_9_high100_more_value' in params and params['include_sell_dist_9_high100_more_value']: + # conditions.append((dataframe['high100']-dataframe['close']) > ((dataframe['high100']/100* (params['sell_dist_9_high100_more_value'])) + +#------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + + + + + + + # Check that volume is not 0 + conditions.append(dataframe['volume'] > 0) + + + + + if conditions: + + + # ##ENABLE PRODUCTION BUYS + # dataframe.loc[ + # (add_production_buys_mid(dataframe)), + # 'buy'] = 1 + # + + + dataframe.loc[ + (~(reduce(lambda x, y: x & y, conditions)))&OPTIMIZED_RULE(dataframe,params), + 'buy'] = 1 + + return dataframe + + return populate_buy_trend + + @staticmethod + def indicator_space() -> List[Dimension]: + """ + Define your Hyperopt space for searching strategy parameters + """ + 
return [ + + +#------------------------------------------------------------------------------------------------------- + + ## CUSTOM RULE TRESHOLDS + + # SKDecimal(5.0, 7.0,decimals=1, name='sell_trigger_0_roc_ab_value'),# 5 range 5-7 or 4-7 + # SKDecimal(3.2, 4.5,decimals=1, name='sell_trigger_0_roc2_ab_value'),#3.8 range 3.2-4.5 + # Integer(77, 82, name='sell_trigger_0_rsi_ab_value'),#79 range 82-77 + # Integer(90, 95, name='sell_trigger_0_rsi5_ab_value'),#94 range 95-90 + # Integer(63, 67, name='sell_trigger_0_rsi50_ab_value'),#66 range 67-63 + +#------------------------------------------------------------------------------------------------------- + + ##MAIN + + Categorical([1, 2, 3], name='sell_main_1_trend_strength'), #BIG TREND STR + Categorical([1, 2, 3], name='sell_main_2_trend_strength'), #SMALL UPTREND STR + + + #Categorical([-1, 0, 1], name='sell_main_2_small_uptrend_downtrend'), #SMALL UPTREND ON/OFF 1 is on -1 is down + +#------------------------------------------------------------------------------------------------------- +#------------------------------------------------------------------------------------------------------- + + ##INCLUDE/EXCLUDE RULES + + Categorical([True, False], name='include_sell_ab_9_rsi_above_value'), + Categorical([True, False], name='include_sell_ab_10_rsi_recent_pit_2_value'), + Categorical([True, False], name='include_sell_ab_11_rsi_recent_pit_4_value'), + Categorical([True, False], name='include_sell_ab_12_rsi5_above_value'), + Categorical([True, False], name='include_sell_ab_13_rsi50_above_value'), + + Categorical([True, False], name='include_sell_ab_14_roc_above_value'), + Categorical([True, False], name='include_sell_ab_15_roc50_above_value'), + Categorical([True, False], name='include_sell_ab_16_roc2_above_value'), + + Categorical([True, False], name='include_sell_ab_17_ppo5_above_value'), + Categorical([True, False], name='include_sell_ab_18_ppo10_above_value'), + Categorical([True, False], 
name='include_sell_ab_19_ppo25_above_value'), + + Categorical([True, False], name='include_sell_ab_20_ppo50_above_value'), + Categorical([True, False], name='include_sell_ab_21_ppo100_above_value'), + Categorical([True, False], name='include_sell_ab_22_ppo200_above_value'), + Categorical([True, False], name='include_sell_ab_23_ppo500_above_value'), + + ##USE AT A LATER STEP + Categorical([True, False], name='include_sell_ab_24_convsmall_above_value'), + Categorical([True, False], name='include_sell_ab_25_convmedium_above_value'), + Categorical([True, False], name='include_sell_ab_26_convlarge_above_value'), + Categorical([True, False], name='include_sell_ab_27_convultra_above_value'), + Categorical([True, False], name='include_sell_ab_28_convdist_above_value'), + + Categorical([True, False], name='include_sell_dist_1_dist50_more_value'), + Categorical([True, False], name='include_sell_dist_2_dist200_more_value'), + Categorical([True, False], name='include_sell_dist_3_dist400_more_value'), + Categorical([True, False], name='include_sell_dist_4_dist10k_more_value'), + + Categorical([True, False], name='include_sell_dist_5_dist_upbol50_more_value'), + Categorical([True, False], name='include_sell_dist_6_dist_upbol100_more_value'), + + + # FOR MORE DOWNTREND BUYS LIKELY + # Categorical([True, False], name='include_sell_dist_7_dist_lowbol50_more_value'), + # Categorical([True, False], name='include_sell_dist_8_dist_lowbol100_more_value'), + + #MORE LIKE TRIGGERS + Categorical([True, False], name='include_sell_dist_7_roc50sma_less_value'), + Categorical([True, False], name='include_sell_dist_8_roc200sma_less_value'), + + ##below high 100 + #Categorical([True, False], name='include_sell_dist_9_high100_more_value'), + +#------------------------------------------------------------------------------------------------------- +#------------------------------------------------------------------------------------------------------- + + ##ABOVE/BELOW VALUES + + Integer(35, 82, 
name='sell_ab_9_rsi_above_value'), + Integer(18, 35, name='sell_ab_10_rsi_recent_pit_2_value'), + Integer(18, 35, name='sell_ab_11_rsi_recent_pit_4_value'), + Integer(70, 91, name='sell_ab_12_rsi5_above_value'), + Integer(37, 60, name='sell_ab_13_rsi50_above_value'), + + Integer(-4, 10, name='sell_ab_14_roc_above_value'),#/2 + Integer(-2, 8, name='sell_ab_15_roc50_above_value'), + Integer(-4, 8, name='sell_ab_16_roc2_above_value'),#/2 + +#-------------------------------- + + ##CHANGE DEPENDING WHAT TYPE OF SELL --> PEAK OR DOWTRENDS + Integer(-4, 6, name='sell_ab_17_ppo5_above_value'),#/2 + Integer(-4, 6, name='sell_ab_18_ppo10_above_value'),#/2 + Integer(-10, 8, name='sell_ab_19_ppo25_above_value'),#/2 + + Integer(-10, 8, name='sell_ab_20_ppo50_above_value'),#/2 + Integer(-6, 6, name='sell_ab_21_ppo100_above_value'), + Integer(-6, 6, name='sell_ab_22_ppo200_above_value'), + Integer(-4, 5, name='sell_ab_23_ppo500_above_value'),#*2 + + # ##USE AT A LATER STEP + # + # Integer(-1, 6, name='sell_ab_24_convsmall_above_value'),#/2 # extreme 12 + # Integer(-1, 4, name='sell_ab_25_convmedium_above_value'),# extreme 6 + # Integer(-1, 7, name='sell_ab_26_convlarge_above_value'),# extreme 12 + # Integer(-1, 8, name='sell_ab_27_convultra_above_value'),#/2# extreme 12 + # + # Integer(-1, 6, name='sell_ab_28_convdist_above_value'), #very extreme not useful 10+ + +#------------------------------------------------------------------------------------------------------- + + #SMA'S GOING DOWN + + Categorical([True, False], name='sell_down_0a_sma3'), + Categorical([True, False], name='sell_down_0b_sma5'), + Categorical([True, False], name='sell_down_1_sma10'), + Categorical([True, False], name='sell_down_2_sma25'), + Categorical([True, False], name='sell_down_3_sma50'), + Categorical([True, False], name='sell_down_4_sma100'), + Categorical([True, False], name='sell_down_5_sma200'), + + Categorical([True, False], name='sell_down_6_sma400'), + Categorical([True, False], 
name='sell_down_7_sma10k'), + # Categorical([True, False], name='sell_down_8_sma20k'), + # Categorical([True, False], name='sell_down_9_sma30k'), + + Categorical([True, False], name='sell_down_10_convsmall'), + Categorical([True, False], name='sell_down_11_convmedium'), + Categorical([True, False], name='sell_down_12_convlarge'), + Categorical([True, False], name='sell_down_13_convultra'), + Categorical([True, False], name='sell_down_14_convdist'), + + Categorical([True, False], name='sell_down_15_vol50'), + Categorical([True, False], name='sell_down_16_vol100'), + Categorical([True, False], name='sell_down_17_vol175'), + Categorical([True, False], name='sell_down_18_vol250'), + Categorical([True, False], name='sell_down_19_vol500'), + + Categorical([True, False], name='sell_down_20_vol1000'), + Categorical([True, False], name='sell_down_21_vol100mean'), + Categorical([True, False], name='sell_down_22_vol250mean'), + +#------------------------------------------------------------------------------------------------------- + + ##ABOVE/BELOW SMAS + + Categorical([-1, 0, 1], name='sell_ab_1_sma10'), + Categorical([-1, 0, 1], name='sell_ab_2_sma25'), + Categorical([-1, 0, 1], name='sell_ab_3_sma50'), + + Categorical([-1, 0, 1], name='sell_ab_4_sma100'), + Categorical([-1, 0, 1], name='sell_ab_5_sma200'), + Categorical([-1, 0, 1], name='sell_ab_6_sma400'), + Categorical([-1, 0, 1], name='sell_ab_7_sma10k'), + +#------------------------------------------------------------------------------------------------------- + + ##DOWNSWINGS / UPSWINGS PPO'S + + ##UP OR DOWN (1 UP, 0 NOTHING, -1 DOWN) + + Categorical([-1, 0, 1], name='sell_swings_1_ppo5_up_or_down_bool'), + Categorical([-1, 0, 1], name='sell_swings_2_ppo10_up_or_down_bool'), + Categorical([-1, 0], name='sell_swings_3_ppo25_up_or_down_bool'), + + Categorical([-1, 0], name='sell_swings_4_ppo50_up_or_down_bool'), + Categorical([-1, 0], name='sell_swings_5_ppo100_up_or_down_bool'), + Categorical([-1, 0], 
name='sell_swings_6_ppo200_up_or_down_bool'), + Categorical([-1, 0], name='sell_swings_7_ppo500_up_or_down_bool'), + + Categorical([-1, 0], name='sell_swings_8_roc50_up_or_down_bool'), + Categorical([-1, 0], name='sell_swings_9_roc10_up_or_down_bool'), + +#------------------------------------------------------------------------------------------------------- + + #DISTANCES + + #FOR MORE TOP SELLERS + Integer(-6, 14, name='sell_dist_1_dist50_more_value'), #extreme, useless -4 ,30 + Integer(-8, 20, name='sell_dist_2_dist200_more_value'), #extreme, useless -12-40 + Integer(-15, 30, name='sell_dist_3_dist400_more_value'), + Integer(-15, 35, name='sell_dist_4_dist10k_more_value'), + + #FOR MORE TOP SELLERS + Integer(-30, 25, name='sell_dist_5_dist_upbol50_more_value'),#/2 + Integer(-30, 25, name='sell_dist_6_dist_upbol100_more_value'),#/2 + + + #FOR MORE DOWNTREND BUYS LIKELY + # Integer(-8, 50, name='sell_dist_7_dist_lowbol50_more_value'),#/2 ##set to more, as in higher from lower boll + # Integer(-8, 50, name='sell_dist_8_dist_lowbol100_more_value'),#/2 ##set to more, as in higher from lower boll + + # Integer(-70, 40, name='sell_dist_7_roc50sma_more_value'),#*2 ##fix less more + # Integer(-40, 12, name='sell_dist_8_roc200sma_more_value'),#*2 + + ##below high 100 + #Integer(0, 0, name='sell_dist_9_high100_more_value'), + +#------------------------------------------------------------------------------------------------------- + + + + + ] + + + + @staticmethod + def sell_strategy_generator(params: Dict[str, Any]) -> Callable: + """ + Define the sell strategy parameters to be used by hyperopt + """ + def populate_sell_trend(dataframe: DataFrame, metadata: dict) -> DataFrame: + """ + Sell strategy Hyperopt will build and use + """ + # print(params) + conditions = [] + # GUARDS AND TRENDS + + 
+#--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- +#--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + + + ##MAIN SELECTORS + +#-------------------- + + ##VOLATILITY + + conditions.append(dataframe['vol_mid'] > 0 ) + + # conditions.append((dataframe['vol_low'] > 0) |(dataframe['vol_mid'] > 0) ) + + # conditions.append((dataframe['vol_high'] > 0) |(dataframe['vol_mid'] > 0) ) + +#-------------------- + + + ##PICKS TREND COMBO + + conditions.append( + + (dataframe['uptrend'] >= params['main_1_trend_strength']) + |#OR & + (dataframe['uptrendsmall'] >= params['main_2_trend_strength']) + + ) + + ##UPTREND + #conditions.append(dataframe['uptrend'] >= params['main_1_trend_strength']) + ##DOWNTREND + #conditions.append(dataframe['downtrend'] >= params['main_1_trend_strength']) + ##NOTREND + #conditions.append((dataframe['uptrend'] <1)&(dataframe['downtrend'] <1)) + +#------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- +#------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + + ##ABOVE/BELOW VALUES + + #RSI BELOW + if 'include_ab_9_rsi_below_value' in params and params['include_ab_9_rsi_below_value']: + conditions.append(dataframe['rsi'] < params['ab_9_rsi_below_value']) + #RSI RECENT PEAK 5 + if 'include_ab_10_rsi_recent_peak_2_value' in params and params['include_ab_10_rsi_recent_peak_2_value']: + conditions.append(dataframe['rsi'].rolling(2).max() < params['ab_10_rsi_recent_peak_2_value']) + + #RSI RECENT PEAK 12 + if 
'include_ab_11_rsi_recent_peak_4_value' in params and params['include_ab_11_rsi_recent_peak_4_value']: + conditions.append(dataframe['rsi'].rolling(4).max() < params['ab_11_rsi_recent_peak_4_value']) + #RSI5 BELOW + if 'include_ab_12_rsi5_below_value' in params and params['include_ab_12_rsi5_below_value']: + conditions.append(dataframe['rsi5'] < params['ab_12_rsi5_below_value']) + #RSI50 BELOW + if 'include_ab_13_rsi50_below_value' in params and params['include_ab_13_rsi50_below_value']: + conditions.append(dataframe['rsi50'] < params['ab_13_rsi50_below_value']) + +#----------------------- + + #ROC BELOW + if 'include_ab_14_roc_below_value' in params and params['include_ab_14_roc_below_value']: + conditions.append(dataframe['roc'] < (params['ab_14_roc_below_value']/2)) + #ROC50 BELOW + if 'include_ab_15_roc50_below_value' in params and params['include_ab_15_roc50_below_value']: + conditions.append(dataframe['roc50'] < (params['ab_15_roc50_below_value'])) + #ROC2 BELOW + if 'include_ab_16_roc2_below_value' in params and params['include_ab_16_roc2_below_value']: + conditions.append(dataframe['roc2'] < (params['ab_16_roc2_below_value']/2)) + +#----------------------- + + #PPO5 BELOW + if 'include_ab_17_ppo5_below_value' in params and params['include_ab_17_ppo5_below_value']: + conditions.append(dataframe['ppo5'] < (params['ab_17_ppo5_below_value']/2)) + #PPO10 BELOW + if 'include_ab_18_ppo10_below_value' in params and params['include_ab_18_ppo10_below_value']: + conditions.append(dataframe['ppo10'] < (params['ab_18_ppo10_below_value']/2)) + #PPO25 BELOW + if 'include_ab_19_ppo25_below_value' in params and params['include_ab_19_ppo25_below_value']: + conditions.append(dataframe['ppo25'] < (params['ab_19_ppo25_below_value']/2)) + + #PPO50 BELOW + if 'include_ab_20_ppo50_below_value' in params and params['include_ab_20_ppo50_below_value']: + conditions.append(dataframe['ppo50'] < (params['ab_20_ppo50_below_value']/2)) + #PPO100 BELOW + if 
'include_ab_21_ppo100_below_value' in params and params['include_ab_21_ppo100_below_value']: + conditions.append(dataframe['ppo100'] < (params['ab_21_ppo100_below_value'])) + #PPO200 BELOW + if 'include_ab_22_ppo200_below_value' in params and params['include_ab_22_ppo200_below_value']: + conditions.append(dataframe['ppo200'] < (params['ab_22_ppo200_below_value'])) + #PPO500 BELOW + if 'include_ab_23_ppo500_below_value' in params and params['include_ab_23_ppo500_below_value']: + conditions.append(dataframe['ppo500'] < (params['ab_23_ppo500_below_value']*2)) + + ##USE AT A LATER STEP + + #convsmall BELOW + if 'include_ab_24_convsmall_below_value' in params and params['include_ab_24_convsmall_below_value']: + conditions.append(dataframe['convsmall'] < (params['ab_24_convsmall_below_value']/2)) + #convmedium BELOW + if 'include_ab_25_convmedium_below_value' in params and params['include_ab_25_convmedium_below_value']: + conditions.append(dataframe['convmedium'] < (params['ab_25_convmedium_below_value'])) + #convlarge BELOW + if 'include_ab_26_convlarge_below_value' in params and params['include_ab_26_convlarge_below_value']: + conditions.append(dataframe['convlarge'] < (params['ab_26_convlarge_below_value'])) + #convultra BELOW + if 'include_ab_27_convultra_below_value' in params and params['include_ab_27_convultra_below_value']: + conditions.append(dataframe['convultra'] < (params['ab_27_convultra_below_value']/2)) + #convdist BELOW + if 'include_ab_28_convdist_below_value' in params and params['include_ab_28_convdist_below_value']: + conditions.append(dataframe['convdist'] < (params['ab_28_convdist_below_value'])) + +#------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + + + ##SMA'S GOING UP + + if 'up_0a_sma3' in params and params['up_0a_sma3']: + conditions.append((dataframe['sma3'].shift(1) dataframe['sma10'])|(0.5 > 
params['ab_1_sma10'])) + conditions.append((dataframe['close'] < dataframe['sma10'])|(-0.5 < params['ab_1_sma10'])) + #SMA25 + conditions.append((dataframe['close'] > dataframe['sma25'])|(0.5 > params['ab_2_sma25'])) + conditions.append((dataframe['close'] < dataframe['sma25'])|(-0.5 < params['ab_2_sma25'])) + #SMA50 + conditions.append((dataframe['close'] > dataframe['sma50'])|(0.5 > params['ab_3_sma50'])) + conditions.append((dataframe['close'] < dataframe['sma50'])|(-0.5 < params['ab_3_sma50'])) + + + #SMA100 + conditions.append((dataframe['close'] > dataframe['sma100'])|(0.5 > params['ab_4_sma100'])) + conditions.append((dataframe['close'] < dataframe['sma100'])|(-0.5 < params['ab_4_sma100'])) + #SMA100 + conditions.append((dataframe['close'] > dataframe['sma200'])|(0.5 > params['ab_5_sma200'])) + conditions.append((dataframe['close'] < dataframe['sma200'])|(-0.5 < params['ab_5_sma200'])) + #SMA400 + conditions.append((dataframe['close'] > dataframe['sma400'])|(0.5 > params['ab_6_sma400'])) + conditions.append((dataframe['close'] < dataframe['sma400'])|(-0.5 < params['ab_6_sma400'])) + #SMA10k + conditions.append((dataframe['close'] > dataframe['sma10k'])|(0.5 > params['ab_7_sma10k'])) + conditions.append((dataframe['close'] < dataframe['sma10k'])|(-0.5 < params['ab_7_sma10k'])) + +#------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + + ##DOWNSWINGS / UPSWINGS PPO'S + + #ppo5 UP OR DOWN (1 UP, 0 NOTHING, -1 DOWN) + conditions.append((dataframe['ppo5'].shift(1) params['swings_1_ppo5_up_or_down_bool'])) + conditions.append((dataframe['ppo5'].shift(1) >dataframe['ppo5'])|(-0.5 < params['swings_1_ppo5_up_or_down_bool'])) + #ppo10 + conditions.append((dataframe['ppo10'].shift(1) params['swings_2_ppo10_up_or_down_bool'])) + conditions.append((dataframe['ppo10'].shift(1) >dataframe['ppo10'])|(-0.5 < 
params['swings_2_ppo10_up_or_down_bool'])) + #ppo25 + conditions.append((dataframe['ppo25'].shift(1) params['swings_3_ppo25_up_or_down_bool'])) + #conditions.append((dataframe['ppo25'].shift(1) >dataframe['ppo25'])|(-0.5 < params['swings_3_ppo25_up_or_down_bool'])) + + #ppo50 + conditions.append((dataframe['ppo50'].shift(2) params['swings_4_ppo50_up_or_down_bool'])) + #conditions.append((dataframe['ppo50'].shift(2) >dataframe['ppo50'])|(-0.5 < params['swings_4_ppo50_up_or_down_bool'])) + #ppo100 + conditions.append((dataframe['ppo100'].shift(3) params['swings_5_ppo100_up_or_down_bool'])) + #conditions.append((dataframe['ppo100'].shift(3) >dataframe['ppo100'])|(-0.5 < params['swings_5_ppo100_up_or_down_bool'])) + #ppo200 + conditions.append((dataframe['ppo200'].shift(4) params['swings_6_ppo200_up_or_down_bool'])) + #conditions.append((dataframe['ppo200'].shift(4) >dataframe['ppo200'])|(-0.5 < params['swings_6_ppo200_up_or_down_bool'])) + #ppo500 + conditions.append((dataframe['ppo500'].shift(5) params['swings_7_ppo500_up_or_down_bool'])) + #conditions.append((dataframe['ppo500'].shift(5) >dataframe['ppo500'])|(-0.5 < params['swings_7_ppo500_up_or_down_bool'])) + + #roc50 + conditions.append((dataframe['roc50'].shift(2) params['swings_8_roc50_up_or_down_bool'])) + #conditions.append((dataframe['roc50'].shift(3) >dataframe['roc50'])|(-0.5 < params['swings_8_roc50_up_or_down_bool'])) + #roc10 + conditions.append((dataframe['roc10'].shift(1) params['swings_9_roc10_up_or_down_bool'])) + #conditions.append((dataframe['roc10'].shift(2) >dataframe['roc10'])|(-0.5 < params['swings_9_roc10_up_or_down_bool'])) + + +#------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + + + ##DISTANCES/ROC + + #dist50 LESS THAN + if 'include_dist_1_dist50_less_value' in params and params['include_dist_1_dist50_less_value']: + 
conditions.append(dataframe['dist50'] < (params['dist_1_dist50_less_value'])) + #dist200 LESS THAN + if 'include_dist_2_dist200_less_value' in params and params['include_dist_2_dist200_less_value']: + conditions.append(dataframe['dist200'] < (params['dist_2_dist200_less_value'])) + + #dist400 LESS THAN + if 'include_dist_3_dist400_less_value' in params and params['include_dist_3_dist400_less_value']: + conditions.append(dataframe['dist400'] < (params['dist_3_dist400_less_value'])) + #dist10k LESS THAN + if 'include_dist_4_dist10k_less_value' in params and params['include_dist_4_dist10k_less_value']: + conditions.append(dataframe['dist10k'] < (params['dist_4_dist10k_less_value'])) + + #less =further from top bol + #dist_upbol50 LESS THAN + if 'include_dist_5_dist_upbol50_less_value' in params and params['include_dist_5_dist_upbol50_less_value']: + conditions.append(dataframe['dist_upbol50'] < (params['dist_5_dist_upbol50_less_value']/2)) + #dist_upbol100 LESS THAN + if 'include_dist_6_dist_upbol100_less_value' in params and params['include_dist_6_dist_upbol100_less_value']: + conditions.append(dataframe['dist_upbol100'] < (params['dist_6_dist_upbol100_less_value']/2)) + + # #less =closer to bot bol + # #dist_upbol50 LESS THAN + # if 'include_dist_7_dist_lowbol50_less_value' in params and params['include_dist_7_dist_lowbol50_less_value']: + # conditions.append(dataframe['dist_lowbol50'] < (params['dist_7_dist_lowbol50_less_value']/2)) + # #dist_upbol100 LESS THAN + # if 'include_dist_8_dist_lowbol100_less_value' in params and params['include_dist_8_dist_lowbol100_less_value']: + # conditions.append(dataframe['dist_lowbol100'] < (params['dist_8_dist_lowbol100_less_value']/2)) + + + + #others + ##roc50sma MORE THAN + if 'include_dist_7_roc50sma_less_value' in params and params['include_dist_7_roc50sma_less_value']: + conditions.append(dataframe['roc50sma'] < (params['dist_7_roc50sma_less_value']*2)) + #roc200sma MORE THAN + if 'include_dist_8_roc200sma_less_value' in 
params and params['include_dist_8_roc200sma_less_value']: + conditions.append(dataframe['roc200sma'] < (params['dist_8_roc200sma_less_value']*2)) + + ##ENABLE TO BUY AWAY FROM HIGH + # #HIGH500 TO CLOSE MORE THAN + #if 'include_dist_9_high100_more_value' in params and params['include_dist_9_high100_more_value']: + # conditions.append((dataframe['high100']-dataframe['close']) > ((dataframe['high100']/100* (params['dist_9_high100_more_value'])) + +#------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + + + + + + # Check that volume is not 0 + conditions.append(dataframe['volume'] > 0) + + if conditions: + + + # ##ENABLE SELLS ALWAYS ON OTHER VOLATILITYS + # dataframe.loc[ + # ((dataframe['vol_low'] > 0) |(dataframe['vol_high'] > 0) ), + # 'sell'] = 1 + + + # ##ENABLE PRODUCTION SELLS + # dataframe.loc[ + # (add_production_sells_low(dataframe)), + # 'sell'] = 1 + # + + dataframe.loc[ + (~(reduce(lambda x, y: x & y, conditions)))&OPTIMIZED_RULE(dataframe,params), + 'sell'] = 1 + + return dataframe + + return populate_sell_trend + + @staticmethod + def sell_indicator_space() -> List[Dimension]: + """ + Define your Hyperopt space for searching sell strategy parameters + """ + return [ + + +#------------------------------------------------------------------------------------------------------- + + ## CUSTOM RULE TRESHOLDS + + # SKDecimal(5.0, 7.0,decimals=1, name='sell_trigger_0_roc_ab_value'),# 5 range 5-7 or 4-7 + # SKDecimal(3.2, 4.5,decimals=1, name='sell_trigger_0_roc2_ab_value'),#3.8 range 3.2-4.5 + # Integer(77, 82, name='sell_trigger_0_rsi_ab_value'),#79 range 82-77 + # Integer(90, 95, name='sell_trigger_0_rsi5_ab_value'),#94 range 95-90 + # Integer(63, 67, name='sell_trigger_0_rsi50_ab_value'),#66 range 67-63 + +#------------------------------------------------------------------------------------------------------- + + ##MAIN + + 
Categorical([1, 2, 3], name='main_1_trend_strength'), #UPTREND STR + Categorical([1, 2, 3], name='main_2_trend_strength'), #SMALL UPTREND STR + + + #Categorical([-1, 0, 1], name='main_2_small_uptrend_downtrend'), #SMALL UPTREND ON/OFF 1 is on -1 is down + +#------------------------------------------------------------------------------------------------------- + + ##INCLUDE/EXCLUDE RULES + + Categorical([True, False], name='include_ab_9_rsi_below_value'), + Categorical([True, False], name='include_ab_10_rsi_recent_peak_2_value'), + Categorical([True, False], name='include_ab_11_rsi_recent_peak_4_value'), + Categorical([True, False], name='include_ab_12_rsi5_below_value'), + Categorical([True, False], name='include_ab_13_rsi50_below_value'), + + Categorical([True, False], name='include_ab_14_roc_below_value'), + Categorical([True, False], name='include_ab_15_roc50_below_value'), + Categorical([True, False], name='include_ab_16_roc2_below_value'), + + Categorical([True, False], name='include_ab_17_ppo5_below_value'), + Categorical([True, False], name='include_ab_18_ppo10_below_value'), + Categorical([True, False], name='include_ab_19_ppo25_below_value'), + + Categorical([True, False], name='include_ab_20_ppo50_below_value'), + Categorical([True, False], name='include_ab_21_ppo100_below_value'), + Categorical([True, False], name='include_ab_22_ppo200_below_value'), + Categorical([True, False], name='include_ab_23_ppo500_below_value'), + + ##USE AT A LATER STEP + Categorical([True, False], name='include_ab_24_convsmall_below_value'), + Categorical([True, False], name='include_ab_25_convmedium_below_value'), + Categorical([True, False], name='include_ab_26_convlarge_below_value'), + Categorical([True, False], name='include_ab_27_convultra_below_value'), + + Categorical([True, False], name='include_ab_28_convdist_below_value'), + + Categorical([True, False], name='include_dist_1_dist50_less_value'), + Categorical([True, False], name='include_dist_2_dist200_less_value'), + 
Categorical([True, False], name='include_dist_3_dist400_less_value'), + Categorical([True, False], name='include_dist_4_dist10k_less_value'), + + Categorical([True, False], name='include_dist_5_dist_upbol50_less_value'), + Categorical([True, False], name='include_dist_6_dist_upbol100_less_value'), + + + # FOR MORE DOWNTREND BUYS LIKELY + # Categorical([True, False], name='include_dist_7_dist_lowbol50_less_value'), + # Categorical([True, False], name='include_dist_8_dist_lowbol100_less_value'), + + #MORE LIKE TRIGGERS + Categorical([True, False], name='include_dist_7_roc50sma_less_value'), + Categorical([True, False], name='include_dist_8_roc200sma_less_value'), + + ##below high 100 + #Categorical([True, False], name='include_dist_9_high100_more_value'), + + + +#------------------------------------------------------------------------------------------------------- + + ##ABOVE/BELOW VALUES + + Integer(35, 75, name='ab_9_rsi_below_value'), + Integer(60, 82, name='ab_10_rsi_recent_peak_2_value'), + Integer(60, 82, name='ab_11_rsi_recent_peak_4_value'), + Integer(40, 101, name='ab_12_rsi5_below_value'), + Integer(37, 73, name='ab_13_rsi50_below_value'), + + Integer(-6, 10, name='ab_14_roc_below_value'),#/2 + Integer(-8, 8, name='ab_15_roc50_below_value'), + Integer(-4, 6, name='ab_16_roc2_below_value'),#/2 + +#-------------------------------- + + Integer(-4, 4, name='ab_17_ppo5_below_value'),#/2 + Integer(-5, 5, name='ab_18_ppo10_below_value'),#/2 + Integer(-8, 10, name='ab_19_ppo25_below_value'),#/2 + + Integer(-6, 7, name='ab_20_ppo50_below_value'),#/2 + Integer(-6, 7, name='ab_21_ppo100_below_value'), + Integer(-5, 7, name='ab_22_ppo200_below_value'), + Integer(-4, 4, name='ab_23_ppo500_below_value'),#*2 + + ##USE AT A LATER STEP + + Integer(1, 12, name='ab_24_convsmall_below_value'),#/2 #final + Integer(1, 6, name='ab_25_convmedium_below_value'),#final + Integer(1, 15, name='ab_26_convlarge_below_value'), #final + Integer(2, 12, 
name='ab_27_convultra_below_value'),#/2 #final + + Integer(2, 30, name='ab_28_convdist_below_value'), + +#------------------------------------------------------------------------------------------------------- + + #SMA'S GOING UP + + Categorical([True, False], name='up_0a_sma3'), + Categorical([True, False], name='up_0b_sma5'), + Categorical([True, False], name='up_1_sma10'), + Categorical([True, False], name='up_2_sma25'), + Categorical([True, False], name='up_3_sma50'), + Categorical([True, False], name='up_4_sma100'), + Categorical([True, False], name='up_5_sma200'), + + Categorical([True, False], name='up_6_sma400'), + Categorical([True, False], name='up_7_sma10k'), + # Categorical([True, False], name='up_8_sma20k'), + # Categorical([True, False], name='up_9_sma30k'), + + Categorical([True, False], name='up_10_convsmall'), + Categorical([True, False], name='up_11_convmedium'), + Categorical([True, False], name='up_12_convlarge'), + Categorical([True, False], name='up_13_convultra'), + Categorical([True, False], name='up_14_convdist'), + + Categorical([True, False], name='up_15_vol50'), + Categorical([True, False], name='up_16_vol100'), + Categorical([True, False], name='up_17_vol175'), + Categorical([True, False], name='up_18_vol250'), + Categorical([True, False], name='up_19_vol500'), + + Categorical([True, False], name='up_20_vol1000'), + Categorical([True, False], name='up_21_vol100mean'), + Categorical([True, False], name='up_22_vol250mean'), + +#------------------------------------------------------------------------------------------------------- + + ##ABOVE/BELOW SMAS + + Categorical([-1, 0, 1], name='ab_1_sma10'), + Categorical([-1, 0, 1], name='ab_2_sma25'), + Categorical([-1, 0, 1], name='ab_3_sma50'), + + Categorical([-1, 0, 1], name='ab_4_sma100'), + Categorical([-1, 0, 1], name='ab_5_sma200'), + Categorical([-1, 0, 1], name='ab_6_sma400'), + Categorical([-1, 0, 1], name='ab_7_sma10k'), + 
+#------------------------------------------------------------------------------------------------------- + + ##DOWNSWINGS / UPSWINGS PPO'S + + ##UP OR DOWN (1 UP, 0 NOTHING, -1 DOWN) + + Categorical([-1, 0, 1], name='swings_1_ppo5_up_or_down_bool'), # -1 down, 1 up , 0 off + Categorical([-1, 0, 1],name='swings_2_ppo10_up_or_down_bool'), + Categorical([-1, 0, 1], name='swings_3_ppo25_up_or_down_bool'), #1 up , 0 off + + Categorical([0, 1], name='swings_4_ppo50_up_or_down_bool'), + Categorical([0, 1], name='swings_5_ppo100_up_or_down_bool'), + Categorical([0, 1], name='swings_6_ppo200_up_or_down_bool'), + Categorical([ 0, 1],name='swings_7_ppo500_up_or_down_bool'), + + Categorical([0, 1], name='swings_8_roc50_up_or_down_bool'), + Categorical([0, 1], name='swings_9_roc10_up_or_down_bool'), + +#------------------------------------------------------------------------------------------------------- + + ##DISTANCES + + Integer(-7, 14, name='dist_1_dist50_less_value'), ##extreme 8-30 + Integer(-8, 25, name='dist_2_dist200_less_value'), ##extreme 12 -40 + Integer(-12, 35, name='dist_3_dist400_less_value'), + Integer(-12, 40, name='dist_4_dist10k_less_value'), + + Integer(-25, 30, name='dist_5_dist_upbol50_less_value'),#/2 + Integer(-25, 30, name='dist_6_dist_upbol100_less_value'),#/2 + + + # FOR MORE DOWNTREND BUYS LIKELY + # Integer(-6, 100, name='dist_7_dist_lowbol50_less_value'),#/2 + # Integer(-6, 100, name='dist_8_dist_lowbol100_less_value'),#/2 + + ##MORE LIKE TRIGGERS + # Integer(-40, 70, name='dist_7_roc50sma_less_value'),#*2 ##pretty extreme + # Integer(-12, 40, name='dist_8_roc200sma_less_value'),#*2 + + ##below high 100 + #Integer(0, 0, name='dist_9_high100_more_value'), + +#------------------------------------------------------------------------------------------------------- + + + + + + ] + + +def OPTIMIZED_RULE(dataframe,params): + return( + + (dataframe['sma100'] < dataframe['close']) + + ) + +def add_production_buys_mid(dataframe): + return( + + 
MID_VOLATILITY(dataframe) + & + mid_volatility_buyer(dataframe) + ) + +def add_production_sells_mid(dataframe): + return( + + MID_VOLATILITY(dataframe) + & + mid_volatility_seller(dataframe) + ) + +