diff --git a/docs/strategy_analysis_example.md b/docs/strategy_analysis_example.md
index fbfce37d1..1526ea038 100644
--- a/docs/strategy_analysis_example.md
+++ b/docs/strategy_analysis_example.md
@@ -14,7 +14,7 @@ from freqtrade.configuration import Configuration
 
 # Initialize empty configuration object
 config = Configuration.from_files([])
-# Optionally, use existing configuration file
+# Optionally (recommended), use existing configuration file
 # config = Configuration.from_files(["config.json"])
 
 # Define some constants
@@ -22,7 +22,7 @@ config["timeframe"] = "5m"
 # Name of the strategy class
 config["strategy"] = "SampleStrategy"
 # Location of the data
-data_location = Path(config['user_data_dir'], 'data', 'binance')
+data_location = config['datadir']
 # Pair to analyze - Only use one pair here
 pair = "BTC/USDT"
 ```
diff --git a/docs/utils.md b/docs/utils.md
index 0dd88b242..5646365e4 100644
--- a/docs/utils.md
+++ b/docs/utils.md
@@ -611,6 +611,26 @@ Common arguments:
 
 ```
 
+### Webserver mode - docker
+
+You can also use webserver mode via docker.
+Starting a one-off container requires configuring the port explicitly, as ports are not exposed by default.
+You can use `docker-compose run --rm -p 127.0.0.1:8080:8080 freqtrade webserver` to start a one-off container that'll be removed once you stop it. This assumes that port 8080 is still available and no other bot is running on that port.
+
+Alternatively, you can reconfigure the docker-compose file to have the command updated:
+
+``` yml
+  command: >
+    webserver
+    --config /freqtrade/user_data/config.json
+```
+
+You can now use `docker-compose up` to start the webserver.
+This assumes that the configuration has a webserver enabled and configured for docker (listen address = `0.0.0.0`).
+
+!!! Tip
+    Don't forget to reset the command back to the trade command if you want to start a live or dry-run bot.
+
 ## Show previous Backtest results
 
 Allows you to show previous backtest results.
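As a side note on the `data_location` change above: the analysis example goes on to load candles from that directory. A minimal sketch of that usage, assuming data has already been downloaded for the configured timeframe (the `data_format` value is an assumption and depends on how the data was downloaded):

```python
from freqtrade.data.history import load_pair_history
from freqtrade.enums import CandleType

# Load OHLCV candles for the configured pair from the resolved data directory.
candles = load_pair_history(
    datadir=data_location,           # now simply config['datadir']
    timeframe=config["timeframe"],
    pair=pair,
    data_format="json",              # assumption - use "hdf5" if the data was stored that way
    candle_type=CandleType.SPOT,
)
print(f"Loaded {len(candles)} rows of data for {pair} from {data_location}")
```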
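For the webserver-mode-in-docker section, a quick way to confirm the one-off container is reachable on the published port is to hit the bot's ping endpoint. A minimal sketch, assuming the `127.0.0.1:8080` mapping from the command above and that freqtrade's unauthenticated `/api/v1/ping` REST endpoint is unchanged:

```python
import requests

# Expect {'status': 'pong'} once the webserver is up and the port is published.
resp = requests.get("http://127.0.0.1:8080/api/v1/ping", timeout=5)
resp.raise_for_status()
print(resp.json())
```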
diff --git a/freqtrade/data/history/hdf5datahandler.py b/freqtrade/data/history/hdf5datahandler.py index dadc9c7e6..135d97c79 100644 --- a/freqtrade/data/history/hdf5datahandler.py +++ b/freqtrade/data/history/hdf5datahandler.py @@ -7,9 +7,8 @@ import numpy as np import pandas as pd from freqtrade.configuration import TimeRange -from freqtrade.constants import (DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS, - ListPairsWithTimeframes, TradeList) -from freqtrade.enums import CandleType, TradingMode +from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS, TradeList +from freqtrade.enums import CandleType from .idatahandler import IDataHandler @@ -21,29 +20,6 @@ class HDF5DataHandler(IDataHandler): _columns = DEFAULT_DATAFRAME_COLUMNS - @classmethod - def ohlcv_get_available_data( - cls, datadir: Path, trading_mode: TradingMode) -> ListPairsWithTimeframes: - """ - Returns a list of all pairs with ohlcv data available in this datadir - :param datadir: Directory to search for ohlcv files - :param trading_mode: trading-mode to be used - :return: List of Tuples of (pair, timeframe) - """ - if trading_mode == TradingMode.FUTURES: - datadir = datadir.joinpath('futures') - _tmp = [ - re.search( - cls._OHLCV_REGEX, p.name - ) for p in datadir.glob("*.h5") - ] - return [ - ( - cls.rebuild_pair_from_filename(match[1]), - cls.rebuild_timeframe_from_filename(match[2]), - CandleType.from_string(match[3]) - ) for match in _tmp if match and len(match.groups()) > 1] - @classmethod def ohlcv_get_pairs(cls, datadir: Path, timeframe: str, candle_type: CandleType) -> List[str]: """ diff --git a/freqtrade/data/history/history_utils.py b/freqtrade/data/history/history_utils.py index c972c841c..cba1b60db 100644 --- a/freqtrade/data/history/history_utils.py +++ b/freqtrade/data/history/history_utils.py @@ -56,7 +56,7 @@ def load_pair_history(pair: str, fill_missing=fill_up_missing, drop_incomplete=drop_incomplete, startup_candles=startup_candles, - candle_type=candle_type + candle_type=candle_type, ) @@ -97,14 +97,15 @@ def load_data(datadir: Path, fill_up_missing=fill_up_missing, startup_candles=startup_candles, data_handler=data_handler, - candle_type=candle_type + candle_type=candle_type, ) if not hist.empty: result[pair] = hist else: if candle_type is CandleType.FUNDING_RATE and user_futures_funding_rate is not None: logger.warn(f"{pair} using user specified [{user_futures_funding_rate}]") - result[pair] = DataFrame(columns=["open", "close", "high", "low", "volume"]) + elif candle_type not in (CandleType.SPOT, CandleType.FUTURES): + result[pair] = DataFrame(columns=["date", "open", "close", "high", "low", "volume"]) if fail_without_data and not result: raise OperationalException("No data found. 
Terminating.") diff --git a/freqtrade/data/history/idatahandler.py b/freqtrade/data/history/idatahandler.py index 07dc7c763..eb2441abe 100644 --- a/freqtrade/data/history/idatahandler.py +++ b/freqtrade/data/history/idatahandler.py @@ -39,15 +39,26 @@ class IDataHandler(ABC): raise NotImplementedError() @classmethod - @abstractmethod def ohlcv_get_available_data( cls, datadir: Path, trading_mode: TradingMode) -> ListPairsWithTimeframes: """ Returns a list of all pairs with ohlcv data available in this datadir :param datadir: Directory to search for ohlcv files :param trading_mode: trading-mode to be used - :return: List of Tuples of (pair, timeframe) + :return: List of Tuples of (pair, timeframe, CandleType) """ + if trading_mode == TradingMode.FUTURES: + datadir = datadir.joinpath('futures') + _tmp = [ + re.search( + cls._OHLCV_REGEX, p.name + ) for p in datadir.glob(f"*.{cls._get_file_extension()}")] + return [ + ( + cls.rebuild_pair_from_filename(match[1]), + cls.rebuild_timeframe_from_filename(match[2]), + CandleType.from_string(match[3]) + ) for match in _tmp if match and len(match.groups()) > 1] @classmethod @abstractmethod diff --git a/freqtrade/data/history/jsondatahandler.py b/freqtrade/data/history/jsondatahandler.py index 83ec183df..a62e5e381 100644 --- a/freqtrade/data/history/jsondatahandler.py +++ b/freqtrade/data/history/jsondatahandler.py @@ -8,9 +8,9 @@ from pandas import DataFrame, read_json, to_datetime from freqtrade import misc from freqtrade.configuration import TimeRange -from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, ListPairsWithTimeframes, TradeList +from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, TradeList from freqtrade.data.converter import trades_dict_to_list -from freqtrade.enums import CandleType, TradingMode +from freqtrade.enums import CandleType from .idatahandler import IDataHandler @@ -23,28 +23,6 @@ class JsonDataHandler(IDataHandler): _use_zip = False _columns = DEFAULT_DATAFRAME_COLUMNS - @classmethod - def ohlcv_get_available_data( - cls, datadir: Path, trading_mode: TradingMode) -> ListPairsWithTimeframes: - """ - Returns a list of all pairs with ohlcv data available in this datadir - :param datadir: Directory to search for ohlcv files - :param trading_mode: trading-mode to be used - :return: List of Tuples of (pair, timeframe) - """ - if trading_mode == 'futures': - datadir = datadir.joinpath('futures') - _tmp = [ - re.search( - cls._OHLCV_REGEX, p.name - ) for p in datadir.glob(f"*.{cls._get_file_extension()}")] - return [ - ( - cls.rebuild_pair_from_filename(match[1]), - cls.rebuild_timeframe_from_filename(match[2]), - CandleType.from_string(match[3]) - ) for match in _tmp if match and len(match.groups()) > 1] - @classmethod def ohlcv_get_pairs(cls, datadir: Path, timeframe: str, candle_type: CandleType) -> List[str]: """ diff --git a/freqtrade/exchange/exchange.py b/freqtrade/exchange/exchange.py index dbc3447be..c15481ca5 100644 --- a/freqtrade/exchange/exchange.py +++ b/freqtrade/exchange/exchange.py @@ -2377,7 +2377,8 @@ class Exchange: return try: - self._api.set_leverage(symbol=pair, leverage=leverage) + res = self._api.set_leverage(symbol=pair, leverage=leverage) + self._log_exchange_response('set_leverage', res) except ccxt.DDoSProtection as e: raise DDosProtection(e) from e except (ccxt.NetworkError, ccxt.ExchangeError) as e: @@ -2405,7 +2406,6 @@ class Exchange: if self.trading_mode in TradingMode.SPOT: return None elif ( - self.margin_mode == MarginMode.ISOLATED and self.trading_mode == TradingMode.FUTURES ): 
wallet_balance = (amount * open_rate) / leverage @@ -2421,7 +2421,7 @@ class Exchange: return isolated_liq else: raise OperationalException( - "Freqtrade only supports isolated futures for leverage trading") + "Freqtrade currently only supports futures for leverage trading.") def funding_fee_cutoff(self, open_date: datetime): """ @@ -2441,7 +2441,8 @@ class Exchange: return try: - self._api.set_margin_mode(margin_mode.value, pair, params) + res = self._api.set_margin_mode(margin_mode.value, pair, params) + self._log_exchange_response('set_margin_mode', res) except ccxt.DDoSProtection as e: raise DDosProtection(e) from e except (ccxt.NetworkError, ccxt.ExchangeError) as e: @@ -2599,7 +2600,7 @@ class Exchange: """ if self.trading_mode == TradingMode.SPOT: return None - elif (self.trading_mode != TradingMode.FUTURES and self.margin_mode != MarginMode.ISOLATED): + elif (self.trading_mode != TradingMode.FUTURES): raise OperationalException( f"{self.name} does not support {self.margin_mode.value} {self.trading_mode.value}") diff --git a/freqtrade/exchange/gateio.py b/freqtrade/exchange/gateio.py index 6df3425d2..c6ed0c66c 100644 --- a/freqtrade/exchange/gateio.py +++ b/freqtrade/exchange/gateio.py @@ -34,6 +34,7 @@ class Gateio(Exchange): _ft_has_futures: Dict = { "needs_trading_fees": True, + "ohlcv_volume_currency": "base", "fee_cost_in_contracts": False, # Set explicitly to false for clarity "order_props_in_contracts": ['amount', 'filled', 'remaining'], } diff --git a/freqtrade/freqai/data_kitchen.py b/freqtrade/freqai/data_kitchen.py index 85041515a..48f0bb5d1 100644 --- a/freqtrade/freqai/data_kitchen.py +++ b/freqtrade/freqai/data_kitchen.py @@ -659,6 +659,114 @@ class FreqaiDataKitchen: return + def compute_inlier_metric(self, set_='train') -> None: + """ + + Compute inlier metric from backwards distance distributions. + This metric defines how well features from a timepoint fit + into previous timepoints. 
+ """ + + import scipy.stats as ss + + no_prev_pts = self.freqai_config["feature_parameters"]["inlier_metric_window"] + weib_pct = self.freqai_config["feature_parameters"]["inlier_metric_weibull_cutoff"] + + if set_ == 'train': + compute_df = copy.deepcopy(self.data_dictionary['train_features']) + elif set_ == 'test': + compute_df = copy.deepcopy(self.data_dictionary['test_features']) + else: + compute_df = copy.deepcopy(self.data_dictionary['prediction_features']) + + compute_df_reindexed = compute_df.reindex( + index=np.flip(compute_df.index) + ) + + pairwise = pd.DataFrame( + np.triu( + pairwise_distances(compute_df_reindexed, n_jobs=self.thread_count) + ), + columns=compute_df_reindexed.index, + index=compute_df_reindexed.index + ) + pairwise = pairwise.round(5) + + column_labels = [ + '{}{}'.format('d', i) for i in range(1, no_prev_pts + 1) + ] + distances = pd.DataFrame( + columns=column_labels, index=compute_df.index + ) + + for index in compute_df.index[no_prev_pts:]: + current_row = pairwise.loc[[index]] + current_row_no_zeros = current_row.loc[ + :, (current_row != 0).any(axis=0) + ] + distances.loc[[index]] = current_row_no_zeros.iloc[ + :, :no_prev_pts + ] + distances = distances.replace([np.inf, -np.inf], np.nan) + drop_index = pd.isnull(distances).any(1) + distances = distances[drop_index == 0] + + inliers = pd.DataFrame(index=distances.index) + for key in distances.keys(): + current_distances = distances[key].dropna() + fit_params = ss.weibull_min.fit(current_distances) + cutoff = ss.weibull_min.ppf(weib_pct, *fit_params) + is_inlier = np.where( + current_distances <= cutoff, 1, 0 + ) + df_inlier = pd.DataFrame( + {key + '_IsInlier': is_inlier}, index=distances.index + ) + inliers = pd.concat( + [inliers, df_inlier], axis=1 + ) + + inlier_metric = pd.DataFrame( + data=inliers.sum(axis=1) / no_prev_pts, + columns=['inlier_metric'], + index=compute_df.index + ) + + inlier_metric = 2 * (inlier_metric - inlier_metric.min()) / \ + (inlier_metric.max() - inlier_metric.min()) - 1 + + if set_ in ('train', 'test'): + inlier_metric = inlier_metric.iloc[no_prev_pts:] + compute_df = compute_df.iloc[no_prev_pts:] + self.remove_beginning_points_from_data_dict(set_, no_prev_pts) + self.data_dictionary[f'{set_}_features'] = pd.concat( + [compute_df, inlier_metric], axis=1) + else: + self.data_dictionary['prediction_features'] = pd.concat( + [compute_df, inlier_metric], axis=1) + self.data_dictionary['prediction_features'].fillna(0, inplace=True) + + return None + + def remove_beginning_points_from_data_dict(self, set_='train', no_prev_pts: int = 10): + features = self.data_dictionary[f'{set_}_features'] + weights = self.data_dictionary[f'{set_}_weights'] + labels = self.data_dictionary[f'{set_}_labels'] + self.data_dictionary[f'{set_}_weights'] = weights[no_prev_pts:] + self.data_dictionary[f'{set_}_features'] = features.iloc[no_prev_pts:] + self.data_dictionary[f'{set_}_labels'] = labels.iloc[no_prev_pts:] + + def add_noise_to_training_features(self) -> None: + """ + Add noise to train features to reduce the risk of overfitting. 
+ """ + mu = 0 # no shift + sigma = self.freqai_config["feature_parameters"]["noise_standard_deviation"] + compute_df = self.data_dictionary['train_features'] + noise = np.random.normal(mu, sigma, [compute_df.shape[0], compute_df.shape[1]]) + self.data_dictionary['train_features'] += noise + return + def find_features(self, dataframe: DataFrame) -> None: """ Find features in the strategy provided dataframe diff --git a/freqtrade/freqai/freqai_interface.py b/freqtrade/freqai/freqai_interface.py index 49e4ce5c3..07303b49f 100644 --- a/freqtrade/freqai/freqai_interface.py +++ b/freqtrade/freqai/freqai_interface.py @@ -66,7 +66,6 @@ class IFreqaiModel(ABC): "data_split_parameters", {}) self.model_training_parameters: Dict[str, Any] = config.get("freqai", {}).get( "model_training_parameters", {}) - self.feature_parameters = config.get("freqai", {}).get("feature_parameters") self.retrain = False self.first = True self.set_full_path() @@ -74,11 +73,14 @@ class IFreqaiModel(ABC): self.dd = FreqaiDataDrawer(Path(self.full_path), self.config, self.follow_mode) self.identifier: str = self.freqai_info.get("identifier", "no_id_provided") self.scanning = False + self.ft_params = self.freqai_info["feature_parameters"] self.keras: bool = self.freqai_info.get("keras", False) - if self.keras and self.freqai_info.get("feature_parameters", {}).get("DI_threshold", 0): - self.freqai_info["feature_parameters"]["DI_threshold"] = 0 + if self.keras and self.ft_params.get("DI_threshold", 0): + self.ft_params["DI_threshold"] = 0 logger.warning("DI threshold is not configured for Keras models yet. Deactivating.") self.CONV_WIDTH = self.freqai_info.get("conv_width", 2) + if self.ft_params.get("inlier_metric_window", 0): + self.CONV_WIDTH = self.ft_params.get("inlier_metric_window", 0) * 2 self.pair_it = 0 self.total_pairs = len(self.config.get("exchange", {}).get("pair_whitelist")) self.last_trade_database_summary: DataFrame = {} @@ -383,24 +385,25 @@ class IFreqaiModel(ABC): def data_cleaning_train(self, dk: FreqaiDataKitchen) -> None: """ - Base data cleaning method for train - Any function inside this method should drop training data points from the filtered_dataframe - based on user decided logic. See FreqaiDataKitchen::use_SVM_to_remove_outliers() for an - example of how outlier data points are dropped from the dataframe used for training. + Base data cleaning method for train. + Functions here improve/modify the input data by identifying outliers, + computing additional metrics, adding noise, reducing dimensionality etc. 
""" - if self.freqai_info["feature_parameters"].get( + ft_params = self.freqai_info["feature_parameters"] + + if ft_params.get( "principal_component_analysis", False ): dk.principal_component_analysis() - if self.freqai_info["feature_parameters"].get("use_SVM_to_remove_outliers", False): + if ft_params.get("use_SVM_to_remove_outliers", False): dk.use_SVM_to_remove_outliers(predict=False) - if self.freqai_info["feature_parameters"].get("DI_threshold", 0): + if ft_params.get("DI_threshold", 0): dk.data["avg_mean_dist"] = dk.compute_distances() - if self.freqai_info["feature_parameters"].get("use_DBSCAN_to_remove_outliers", False): + if ft_params.get("use_DBSCAN_to_remove_outliers", False): if dk.pair in self.dd.old_DBSCAN_eps: eps = self.dd.old_DBSCAN_eps[dk.pair] else: @@ -408,29 +411,36 @@ class IFreqaiModel(ABC): dk.use_DBSCAN_to_remove_outliers(predict=False, eps=eps) self.dd.old_DBSCAN_eps[dk.pair] = dk.data['DBSCAN_eps'] + if ft_params.get('inlier_metric_window', 0): + dk.compute_inlier_metric(set_='train') + if self.freqai_info["data_split_parameters"]["test_size"] > 0: + dk.compute_inlier_metric(set_='test') + + if self.freqai_info["feature_parameters"].get('noise_standard_deviation', 0): + dk.add_noise_to_training_features() + def data_cleaning_predict(self, dk: FreqaiDataKitchen, dataframe: DataFrame) -> None: """ Base data cleaning method for predict. - These functions each modify dk.do_predict, which is a dataframe with equal length - to the number of candles coming from and returning to the strategy. Inside do_predict, - 1 allows prediction and < 0 signals to the strategy that the model is not confident in - the prediction. - See FreqaiDataKitchen::remove_outliers() for an example - of how the do_predict vector is modified. do_predict is ultimately passed back to strategy - for buy signals. + Functions here are complementary to the functions of data_cleaning_train. 
""" - if self.freqai_info["feature_parameters"].get( + ft_params = self.freqai_info["feature_parameters"] + + if ft_params.get('inlier_metric_window', 0): + dk.compute_inlier_metric(set_='predict') + + if ft_params.get( "principal_component_analysis", False ): dk.pca_transform(dataframe) - if self.freqai_info["feature_parameters"].get("use_SVM_to_remove_outliers", False): + if ft_params.get("use_SVM_to_remove_outliers", False): dk.use_SVM_to_remove_outliers(predict=True) - if self.freqai_info["feature_parameters"].get("DI_threshold", 0): + if ft_params.get("DI_threshold", 0): dk.check_if_pred_in_training_spaces() - if self.freqai_info["feature_parameters"].get("use_DBSCAN_to_remove_outliers", False): + if ft_params.get("use_DBSCAN_to_remove_outliers", False): dk.use_DBSCAN_to_remove_outliers(predict=True) def model_exists( diff --git a/freqtrade/freqtradebot.py b/freqtrade/freqtradebot.py index a214efd76..4e3af64ea 100644 --- a/freqtrade/freqtradebot.py +++ b/freqtrade/freqtradebot.py @@ -418,7 +418,7 @@ class FreqtradeBot(LoggingMixin): whitelist = copy.deepcopy(self.active_pair_whitelist) if not whitelist: - logger.info("Active pair whitelist is empty.") + self.log_once("Active pair whitelist is empty.", logger.info) return trades_created # Remove pairs for currently opened trades from the whitelist for trade in Trade.get_open_trades(): @@ -427,8 +427,8 @@ class FreqtradeBot(LoggingMixin): logger.debug('Ignoring %s in pair whitelist', trade.pair) if not whitelist: - logger.info("No currency pair in active pair whitelist, " - "but checking to exit open trades.") + self.log_once("No currency pair in active pair whitelist, " + "but checking to exit open trades.", logger.info) return trades_created if PairLocks.is_global_lock(side='*'): # This only checks for total locks (both sides). diff --git a/freqtrade/persistence/migrations.py b/freqtrade/persistence/migrations.py index e54675f16..311554359 100644 --- a/freqtrade/persistence/migrations.py +++ b/freqtrade/persistence/migrations.py @@ -307,7 +307,9 @@ def check_migrate(engine, decl_base, previous_tables) -> None: # Migrates both trades and orders table! # if ('orders' not in previous_tables # or not has_column(cols_orders, 'stop_price')): + migrating = False if not has_column(cols_trades, 'precision_mode'): + migrating = True logger.info(f"Running database migration for trades - " f"backup: {table_back_name}, {order_table_bak_name}") migrate_trades_and_orders_table( @@ -315,6 +317,7 @@ def check_migrate(engine, decl_base, previous_tables) -> None: order_table_bak_name, cols_orders) if not has_column(cols_pairlocks, 'side'): + migrating = True logger.info(f"Running database migration for pairlocks - " f"backup: {pairlock_table_bak_name}") @@ -329,3 +332,6 @@ def check_migrate(engine, decl_base, previous_tables) -> None: set_sqlite_to_wal(engine) fix_old_dry_orders(engine) + + if migrating: + logger.info("Database migration finished.") diff --git a/freqtrade/persistence/models.py b/freqtrade/persistence/models.py index f0fa05343..7f851322e 100644 --- a/freqtrade/persistence/models.py +++ b/freqtrade/persistence/models.py @@ -53,7 +53,7 @@ def init_db(db_url: str) -> None: # https://docs.sqlalchemy.org/en/13/orm/contextual.html#thread-local-scope # Scoped sessions proxy requests to the appropriate thread-local session. 
# We should use the scoped_session object - not a seperately initialized version - Trade._session = scoped_session(sessionmaker(bind=engine, autoflush=True)) + Trade._session = scoped_session(sessionmaker(bind=engine, autoflush=False)) Trade.query = Trade._session.query_property() Order.query = Trade._session.query_property() PairLock.query = Trade._session.query_property() diff --git a/freqtrade/plugins/pairlist/PrecisionFilter.py b/freqtrade/plugins/pairlist/PrecisionFilter.py index 521f38635..dcd153d8e 100644 --- a/freqtrade/plugins/pairlist/PrecisionFilter.py +++ b/freqtrade/plugins/pairlist/PrecisionFilter.py @@ -51,6 +51,11 @@ class PrecisionFilter(IPairList): :param ticker: ticker dict as returned from ccxt.fetch_tickers() :return: True if the pair can stay, false if it should be removed """ + if ticker.get('last', None) is None: + self.log_once(f"Removed {ticker['symbol']} from whitelist, because " + "ticker['last'] is empty (Usually no trade in the last 24h).", + logger.info) + return False stop_price = ticker['last'] * self._stoploss # Adjust stop-prices to precision diff --git a/freqtrade/templates/strategy_analysis_example.ipynb b/freqtrade/templates/strategy_analysis_example.ipynb index a7430c225..77444a023 100644 --- a/freqtrade/templates/strategy_analysis_example.ipynb +++ b/freqtrade/templates/strategy_analysis_example.ipynb @@ -30,7 +30,7 @@ "\n", "# Initialize empty configuration object\n", "config = Configuration.from_files([])\n", - "# Optionally, use existing configuration file\n", + "# Optionally (recommended), use existing configuration file\n", "# config = Configuration.from_files([\"config.json\"])\n", "\n", "# Define some constants\n", @@ -38,7 +38,7 @@ "# Name of the strategy class\n", "config[\"strategy\"] = \"SampleStrategy\"\n", "# Location of the data\n", - "data_location = Path(config['user_data_dir'], 'data', 'binance')\n", + "data_location = config['datadir']\n", "# Pair to analyze - Only use one pair here\n", "pair = \"BTC/USDT\"" ] @@ -365,7 +365,7 @@ "metadata": { "file_extension": ".py", "kernelspec": { - "display_name": "Python 3", + "display_name": "Python 3.9.7 64-bit ('trade_397')", "language": "python", "name": "python3" }, @@ -379,7 +379,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.5" + "version": "3.9.7" }, "mimetype": "text/x-python", "name": "python", @@ -427,7 +427,12 @@ ], "window_display": false }, - "version": 3 + "version": 3, + "vscode": { + "interpreter": { + "hash": "675f32a300d6d26767470181ad0b11dd4676bcce7ed1dd2ffe2fbc370c95fc7c" + } + } }, "nbformat": 4, "nbformat_minor": 4 diff --git a/freqtrade/wallets.py b/freqtrade/wallets.py index 14e5a6743..41115c72e 100644 --- a/freqtrade/wallets.py +++ b/freqtrade/wallets.py @@ -148,7 +148,7 @@ class Wallets: # Position is not open ... 
continue size = self._exchange._contracts_to_amount(symbol, position['contracts']) - collateral = position['collateral'] + collateral = position['collateral'] or 0.0 leverage = position['leverage'] self._positions[symbol] = PositionWallet( symbol, position=size, diff --git a/tests/plugins/test_pairlist.py b/tests/plugins/test_pairlist.py index 5974bee89..48a0f81cb 100644 --- a/tests/plugins/test_pairlist.py +++ b/tests/plugins/test_pairlist.py @@ -366,6 +366,9 @@ def test_VolumePairList_refresh_empty(mocker, markets_empty, whitelist_conf): ([{"method": "VolumePairList", "number_assets": 5, "sort_key": "quoteVolume"}, {"method": "PrecisionFilter"}], "BTC", ['ETH/BTC', 'TKN/BTC', 'LTC/BTC', 'XRP/BTC']), + ([{"method": "VolumePairList", "number_assets": 5, "sort_key": "quoteVolume"}, + {"method": "PrecisionFilter"}], + "USDT", ['ETH/USDT', 'NANO/USDT']), # PriceFilter and VolumePairList ([{"method": "VolumePairList", "number_assets": 5, "sort_key": "quoteVolume"}, {"method": "PriceFilter", "low_price_ratio": 0.03}], diff --git a/tests/plugins/test_protections.py b/tests/plugins/test_protections.py index 8a5356b3e..acfe124a8 100644 --- a/tests/plugins/test_protections.py +++ b/tests/plugins/test_protections.py @@ -67,6 +67,8 @@ def generate_mock_trade(pair: str, fee: float, is_open: bool, trade.close(open_rate * (2 - profit_rate if is_short else profit_rate)) trade.exit_reason = exit_reason + Trade.query.session.add(trade) + Trade.commit() return trade @@ -125,33 +127,33 @@ def test_stoploss_guard(mocker, default_conf, fee, caplog, is_short): assert not log_has_re(message, caplog) caplog.clear() - Trade.query.session.add(generate_mock_trade( + generate_mock_trade( 'XRP/BTC', fee.return_value, False, exit_reason=ExitType.STOP_LOSS.value, min_ago_open=200, min_ago_close=30, is_short=is_short, - )) + ) assert not freqtrade.protections.global_stop() assert not log_has_re(message, caplog) caplog.clear() # This trade does not count, as it's closed too long ago - Trade.query.session.add(generate_mock_trade( + generate_mock_trade( 'BCH/BTC', fee.return_value, False, exit_reason=ExitType.STOP_LOSS.value, min_ago_open=250, min_ago_close=100, is_short=is_short, - )) + ) - Trade.query.session.add(generate_mock_trade( + generate_mock_trade( 'ETH/BTC', fee.return_value, False, exit_reason=ExitType.STOP_LOSS.value, min_ago_open=240, min_ago_close=30, is_short=is_short, - )) + ) # 3 Trades closed - but the 2nd has been closed too long ago. 
assert not freqtrade.protections.global_stop() assert not log_has_re(message, caplog) caplog.clear() - Trade.query.session.add(generate_mock_trade( + generate_mock_trade( 'LTC/BTC', fee.return_value, False, exit_reason=ExitType.STOP_LOSS.value, min_ago_open=180, min_ago_close=30, is_short=is_short, - )) + ) assert freqtrade.protections.global_stop() assert log_has_re(message, caplog) @@ -186,25 +188,25 @@ def test_stoploss_guard_perpair(mocker, default_conf, fee, caplog, only_per_pair assert not log_has_re(message, caplog) caplog.clear() - Trade.query.session.add(generate_mock_trade( + generate_mock_trade( pair, fee.return_value, False, exit_reason=ExitType.STOP_LOSS.value, min_ago_open=200, min_ago_close=30, profit_rate=0.9, is_short=is_short - )) + ) assert not freqtrade.protections.stop_per_pair(pair) assert not freqtrade.protections.global_stop() assert not log_has_re(message, caplog) caplog.clear() # This trade does not count, as it's closed too long ago - Trade.query.session.add(generate_mock_trade( + generate_mock_trade( pair, fee.return_value, False, exit_reason=ExitType.STOP_LOSS.value, min_ago_open=250, min_ago_close=100, profit_rate=0.9, is_short=is_short - )) + ) # Trade does not count for per pair stop as it's the wrong pair. - Trade.query.session.add(generate_mock_trade( + generate_mock_trade( 'ETH/BTC', fee.return_value, False, exit_reason=ExitType.STOP_LOSS.value, min_ago_open=240, min_ago_close=30, profit_rate=0.9, is_short=is_short - )) + ) # 3 Trades closed - but the 2nd has been closed too long ago. assert not freqtrade.protections.stop_per_pair(pair) assert freqtrade.protections.global_stop() != only_per_pair @@ -216,10 +218,10 @@ def test_stoploss_guard_perpair(mocker, default_conf, fee, caplog, only_per_pair caplog.clear() # Trade does not count potentially, as it's in the wrong direction - Trade.query.session.add(generate_mock_trade( + generate_mock_trade( pair, fee.return_value, False, exit_reason=ExitType.STOP_LOSS.value, min_ago_open=150, min_ago_close=25, profit_rate=0.9, is_short=not is_short - )) + ) freqtrade.protections.stop_per_pair(pair) assert freqtrade.protections.global_stop() != only_per_pair assert PairLocks.is_pair_locked(pair, side=check_side) != (only_per_side and only_per_pair) @@ -231,10 +233,10 @@ def test_stoploss_guard_perpair(mocker, default_conf, fee, caplog, only_per_pair caplog.clear() # 2nd Trade that counts with correct pair - Trade.query.session.add(generate_mock_trade( + generate_mock_trade( pair, fee.return_value, False, exit_reason=ExitType.STOP_LOSS.value, min_ago_open=180, min_ago_close=30, profit_rate=0.9, is_short=is_short - )) + ) freqtrade.protections.stop_per_pair(pair) assert freqtrade.protections.global_stop() != only_per_pair @@ -259,20 +261,20 @@ def test_CooldownPeriod(mocker, default_conf, fee, caplog): assert not log_has_re(message, caplog) caplog.clear() - Trade.query.session.add(generate_mock_trade( + generate_mock_trade( 'XRP/BTC', fee.return_value, False, exit_reason=ExitType.STOP_LOSS.value, min_ago_open=200, min_ago_close=30, - )) + ) assert not freqtrade.protections.global_stop() assert freqtrade.protections.stop_per_pair('XRP/BTC') assert PairLocks.is_pair_locked('XRP/BTC') assert not PairLocks.is_global_lock() - Trade.query.session.add(generate_mock_trade( + generate_mock_trade( 'ETH/BTC', fee.return_value, False, exit_reason=ExitType.ROI.value, min_ago_open=205, min_ago_close=35, - )) + ) assert not freqtrade.protections.global_stop() assert not PairLocks.is_pair_locked('ETH/BTC') @@ -300,10 +302,10 @@ def 
test_LowProfitPairs(mocker, default_conf, fee, caplog, only_per_side): assert not log_has_re(message, caplog) caplog.clear() - Trade.query.session.add(generate_mock_trade( + generate_mock_trade( 'XRP/BTC', fee.return_value, False, exit_reason=ExitType.STOP_LOSS.value, min_ago_open=800, min_ago_close=450, profit_rate=0.9, - )) + ) Trade.commit() # Not locked with 1 trade @@ -312,10 +314,10 @@ def test_LowProfitPairs(mocker, default_conf, fee, caplog, only_per_side): assert not PairLocks.is_pair_locked('XRP/BTC') assert not PairLocks.is_global_lock() - Trade.query.session.add(generate_mock_trade( + generate_mock_trade( 'XRP/BTC', fee.return_value, False, exit_reason=ExitType.STOP_LOSS.value, min_ago_open=200, min_ago_close=120, profit_rate=0.9, - )) + ) Trade.commit() # Not locked with 1 trade (first trade is outside of lookback_period) @@ -325,19 +327,19 @@ def test_LowProfitPairs(mocker, default_conf, fee, caplog, only_per_side): assert not PairLocks.is_global_lock() # Add positive trade - Trade.query.session.add(generate_mock_trade( + generate_mock_trade( 'XRP/BTC', fee.return_value, False, exit_reason=ExitType.ROI.value, min_ago_open=20, min_ago_close=10, profit_rate=1.15, is_short=True - )) + ) Trade.commit() assert freqtrade.protections.stop_per_pair('XRP/BTC') != only_per_side assert not PairLocks.is_pair_locked('XRP/BTC', side='*') assert PairLocks.is_pair_locked('XRP/BTC', side='long') == only_per_side - Trade.query.session.add(generate_mock_trade( + generate_mock_trade( 'XRP/BTC', fee.return_value, False, exit_reason=ExitType.STOP_LOSS.value, min_ago_open=110, min_ago_close=21, profit_rate=0.8, - )) + ) Trade.commit() # Locks due to 2nd trade @@ -365,36 +367,38 @@ def test_MaxDrawdown(mocker, default_conf, fee, caplog): assert not freqtrade.protections.stop_per_pair('XRP/BTC') caplog.clear() - Trade.query.session.add(generate_mock_trade( + generate_mock_trade( 'XRP/BTC', fee.return_value, False, exit_reason=ExitType.STOP_LOSS.value, min_ago_open=1000, min_ago_close=900, profit_rate=1.1, - )) - Trade.query.session.add(generate_mock_trade( + ) + generate_mock_trade( 'ETH/BTC', fee.return_value, False, exit_reason=ExitType.STOP_LOSS.value, min_ago_open=1000, min_ago_close=900, profit_rate=1.1, - )) - Trade.query.session.add(generate_mock_trade( + ) + generate_mock_trade( 'NEO/BTC', fee.return_value, False, exit_reason=ExitType.STOP_LOSS.value, min_ago_open=1000, min_ago_close=900, profit_rate=1.1, - )) + ) + Trade.commit() # No losing trade yet ... so max_drawdown will raise exception assert not freqtrade.protections.global_stop() assert not freqtrade.protections.stop_per_pair('XRP/BTC') - Trade.query.session.add(generate_mock_trade( + generate_mock_trade( 'XRP/BTC', fee.return_value, False, exit_reason=ExitType.STOP_LOSS.value, min_ago_open=500, min_ago_close=400, profit_rate=0.9, - )) + ) # Not locked with one trade assert not freqtrade.protections.global_stop() assert not freqtrade.protections.stop_per_pair('XRP/BTC') assert not PairLocks.is_pair_locked('XRP/BTC') assert not PairLocks.is_global_lock() - Trade.query.session.add(generate_mock_trade( + generate_mock_trade( 'XRP/BTC', fee.return_value, False, exit_reason=ExitType.STOP_LOSS.value, min_ago_open=1200, min_ago_close=1100, profit_rate=0.5, - )) + ) + Trade.commit() # Not locked with 1 trade (2nd trade is outside of lookback_period) assert not freqtrade.protections.global_stop() @@ -404,20 +408,22 @@ def test_MaxDrawdown(mocker, default_conf, fee, caplog): assert not log_has_re(message, caplog) # Winning trade ... 
(should not lock, does not change drawdown!) - Trade.query.session.add(generate_mock_trade( + generate_mock_trade( 'XRP/BTC', fee.return_value, False, exit_reason=ExitType.ROI.value, min_ago_open=320, min_ago_close=410, profit_rate=1.5, - )) + ) + Trade.commit() assert not freqtrade.protections.global_stop() assert not PairLocks.is_global_lock() caplog.clear() # Add additional negative trade, causing a loss of > 15% - Trade.query.session.add(generate_mock_trade( + generate_mock_trade( 'XRP/BTC', fee.return_value, False, exit_reason=ExitType.ROI.value, min_ago_open=20, min_ago_close=10, profit_rate=0.8, - )) + ) + Trade.commit() assert not freqtrade.protections.stop_per_pair('XRP/BTC') # local lock not supported assert not PairLocks.is_pair_locked('XRP/BTC') diff --git a/tests/test_freqtradebot.py b/tests/test_freqtradebot.py index ace77a3b6..a1a16c039 100644 --- a/tests/test_freqtradebot.py +++ b/tests/test_freqtradebot.py @@ -677,6 +677,7 @@ def test_process_trade_no_whitelist_pair(default_conf_usdt, ticker_usdt, limit_b open_rate=0.001, exchange='binance', )) + Trade.commit() assert pair not in freqtrade.active_pair_whitelist freqtrade.process() @@ -2414,6 +2415,7 @@ def test_manage_open_orders_entry_usercustom( open_trade.orders[0].side = 'sell' if is_short else 'buy' open_trade.orders[0].ft_order_side = 'sell' if is_short else 'buy' Trade.query.session.add(open_trade) + Trade.commit() # Ensure default is to return empty (so not mocked yet) freqtrade.manage_open_orders() @@ -2472,6 +2474,7 @@ def test_manage_open_orders_entry( open_trade.is_short = is_short Trade.query.session.add(open_trade) + Trade.commit() freqtrade.strategy.check_entry_timeout = MagicMock(return_value=False) freqtrade.strategy.adjust_entry_price = MagicMock(return_value=1234) @@ -2509,6 +2512,7 @@ def test_adjust_entry_cancel( open_trade.is_short = is_short Trade.query.session.add(open_trade) + Trade.commit() # Timeout to not interfere freqtrade.strategy.ft_check_timed_out = MagicMock(return_value=False) @@ -2549,6 +2553,7 @@ def test_adjust_entry_maintain_replace( open_trade.is_short = is_short Trade.query.session.add(open_trade) + Trade.commit() # Timeout to not interfere freqtrade.strategy.ft_check_timed_out = MagicMock(return_value=False) @@ -2601,6 +2606,7 @@ def test_check_handle_cancelled_buy( open_trade.orders = [] open_trade.is_short = is_short Trade.query.session.add(open_trade) + Trade.commit() # check it does cancel buy orders over the time limit freqtrade.manage_open_orders() @@ -2631,6 +2637,7 @@ def test_manage_open_orders_buy_exception( open_trade.is_short = is_short Trade.query.session.add(open_trade) + Trade.commit() # check it does cancel buy orders over the time limit freqtrade.manage_open_orders() @@ -2672,6 +2679,7 @@ def test_manage_open_orders_exit_usercustom( open_trade_usdt.is_open = False Trade.query.session.add(open_trade_usdt) + Trade.commit() # Ensure default is false freqtrade.manage_open_orders() assert cancel_order_mock.call_count == 0 @@ -2754,6 +2762,7 @@ def test_manage_open_orders_exit( open_trade_usdt.is_short = is_short Trade.query.session.add(open_trade_usdt) + Trade.commit() freqtrade.strategy.check_exit_timeout = MagicMock(return_value=False) freqtrade.strategy.check_entry_timeout = MagicMock(return_value=False) @@ -2794,6 +2803,7 @@ def test_check_handle_cancelled_exit( open_trade_usdt.is_short = is_short Trade.query.session.add(open_trade_usdt) + Trade.commit() # check it does cancel sell orders over the time limit freqtrade.manage_open_orders() @@ -2830,6 +2840,7 @@ def 
test_manage_open_orders_partial( freqtrade = FreqtradeBot(default_conf_usdt) prior_stake = open_trade.stake_amount Trade.query.session.add(open_trade) + Trade.commit() # check it does cancel buy orders over the time limit # note this is for a partially-complete buy order @@ -2874,6 +2885,7 @@ def test_manage_open_orders_partial_fee( open_trade.fee_open = fee() open_trade.fee_close = fee() Trade.query.session.add(open_trade) + Trade.commit() # cancelling a half-filled order should update the amount to the bought amount # and apply fees if necessary. freqtrade.manage_open_orders() @@ -2923,6 +2935,7 @@ def test_manage_open_orders_partial_except( open_trade.fee_open = fee() open_trade.fee_close = fee() Trade.query.session.add(open_trade) + Trade.commit() # cancelling a half-filled order should update the amount to the bought amount # and apply fees if necessary. freqtrade.manage_open_orders() @@ -2961,6 +2974,7 @@ def test_manage_open_orders_exception(default_conf_usdt, ticker_usdt, open_trade freqtrade = FreqtradeBot(default_conf_usdt) Trade.query.session.add(open_trade_usdt) + Trade.commit() caplog.clear() freqtrade.manage_open_orders() diff --git a/tests/test_persistence.py b/tests/test_persistence.py index f68791b72..50d0788ca 100644 --- a/tests/test_persistence.py +++ b/tests/test_persistence.py @@ -1387,6 +1387,7 @@ def test_migrate_new(mocker, default_conf, fee, caplog): assert log_has("trying trades_bak2", caplog) assert log_has("Running database migration for trades - backup: trades_bak2, orders_bak0", caplog) + assert log_has("Database migration finished.", caplog) assert pytest.approx(trade.open_trade_value) == trade._calc_open_trade_value( trade.amount, trade.open_rate) assert trade.close_profit_abs is None @@ -1885,6 +1886,7 @@ def test_stoploss_reinitialization(default_conf, fee): assert trade.initial_stop_loss == 0.95 assert trade.initial_stop_loss_pct == -0.05 Trade.query.session.add(trade) + Trade.commit() # Lower stoploss Trade.stoploss_reinitialization(0.06) @@ -1946,6 +1948,7 @@ def test_stoploss_reinitialization_leverage(default_conf, fee): assert trade.initial_stop_loss == 0.98 assert trade.initial_stop_loss_pct == -0.1 Trade.query.session.add(trade) + Trade.commit() # Lower stoploss Trade.stoploss_reinitialization(0.15) @@ -2007,6 +2010,7 @@ def test_stoploss_reinitialization_short(default_conf, fee): assert trade.initial_stop_loss == 1.02 assert trade.initial_stop_loss_pct == -0.1 Trade.query.session.add(trade) + Trade.commit() # Lower stoploss Trade.stoploss_reinitialization(-0.15) trades = Trade.get_open_trades()
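A note on the recurring test changes above: with `autoflush=False` now set on the scoped session in `freqtrade/persistence/models.py`, newly added objects are presumably no longer flushed implicitly, which is why the tests gain explicit `Trade.commit()` calls and why `generate_mock_trade()` now persists the trade itself. A minimal sketch of the resulting call-site pattern (illustrative only; `fee`, `open_trade` and the helper come from the test modules above):

```python
from freqtrade.enums import ExitType
from freqtrade.persistence import Trade

# The updated helper adds the trade to the session and commits it, so protection
# tests simply call it and assert afterwards.
generate_mock_trade(
    'XRP/BTC', fee.return_value, False,
    exit_reason=ExitType.STOP_LOSS.value,
    min_ago_open=200, min_ago_close=30,
)

# Where a trade is still added manually, it now needs an explicit commit before
# other code can rely on seeing it (implicit autoflush is gone).
Trade.query.session.add(open_trade)
Trade.commit()
```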
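Relatedly, the Weibull-cutoff idea behind the new `FreqaiDataKitchen.compute_inlier_metric()` can be illustrated in isolation. A small self-contained sketch, not the actual FreqAI code path, with made-up sample data standing in for one of the `d1..dN` distance columns:

```python
import numpy as np
import pandas as pd
import scipy.stats as ss

rng = np.random.default_rng(42)
# Toy "distance" samples standing in for one backwards-distance column.
distances = pd.Series(rng.weibull(1.5, size=500) * 0.1)

# Fit a Weibull distribution and take the cutoff at the configured percentile,
# mirroring the weibull_min.fit / weibull_min.ppf steps in compute_inlier_metric().
fit_params = ss.weibull_min.fit(distances)
cutoff = ss.weibull_min.ppf(0.999, *fit_params)  # e.g. the configured inlier_metric_weibull_cutoff

# Points below the cutoff count as inliers; the per-row mean over all distance
# columns (rescaled to [-1, 1]) becomes the 'inlier_metric' feature.
is_inlier = (distances <= cutoff).astype(int)
print(f"cutoff={cutoff:.4f}, inlier fraction={is_inlier.mean():.3f}")
```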