diff --git a/freqtrade/configuration/load_config.py b/freqtrade/configuration/load_config.py
index a1a77815a..57424468d 100644
--- a/freqtrade/configuration/load_config.py
+++ b/freqtrade/configuration/load_config.py
@@ -58,7 +58,7 @@ def load_config_file(path: str) -> Dict[str, Any]:
     """
     try:
         # Read config from stdin if requested in the options
-        with open(path) if path != '-' else sys.stdin as file:
+        with Path(path).open() if path != '-' else sys.stdin as file:
             config = rapidjson.load(file, parse_mode=CONFIG_PARSE_MODE)
     except FileNotFoundError:
         raise OperationalException(
diff --git a/freqtrade/data/entryexitanalysis.py b/freqtrade/data/entryexitanalysis.py
index b2679bcea..5d67655cd 100644
--- a/freqtrade/data/entryexitanalysis.py
+++ b/freqtrade/data/entryexitanalysis.py
@@ -24,9 +24,9 @@ def _load_signal_candles(backtest_dir: Path):
         scpf = Path(backtest_dir.parent / f"{backtest_dir.stem}_signals.pkl")

     try:
-        scp = open(scpf, "rb")
-        signal_candles = joblib.load(scp)
-        logger.info(f"Loaded signal candles: {str(scpf)}")
+        with scpf.open("rb") as scp:
+            signal_candles = joblib.load(scp)
+            logger.info(f"Loaded signal candles: {str(scpf)}")
     except Exception as e:
         logger.error("Cannot load signal candles from pickled results: ", e)

diff --git a/freqtrade/exchange/binance.py b/freqtrade/exchange/binance.py
index 740d6e8a0..9580bc690 100644
--- a/freqtrade/exchange/binance.py
+++ b/freqtrade/exchange/binance.py
@@ -195,7 +195,7 @@ class Binance(Exchange):
             leverage_tiers_path = (
                 Path(__file__).parent / 'binance_leverage_tiers.json'
             )
-            with open(leverage_tiers_path) as json_file:
+            with leverage_tiers_path.open() as json_file:
                 return json_load(json_file)
         else:
             try:
diff --git a/freqtrade/freqai/data_drawer.py b/freqtrade/freqai/data_drawer.py
index 3b43e20bd..14986d854 100644
--- a/freqtrade/freqai/data_drawer.py
+++ b/freqtrade/freqai/data_drawer.py
@@ -126,7 +126,7 @@ class FreqaiDataDrawer:
         """
         exists = self.global_metadata_path.is_file()
         if exists:
-            with open(self.global_metadata_path, "r") as fp:
+            with self.global_metadata_path.open("r") as fp:
                 metatada_dict = rapidjson.load(fp, number_mode=rapidjson.NM_NATIVE)
                 return metatada_dict
         return {}
@@ -139,7 +139,7 @@ class FreqaiDataDrawer:
         """
         exists = self.pair_dictionary_path.is_file()
         if exists:
-            with open(self.pair_dictionary_path, "r") as fp:
+            with self.pair_dictionary_path.open("r") as fp:
                 self.pair_dict = rapidjson.load(fp, number_mode=rapidjson.NM_NATIVE)
         else:
             logger.info("Could not find existing datadrawer, starting from scratch")
@@ -152,7 +152,7 @@ class FreqaiDataDrawer:
         if self.freqai_info.get('write_metrics_to_disk', False):
             exists = self.metric_tracker_path.is_file()
             if exists:
-                with open(self.metric_tracker_path, "r") as fp:
+                with self.metric_tracker_path.open("r") as fp:
                     self.metric_tracker = rapidjson.load(fp, number_mode=rapidjson.NM_NATIVE)
                 logger.info("Loading existing metric tracker from disk.")
             else:
@@ -166,7 +166,7 @@ class FreqaiDataDrawer:
         exists = self.historic_predictions_path.is_file()
         if exists:
             try:
-                with open(self.historic_predictions_path, "rb") as fp:
+                with self.historic_predictions_path.open("rb") as fp:
                     self.historic_predictions = cloudpickle.load(fp)
                 logger.info(
                     f"Found existing historic predictions at {self.full_path}, but beware "
@@ -176,7 +176,7 @@ class FreqaiDataDrawer:
             except EOFError:
                 logger.warning(
                     'Historical prediction file was corrupted. Trying to load backup file.')
-                with open(self.historic_predictions_bkp_path, "rb") as fp:
+                with self.historic_predictions_bkp_path.open("rb") as fp:
                     self.historic_predictions = cloudpickle.load(fp)
                 logger.warning('FreqAI successfully loaded the backup historical predictions file.')

@@ -189,7 +189,7 @@ class FreqaiDataDrawer:
         """
         Save historic predictions pickle to disk
         """
-        with open(self.historic_predictions_path, "wb") as fp:
+        with self.historic_predictions_path.open("wb") as fp:
             cloudpickle.dump(self.historic_predictions, fp, protocol=cloudpickle.DEFAULT_PROTOCOL)

         # create a backup
@@ -200,16 +200,16 @@ class FreqaiDataDrawer:
         Save metric tracker of all pair metrics collected.
         """
         with self.save_lock:
-            with open(self.metric_tracker_path, 'w') as fp:
+            with self.metric_tracker_path.open('w') as fp:
                 rapidjson.dump(self.metric_tracker, fp, default=self.np_encoder,
                                number_mode=rapidjson.NM_NATIVE)

-    def save_drawer_to_disk(self):
+    def save_drawer_to_disk(self) -> None:
         """
         Save data drawer full of all pair model metadata in present model folder.
         """
         with self.save_lock:
-            with open(self.pair_dictionary_path, 'w') as fp:
+            with self.pair_dictionary_path.open('w') as fp:
                 rapidjson.dump(self.pair_dict, fp, default=self.np_encoder,
                                number_mode=rapidjson.NM_NATIVE)

@@ -218,7 +218,7 @@ class FreqaiDataDrawer:
         Save global metadata json to disk
         """
         with self.save_lock:
-            with open(self.global_metadata_path, 'w') as fp:
+            with self.global_metadata_path.open('w') as fp:
                 rapidjson.dump(metadata, fp, default=self.np_encoder,
                                number_mode=rapidjson.NM_NATIVE)

@@ -424,7 +424,7 @@ class FreqaiDataDrawer:
         dk.data["training_features_list"] = list(dk.data_dictionary["train_features"].columns)
         dk.data["label_list"] = dk.label_list

-        with open(save_path / f"{dk.model_filename}_metadata.json", "w") as fp:
+        with (save_path / f"{dk.model_filename}_metadata.json").open("w") as fp:
             rapidjson.dump(dk.data, fp, default=self.np_encoder, number_mode=rapidjson.NM_NATIVE)

         return
@@ -457,7 +457,7 @@ class FreqaiDataDrawer:
         dk.data["training_features_list"] = dk.training_features_list
         dk.data["label_list"] = dk.label_list
         # store the metadata
-        with open(save_path / f"{dk.model_filename}_metadata.json", "w") as fp:
+        with (save_path / f"{dk.model_filename}_metadata.json").open("w") as fp:
             rapidjson.dump(dk.data, fp, default=self.np_encoder, number_mode=rapidjson.NM_NATIVE)

         # save the train data to file so we can check preds for area of applicability later
@@ -471,7 +471,7 @@ class FreqaiDataDrawer:

         if self.freqai_info["feature_parameters"].get("principal_component_analysis"):
             cloudpickle.dump(
-                dk.pca, open(dk.data_path / f"{dk.model_filename}_pca_object.pkl", "wb")
+                dk.pca, (dk.data_path / f"{dk.model_filename}_pca_object.pkl").open("wb")
             )

         self.model_dictionary[coin] = model
@@ -491,7 +491,7 @@ class FreqaiDataDrawer:
         Load only metadata into datakitchen to increase performance during
         presaved backtesting (prediction file loading).
         """
-        with open(dk.data_path / f"{dk.model_filename}_metadata.json", "r") as fp:
+        with (dk.data_path / f"{dk.model_filename}_metadata.json").open("r") as fp:
             dk.data = rapidjson.load(fp, number_mode=rapidjson.NM_NATIVE)
             dk.training_features_list = dk.data["training_features_list"]
             dk.label_list = dk.data["label_list"]
@@ -514,7 +514,7 @@ class FreqaiDataDrawer:
             dk.data = self.meta_data_dictionary[coin]["meta_data"]
             dk.data_dictionary["train_features"] = self.meta_data_dictionary[coin]["train_df"]
         else:
-            with open(dk.data_path / f"{dk.model_filename}_metadata.json", "r") as fp:
+            with (dk.data_path / f"{dk.model_filename}_metadata.json").open("r") as fp:
                 dk.data = rapidjson.load(fp, number_mode=rapidjson.NM_NATIVE)

             dk.data_dictionary["train_features"] = pd.read_pickle(
@@ -552,7 +552,7 @@ class FreqaiDataDrawer:

         if self.config["freqai"]["feature_parameters"]["principal_component_analysis"]:
             dk.pca = cloudpickle.load(
-                open(dk.data_path / f"{dk.model_filename}_pca_object.pkl", "rb")
+                (dk.data_path / f"{dk.model_filename}_pca_object.pkl").open("rb")
             )

         return model
diff --git a/freqtrade/freqai/utils.py b/freqtrade/freqai/utils.py
index 806e3ca15..2ba49ac40 100644
--- a/freqtrade/freqai/utils.py
+++ b/freqtrade/freqai/utils.py
@@ -211,7 +211,7 @@ def record_params(config: Dict[str, Any], full_path: Path) -> None:
         "pairs": config.get('exchange', {}).get('pair_whitelist')
     }

-    with open(params_record_path, "w") as handle:
+    with params_record_path.open("w") as handle:
         rapidjson.dump(
             run_params,
             handle,
diff --git a/freqtrade/misc.py b/freqtrade/misc.py
index 9d9cf38d7..87cea54c0 100644
--- a/freqtrade/misc.py
+++ b/freqtrade/misc.py
@@ -81,7 +81,7 @@ def file_dump_json(filename: Path, data: Any, is_zip: bool = False, log: bool =
     else:
         if log:
             logger.info(f'dumping json to "{filename}"')
-        with open(filename, 'w') as fp:
+        with filename.open('w') as fp:
             rapidjson.dump(data, fp, default=str, number_mode=rapidjson.NM_NATIVE)

     logger.debug(f'done json to "{filename}"')
@@ -98,7 +98,7 @@ def file_dump_joblib(filename: Path, data: Any, log: bool = True) -> None:

     if log:
         logger.info(f'dumping joblib to "{filename}"')
-    with open(filename, 'wb') as fp:
+    with filename.open('wb') as fp:
         joblib.dump(data, fp)
     logger.debug(f'done joblib dump to "{filename}"')

@@ -112,7 +112,7 @@ def json_load(datafile: IO) -> Any:
     return rapidjson.load(datafile, number_mode=rapidjson.NM_NATIVE)


-def file_load_json(file):
+def file_load_json(file: Path):

     if file.suffix != ".gz":
         gzipfile = file.with_suffix(file.suffix + '.gz')
@@ -125,7 +125,7 @@ def file_load_json(file):
             pairdata = json_load(datafile)
     elif file.is_file():
         logger.debug(f"Loading historical data from file {file}")
-        with open(file) as datafile:
+        with file.open() as datafile:
             pairdata = json_load(datafile)
     else:
         return None
diff --git a/freqtrade/optimize/backtest_caching.py b/freqtrade/optimize/backtest_caching.py
index d9d270072..f34bbffef 100644
--- a/freqtrade/optimize/backtest_caching.py
+++ b/freqtrade/optimize/backtest_caching.py
@@ -29,7 +29,7 @@ def get_strategy_run_id(strategy) -> str:
     # Include _ft_params_from_file - so changing parameter files cause cache eviction
     digest.update(rapidjson.dumps(
         strategy._ft_params_from_file, default=str, number_mode=rapidjson.NM_NAN).encode('utf-8'))
-    with open(strategy.__file__, 'rb') as fp:
+    with Path(strategy.__file__).open('rb') as fp:
         digest.update(fp.read())
     return digest.hexdigest().lower()

diff --git a/freqtrade/plugins/pairlist/RemotePairList.py b/freqtrade/plugins/pairlist/RemotePairList.py
index b54be1fa7..764c16f1a 100644
--- a/freqtrade/plugins/pairlist/RemotePairList.py
+++ b/freqtrade/plugins/pairlist/RemotePairList.py
@@ -157,7 +157,7 @@ class RemotePairList(IPairList):
             file_path = Path(filename)

             if file_path.exists():
-                with open(filename) as json_file:
+                with file_path.open() as json_file:
                     # Load the JSON data into a dictionary
                     jsonparse = json.load(json_file)

diff --git a/pyproject.toml b/pyproject.toml
index 698a621b4..6f9e5205c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -72,7 +72,7 @@ extend-select = [
     # "DTZ", # flake8-datetimez
     # "RSE", # flake8-raise
     # "TCH", # flake8-type-checking
-    # "PTH", # flake8-use-pathlib
+    "PTH", # flake8-use-pathlib
 ]

 [tool.ruff.mccabe]
diff --git a/tests/data/test_history.py b/tests/data/test_history.py
index 7d313c446..5cd7327fd 100644
--- a/tests/data/test_history.py
+++ b/tests/data/test_history.py
@@ -191,7 +191,7 @@ def test_load_cached_data_for_updating(mocker, testdatadir) -> None:

     test_data = None
     test_filename = testdatadir.joinpath('UNITTEST_BTC-1m.json')
-    with open(test_filename, "rt") as file:
+    with test_filename.open("rt") as file:
         test_data = json.load(file)

     test_data_df = ohlcv_to_dataframe(test_data, '1m', 'UNITTEST/BTC',
diff --git a/tests/optimize/test_optimize_reports.py b/tests/optimize/test_optimize_reports.py
index 549202284..f71e6c492 100644
--- a/tests/optimize/test_optimize_reports.py
+++ b/tests/optimize/test_optimize_reports.py
@@ -255,7 +255,7 @@ def test_write_read_backtest_candles(tmpdir):

     # test directory exporting
     stored_file = store_backtest_signal_candles(Path(tmpdir), candle_dict, '2022_01_01_15_05_13')
-    scp = open(stored_file, "rb")
+    scp = stored_file.open("rb")
     pickled_signal_candles = joblib.load(scp)
     scp.close()

@@ -269,7 +269,7 @@ def test_write_read_backtest_candles(tmpdir):

     # test file exporting
     filename = Path(tmpdir / 'testresult')
     stored_file = store_backtest_signal_candles(filename, candle_dict, '2022_01_01_15_05_13')
-    scp = open(stored_file, "rb")
+    scp = stored_file.open("rb")
     pickled_signal_candles = joblib.load(scp)
     scp.close()
diff --git a/tests/test_configuration.py b/tests/test_configuration.py
index 4a94a3c2e..aab868bec 100644
--- a/tests/test_configuration.py
+++ b/tests/test_configuration.py
@@ -59,7 +59,7 @@ def test_load_config_incorrect_stake_amount(default_conf) -> None:
 def test_load_config_file(default_conf, mocker, caplog) -> None:
     del default_conf['user_data_dir']
     default_conf['datadir'] = str(default_conf['datadir'])
-    file_mock = mocker.patch('freqtrade.configuration.load_config.open', mocker.mock_open(
+    file_mock = mocker.patch('freqtrade.configuration.load_config.Path.open', mocker.mock_open(
         read_data=json.dumps(default_conf)
     ))

@@ -73,7 +73,8 @@ def test_load_config_file_error(default_conf, mocker, caplog) -> None:
     default_conf['datadir'] = str(default_conf['datadir'])
     filedata = json.dumps(default_conf).replace(
         '"stake_amount": 0.001,', '"stake_amount": .001,')
-    mocker.patch('freqtrade.configuration.load_config.open', mocker.mock_open(read_data=filedata))
+    mocker.patch('freqtrade.configuration.load_config.Path.open',
+                 mocker.mock_open(read_data=filedata))
     mocker.patch.object(Path, "read_text", MagicMock(return_value=filedata))

     with pytest.raises(OperationalException, match=r".*Please verify the following segment.*"):
@@ -272,7 +273,7 @@ def test_load_config_max_open_trades_minus_one(default_conf, mocker, caplog) ->

 def test_load_config_file_exception(mocker) -> None:
     mocker.patch(
-        'freqtrade.configuration.configuration.open',
+        'freqtrade.configuration.configuration.Path.open',
         MagicMock(side_effect=FileNotFoundError('File not found'))
     )

diff --git a/tests/test_misc.py b/tests/test_misc.py
index 596c7bd51..6b4343ab2 100644
--- a/tests/test_misc.py
+++ b/tests/test_misc.py
@@ -46,7 +46,7 @@ def test_shorten_date() -> None:


 def test_file_dump_json(mocker) -> None:
-    file_open = mocker.patch('freqtrade.misc.open', MagicMock())
+    file_open = mocker.patch('freqtrade.misc.Path.open', MagicMock())
     json_dump = mocker.patch('rapidjson.dump', MagicMock())
     file_dump_json(Path('somefile'), [1, 2, 3])
     assert file_open.call_count == 1