Merge pull request #8225 from freqtrade/ruff2
Ruff - add PTH rule and subsequent changes
commit 756c284ecd
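
The theme of the diff below: ruff's flake8-use-pathlib ("PTH") rule family flags calls to the builtin open() and to os.path helpers, so every touched call site is rewritten onto pathlib. A minimal sketch of the recurring before/after shape (function names here are illustrative, not from the PR):

from pathlib import Path

def read_text_legacy(path: str) -> str:
    # flagged by ruff as PTH123: builtin open() call
    with open(path) as fh:
        return fh.read()

def read_text_pathlib(path: str) -> str:
    # the rewrite this PR applies throughout: Path(...).open()
    with Path(path).open() as fh:
        return fh.read()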
@@ -58,7 +58,7 @@ def load_config_file(path: str) -> Dict[str, Any]:
     """
     try:
         # Read config from stdin if requested in the options
-        with open(path) if path != '-' else sys.stdin as file:
+        with Path(path).open() if path != '-' else sys.stdin as file:
             config = rapidjson.load(file, parse_mode=CONFIG_PARSE_MODE)
     except FileNotFoundError:
         raise OperationalException(
@@ -24,9 +24,9 @@ def _load_signal_candles(backtest_dir: Path):
         scpf = Path(backtest_dir.parent / f"{backtest_dir.stem}_signals.pkl")
 
     try:
-        scp = open(scpf, "rb")
-        signal_candles = joblib.load(scp)
-        logger.info(f"Loaded signal candles: {str(scpf)}")
+        with scpf.open("rb") as scp:
+            signal_candles = joblib.load(scp)
+            logger.info(f"Loaded signal candles: {str(scpf)}")
     except Exception as e:
         logger.error("Cannot load signal candles from pickled results: ", e)
 
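
This hunk changes behavior slightly, not just style: the old scp = open(scpf, "rb") handle was never closed explicitly, while the new with-block closes it even if joblib.load raises. A self-contained sketch of the fixed shape, assuming scpf points at a joblib pickle:

import joblib
from pathlib import Path

def load_signal_pickle(scpf: Path):
    # the handle is closed on success and on error; the previous
    # bare open() leaked it until garbage collection
    with scpf.open("rb") as scp:
        return joblib.load(scp)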
@@ -195,7 +195,7 @@ class Binance(Exchange):
                 leverage_tiers_path = (
                     Path(__file__).parent / 'binance_leverage_tiers.json'
                 )
-                with open(leverage_tiers_path) as json_file:
+                with leverage_tiers_path.open() as json_file:
                     return json_load(json_file)
             else:
                 try:
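
The Binance hunk reads a JSON file that ships next to the module; Path(__file__).parent resolves relative to the source file instead of the process working directory, so the lookup works no matter where the program is started from. A hedged sketch of the same pattern, using stdlib json in place of freqtrade's json_load:

import json
from pathlib import Path

def load_bundled_tiers(name: str = 'binance_leverage_tiers.json'):
    # resolve relative to this module, not the current working directory
    tiers_path = Path(__file__).parent / name
    with tiers_path.open() as json_file:
        return json.load(json_file)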
@@ -126,7 +126,7 @@ class FreqaiDataDrawer:
         """
         exists = self.global_metadata_path.is_file()
         if exists:
-            with open(self.global_metadata_path, "r") as fp:
+            with self.global_metadata_path.open("r") as fp:
                 metatada_dict = rapidjson.load(fp, number_mode=rapidjson.NM_NATIVE)
             return metatada_dict
         return {}
@@ -139,7 +139,7 @@ class FreqaiDataDrawer:
         """
         exists = self.pair_dictionary_path.is_file()
         if exists:
-            with open(self.pair_dictionary_path, "r") as fp:
+            with self.pair_dictionary_path.open("r") as fp:
                 self.pair_dict = rapidjson.load(fp, number_mode=rapidjson.NM_NATIVE)
         else:
             logger.info("Could not find existing datadrawer, starting from scratch")
@@ -152,7 +152,7 @@ class FreqaiDataDrawer:
         if self.freqai_info.get('write_metrics_to_disk', False):
             exists = self.metric_tracker_path.is_file()
             if exists:
-                with open(self.metric_tracker_path, "r") as fp:
+                with self.metric_tracker_path.open("r") as fp:
                     self.metric_tracker = rapidjson.load(fp, number_mode=rapidjson.NM_NATIVE)
                 logger.info("Loading existing metric tracker from disk.")
             else:
@@ -166,7 +166,7 @@ class FreqaiDataDrawer:
         exists = self.historic_predictions_path.is_file()
         if exists:
             try:
-                with open(self.historic_predictions_path, "rb") as fp:
+                with self.historic_predictions_path.open("rb") as fp:
                     self.historic_predictions = cloudpickle.load(fp)
                 logger.info(
                     f"Found existing historic predictions at {self.full_path}, but beware "
@@ -176,7 +176,7 @@ class FreqaiDataDrawer:
             except EOFError:
                 logger.warning(
                     'Historical prediction file was corrupted. Trying to load backup file.')
-                with open(self.historic_predictions_bkp_path, "rb") as fp:
+                with self.historic_predictions_bkp_path.open("rb") as fp:
                     self.historic_predictions = cloudpickle.load(fp)
                 logger.warning('FreqAI successfully loaded the backup historical predictions file.')
 
@@ -189,7 +189,7 @@ class FreqaiDataDrawer:
         """
         Save historic predictions pickle to disk
         """
-        with open(self.historic_predictions_path, "wb") as fp:
+        with self.historic_predictions_path.open("wb") as fp:
             cloudpickle.dump(self.historic_predictions, fp, protocol=cloudpickle.DEFAULT_PROTOCOL)
 
         # create a backup
@@ -200,16 +200,16 @@ class FreqaiDataDrawer:
         Save metric tracker of all pair metrics collected.
         """
         with self.save_lock:
-            with open(self.metric_tracker_path, 'w') as fp:
+            with self.metric_tracker_path.open('w') as fp:
                 rapidjson.dump(self.metric_tracker, fp, default=self.np_encoder,
                                number_mode=rapidjson.NM_NATIVE)
 
-    def save_drawer_to_disk(self):
+    def save_drawer_to_disk(self) -> None:
         """
         Save data drawer full of all pair model metadata in present model folder.
         """
         with self.save_lock:
-            with open(self.pair_dictionary_path, 'w') as fp:
+            with self.pair_dictionary_path.open('w') as fp:
                 rapidjson.dump(self.pair_dict, fp, default=self.np_encoder,
                                number_mode=rapidjson.NM_NATIVE)
 
@@ -218,7 +218,7 @@ class FreqaiDataDrawer:
         Save global metadata json to disk
         """
         with self.save_lock:
-            with open(self.global_metadata_path, 'w') as fp:
+            with self.global_metadata_path.open('w') as fp:
                 rapidjson.dump(metadata, fp, default=self.np_encoder,
                                number_mode=rapidjson.NM_NATIVE)
 
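
The three save hunks above share one shape: take self.save_lock, open the target via Path.open('w'), and rapidjson.dump with NM_NATIVE. A standalone sketch of that pattern; the threading.Lock and default=str are assumptions standing in for the drawer's real lock attribute and np_encoder:

import threading
from pathlib import Path
import rapidjson

class MiniDrawer:
    def __init__(self, metadata_path: Path):
        self.global_metadata_path = metadata_path
        self.save_lock = threading.Lock()

    def save_global_metadata_to_disk(self, metadata: dict) -> None:
        # serialize writers so concurrent saves cannot interleave
        with self.save_lock:
            with self.global_metadata_path.open('w') as fp:
                rapidjson.dump(metadata, fp, default=str,
                               number_mode=rapidjson.NM_NATIVE)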
@@ -424,7 +424,7 @@ class FreqaiDataDrawer:
         dk.data["training_features_list"] = list(dk.data_dictionary["train_features"].columns)
         dk.data["label_list"] = dk.label_list
 
-        with open(save_path / f"{dk.model_filename}_metadata.json", "w") as fp:
+        with (save_path / f"{dk.model_filename}_metadata.json").open("w") as fp:
             rapidjson.dump(dk.data, fp, default=self.np_encoder, number_mode=rapidjson.NM_NATIVE)
 
         return
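
One syntactic detail in the composed-path rewrites: the parentheses in (save_path / f"...").open("w") are required, because attribute access binds tighter than the / operator. A quick illustration:

from pathlib import Path

save_path = Path('models')
save_path.mkdir(exist_ok=True)
name = 'run1_metadata.json'

# correct: build the full Path first, then open it
with (save_path / name).open('w') as fp:
    fp.write('{}')

# without the parentheses, .open('w') would bind to the str `name`
# first and raise AttributeError: 'str' object has no attribute 'open'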
@@ -457,7 +457,7 @@ class FreqaiDataDrawer:
         dk.data["training_features_list"] = dk.training_features_list
         dk.data["label_list"] = dk.label_list
         # store the metadata
-        with open(save_path / f"{dk.model_filename}_metadata.json", "w") as fp:
+        with (save_path / f"{dk.model_filename}_metadata.json").open("w") as fp:
             rapidjson.dump(dk.data, fp, default=self.np_encoder, number_mode=rapidjson.NM_NATIVE)
 
         # save the train data to file so we can check preds for area of applicability later
@@ -471,7 +471,7 @@ class FreqaiDataDrawer:
 
         if self.freqai_info["feature_parameters"].get("principal_component_analysis"):
             cloudpickle.dump(
-                dk.pca, open(dk.data_path / f"{dk.model_filename}_pca_object.pkl", "wb")
+                dk.pca, (dk.data_path / f"{dk.model_filename}_pca_object.pkl").open("wb")
             )
 
         self.model_dictionary[coin] = model
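
Note this rewrite is purely mechanical: cloudpickle.dump(obj, path.open("wb")) still passes an unmanaged handle, exactly as the old open(...) form did, so the file is only closed when the handle is garbage-collected. A context-managed variant, if deterministic closing were wanted (a sketch, not the PR's code):

import cloudpickle
from pathlib import Path

def dump_pca(pca_object, target: Path) -> None:
    # close the file deterministically instead of relying on GC
    with target.open('wb') as fh:
        cloudpickle.dump(pca_object, fh)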
@@ -491,7 +491,7 @@ class FreqaiDataDrawer:
         Load only metadata into datakitchen to increase performance during
         presaved backtesting (prediction file loading).
         """
-        with open(dk.data_path / f"{dk.model_filename}_metadata.json", "r") as fp:
+        with (dk.data_path / f"{dk.model_filename}_metadata.json").open("r") as fp:
             dk.data = rapidjson.load(fp, number_mode=rapidjson.NM_NATIVE)
         dk.training_features_list = dk.data["training_features_list"]
         dk.label_list = dk.data["label_list"]
@@ -514,7 +514,7 @@ class FreqaiDataDrawer:
             dk.data = self.meta_data_dictionary[coin]["meta_data"]
             dk.data_dictionary["train_features"] = self.meta_data_dictionary[coin]["train_df"]
         else:
-            with open(dk.data_path / f"{dk.model_filename}_metadata.json", "r") as fp:
+            with (dk.data_path / f"{dk.model_filename}_metadata.json").open("r") as fp:
                 dk.data = rapidjson.load(fp, number_mode=rapidjson.NM_NATIVE)
 
             dk.data_dictionary["train_features"] = pd.read_pickle(
@@ -552,7 +552,7 @@ class FreqaiDataDrawer:
 
         if self.config["freqai"]["feature_parameters"]["principal_component_analysis"]:
             dk.pca = cloudpickle.load(
-                open(dk.data_path / f"{dk.model_filename}_pca_object.pkl", "rb")
+                (dk.data_path / f"{dk.model_filename}_pca_object.pkl").open("rb")
             )
 
         return model
@@ -211,7 +211,7 @@ def record_params(config: Dict[str, Any], full_path: Path) -> None:
         "pairs": config.get('exchange', {}).get('pair_whitelist')
     }
 
-    with open(params_record_path, "w") as handle:
+    with params_record_path.open("w") as handle:
         rapidjson.dump(
             run_params,
             handle,
@@ -81,7 +81,7 @@ def file_dump_json(filename: Path, data: Any, is_zip: bool = False, log: bool =
     else:
         if log:
             logger.info(f'dumping json to "{filename}"')
-        with open(filename, 'w') as fp:
+        with filename.open('w') as fp:
             rapidjson.dump(data, fp, default=str, number_mode=rapidjson.NM_NATIVE)
 
     logger.debug(f'done json to "{filename}"')
@@ -98,7 +98,7 @@ def file_dump_joblib(filename: Path, data: Any, log: bool = True) -> None:
 
     if log:
         logger.info(f'dumping joblib to "{filename}"')
-    with open(filename, 'wb') as fp:
+    with filename.open('wb') as fp:
         joblib.dump(data, fp)
     logger.debug(f'done joblib dump to "{filename}"')
 
@@ -112,7 +112,7 @@ def json_load(datafile: IO) -> Any:
     return rapidjson.load(datafile, number_mode=rapidjson.NM_NATIVE)
 
 
-def file_load_json(file):
+def file_load_json(file: Path):
 
     if file.suffix != ".gz":
         gzipfile = file.with_suffix(file.suffix + '.gz')
@@ -125,7 +125,7 @@ def file_load_json(file):
             pairdata = json_load(datafile)
     elif file.is_file():
         logger.debug(f"Loading historical data from file {file}")
-        with open(file) as datafile:
+        with file.open() as datafile:
             pairdata = json_load(datafile)
     else:
         return None
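
file_load_json prefers a gzipped sibling of the requested file and falls back to the plain file, returning None when neither exists. A self-contained sketch of that lookup order, using stdlib json in place of freqtrade's json_load:

import gzip
import json
from pathlib import Path
from typing import Any, Optional

def load_json_maybe_gzipped(file: Path) -> Optional[Any]:
    # mirror file_load_json: try '<name>.gz' first, then the plain file
    gzipfile = file.with_suffix(file.suffix + '.gz')
    if gzipfile.is_file():
        with gzip.open(gzipfile, 'rt', encoding='utf-8') as datafile:
            return json.load(datafile)
    if file.is_file():
        with file.open() as datafile:
            return json.load(datafile)
    return None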
@@ -29,7 +29,7 @@ def get_strategy_run_id(strategy) -> str:
         # Include _ft_params_from_file - so changing parameter files cause cache eviction
         digest.update(rapidjson.dumps(
             strategy._ft_params_from_file, default=str, number_mode=rapidjson.NM_NAN).encode('utf-8'))
-    with open(strategy.__file__, 'rb') as fp:
+    with Path(strategy.__file__).open('rb') as fp:
         digest.update(fp.read())
     return digest.hexdigest().lower()
 
@@ -157,7 +157,7 @@ class RemotePairList(IPairList):
         file_path = Path(filename)
 
         if file_path.exists():
-            with open(filename) as json_file:
+            with file_path.open() as json_file:
                 # Load the JSON data into a dictionary
                 jsonparse = json.load(json_file)
 
@@ -72,7 +72,7 @@ extend-select = [
 # "DTZ", # flake8-datetimez
 # "RSE", # flake8-raise
 # "TCH", # flake8-type-checking
-# "PTH", # flake8-use-pathlib
+  "PTH", # flake8-use-pathlib
 ]
 
 [tool.ruff.mccabe]
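
Uncommenting "PTH" in extend-select is what drives every other hunk in this commit: ruff now reports builtin open() and most os.path usage. A hedged illustration of what the rule family flags (rule codes per ruff's flake8-use-pathlib docs; the snippet itself is illustrative):

import os.path
from pathlib import Path

BASE = 'user_data'

def read_config_legacy() -> str:
    # PTH118: os.path.join() -> use the `/` operator
    # PTH123: builtin open() -> use Path.open()
    with open(os.path.join(BASE, 'config.json')) as fh:  # flagged
        return fh.read()

def read_config_pathlib() -> str:
    with (Path(BASE) / 'config.json').open() as fh:  # passes the PTH rules
        return fh.read()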
@@ -191,7 +191,7 @@ def test_load_cached_data_for_updating(mocker, testdatadir) -> None:
 
     test_data = None
     test_filename = testdatadir.joinpath('UNITTEST_BTC-1m.json')
-    with open(test_filename, "rt") as file:
+    with test_filename.open("rt") as file:
         test_data = json.load(file)
 
     test_data_df = ohlcv_to_dataframe(test_data, '1m', 'UNITTEST/BTC',
@@ -255,7 +255,7 @@ def test_write_read_backtest_candles(tmpdir):
 
     # test directory exporting
     stored_file = store_backtest_signal_candles(Path(tmpdir), candle_dict, '2022_01_01_15_05_13')
-    scp = open(stored_file, "rb")
+    scp = stored_file.open("rb")
     pickled_signal_candles = joblib.load(scp)
     scp.close()
 
@@ -269,7 +269,7 @@ def test_write_read_backtest_candles(tmpdir):
     # test file exporting
     filename = Path(tmpdir / 'testresult')
     stored_file = store_backtest_signal_candles(filename, candle_dict, '2022_01_01_15_05_13')
-    scp = open(stored_file, "rb")
+    scp = stored_file.open("rb")
     pickled_signal_candles = joblib.load(scp)
     scp.close()
 
@@ -59,7 +59,7 @@ def test_load_config_incorrect_stake_amount(default_conf) -> None:
 def test_load_config_file(default_conf, mocker, caplog) -> None:
     del default_conf['user_data_dir']
     default_conf['datadir'] = str(default_conf['datadir'])
-    file_mock = mocker.patch('freqtrade.configuration.load_config.open', mocker.mock_open(
+    file_mock = mocker.patch('freqtrade.configuration.load_config.Path.open', mocker.mock_open(
         read_data=json.dumps(default_conf)
     ))
 
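
The test changes follow directly from the production rewrite: once load_config calls Path.open instead of the module-level open, the mock target has to move with it. A minimal pytest-mock sketch of the new patch target (the asserted behavior paraphrases the test above, not a verbatim copy):

import json
from pathlib import Path

def test_load_config_reads_mocked_file(mocker):
    # patch Path.open as resolved inside load_config, mirroring the
    # move away from patching 'freqtrade.configuration.load_config.open'
    file_mock = mocker.patch(
        'freqtrade.configuration.load_config.Path.open',
        mocker.mock_open(read_data=json.dumps({'stake_amount': 0.001}))
    )
    from freqtrade.configuration.load_config import load_config_file
    config = load_config_file('some_config.json')
    assert file_mock.call_count == 1
    assert config['stake_amount'] == 0.001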
@@ -73,7 +73,8 @@ def test_load_config_file_error(default_conf, mocker, caplog) -> None:
     default_conf['datadir'] = str(default_conf['datadir'])
     filedata = json.dumps(default_conf).replace(
         '"stake_amount": 0.001,', '"stake_amount": .001,')
-    mocker.patch('freqtrade.configuration.load_config.open', mocker.mock_open(read_data=filedata))
+    mocker.patch('freqtrade.configuration.load_config.Path.open',
+                 mocker.mock_open(read_data=filedata))
     mocker.patch.object(Path, "read_text", MagicMock(return_value=filedata))
 
     with pytest.raises(OperationalException, match=r".*Please verify the following segment.*"):
@@ -272,7 +273,7 @@ def test_load_config_max_open_trades_minus_one(default_conf, mocker, caplog) ->
 
 def test_load_config_file_exception(mocker) -> None:
     mocker.patch(
-        'freqtrade.configuration.configuration.open',
+        'freqtrade.configuration.configuration.Path.open',
         MagicMock(side_effect=FileNotFoundError('File not found'))
     )
 
@@ -46,7 +46,7 @@ def test_shorten_date() -> None:
 
 
 def test_file_dump_json(mocker) -> None:
-    file_open = mocker.patch('freqtrade.misc.open', MagicMock())
+    file_open = mocker.patch('freqtrade.misc.Path.open', MagicMock())
     json_dump = mocker.patch('rapidjson.dump', MagicMock())
     file_dump_json(Path('somefile'), [1, 2, 3])
     assert file_open.call_count == 1