From 6f9d703f132fde6ec5e18df42eef0d1b0da0b69a Mon Sep 17 00:00:00 2001
From: x54-729 <17307130121@fudan.edu.cn>
Date: Mon, 20 Jun 2022 20:52:04 +0800
Subject: [PATCH] logger.warn->logger.warning

---
 fastNLP/core/drivers/torch_driver/deepspeed.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/fastNLP/core/drivers/torch_driver/deepspeed.py b/fastNLP/core/drivers/torch_driver/deepspeed.py
index 3d519099..a99a42f8 100644
--- a/fastNLP/core/drivers/torch_driver/deepspeed.py
+++ b/fastNLP/core/drivers/torch_driver/deepspeed.py
@@ -104,7 +104,7 @@ class DeepSpeedDriver(TorchDDPDriver):
         if train_dl is not None:
             self.train_micro_batch_size = self.get_dataloader_args(train_dl).batch_size
         else:
-            logger.warn("No `train_dataloader` found, and we will set `train_micro_batch_size_per_gpu`"
+            logger.warning("No `train_dataloader` found, and we will set `train_micro_batch_size_per_gpu` "
                         "to 1 for deepspeed configuration.")
             self.train_micro_batch_size = 1

@@ -226,7 +226,7 @@ class DeepSpeedDriver(TorchDDPDriver):

         self.config = self._ds_kwargs.get("config")
         if self.config is not None:
-            logger.warn("Notice that you have defined a configuration for deepspeed and parameters like"
+            logger.warning("Notice that you have defined a configuration for deepspeed and parameters like "
                         "`optimizers`, `strategy` and `fp16` may not take effects.")
             return

@@ -330,7 +330,7 @@ class DeepSpeedDriver(TorchDDPDriver):
         :return:
         """
         if not only_state_dict:
-            logger.warn("Only loading state dict is not allowed for `DeepSpeedDriver`. We will load its "
+            logger.warning("Only loading state dict is not allowed for `DeepSpeedDriver`. We will load its "
                         "checkpoint for you instead.")
         self.model.load_checkpoint(filepath, **kwargs)
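
Background on the rename: in Python's standard `logging` module, `Logger.warn` is a long-deprecated alias of `Logger.warning`, and fastNLP's `logger` appears to mirror that API (the one-for-one rename in this patch suggests as much). Below is a minimal stdlib-only sketch, not fastNLP code (the `demo` logger name is illustrative), showing that the old spelling emits a `DeprecationWarning` while `warning` produces the same log record silently:

    import logging
    import warnings

    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger("demo")  # illustrative name, not fastNLP's logger

    # Logger.warn is a deprecated alias that forwards to Logger.warning after
    # emitting a DeprecationWarning; the log output itself is identical.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        logger.warn("old spelling")            # logs, but also warns
        print(caught[0].category.__name__)     # -> DeprecationWarning

    logger.warning("preferred spelling")       # same log record, no warning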