
fix redundant logs when using distributed training with PyTorch

Link: https://code.alibaba-inc.com/Ali-MaaS/MaaS-lib/codereview/10959078
master^2
wenmeng.zwm, 3 years ago
parent commit d68d66f8c1
57 changed files with 80 additions and 59 deletions
  1. modelscope/exporters/torch_model_exporter.py (+1, -1)
  2. modelscope/models/base/base_torch_head.py (+1, -1)
  3. modelscope/models/base/base_torch_model.py (+1, -1)
  4. modelscope/models/multi_modal/mplug/configuration_mplug.py (+1, -1)
  5. modelscope/models/multi_modal/mplug/modeling_mplug.py (+1, -1)
  6. modelscope/models/multi_modal/ofa/configuration_mmspeech.py (+1, -1)
  7. modelscope/models/multi_modal/ofa/configuration_ofa.py (+1, -1)
  8. modelscope/models/multi_modal/ofa/modeling_mmspeech.py (+1, -1)
  9. modelscope/models/multi_modal/ofa/modeling_ofa.py (+1, -1)
  10. modelscope/models/multi_modal/ofa/tokenization_ofa.py (+1, -1)
  11. modelscope/models/multi_modal/ofa/tokenization_ofa_fast.py (+1, -1)
  12. modelscope/models/nlp/T5/backbone.py (+1, -1)
  13. modelscope/models/nlp/T5/configuration.py (+1, -1)
  14. modelscope/models/nlp/T5/text2text_generation.py (+1, -1)
  15. modelscope/models/nlp/bert/backbone.py (+1, -1)
  16. modelscope/models/nlp/bert/configuration.py (+1, -1)
  17. modelscope/models/nlp/bert/fill_mask.py (+1, -1)
  18. modelscope/models/nlp/bert/text_classification.py (+1, -1)
  19. modelscope/models/nlp/bert/text_ranking.py (+1, -1)
  20. modelscope/models/nlp/bert/token_classification.py (+1, -1)
  21. modelscope/models/nlp/deberta_v2/backbone.py (+1, -1)
  22. modelscope/models/nlp/deberta_v2/configuration.py (+1, -1)
  23. modelscope/models/nlp/deberta_v2/tokenization_fast.py (+1, -1)
  24. modelscope/models/nlp/gpt3/configuration.py (+1, -1)
  25. modelscope/models/nlp/gpt_moe/configuration.py (+1, -1)
  26. modelscope/models/nlp/palm_v2/configuration.py (+1, -1)
  27. modelscope/models/nlp/palm_v2/text_generation.py (+1, -1)
  28. modelscope/models/nlp/plug/configuration.py (+1, -1)
  29. modelscope/models/nlp/plug/distributed_plug.py (+1, -1)
  30. modelscope/models/nlp/ponet/backbone.py (+1, -1)
  31. modelscope/models/nlp/ponet/configuration.py (+1, -1)
  32. modelscope/models/nlp/ponet/fill_mask.py (+1, -1)
  33. modelscope/models/nlp/ponet/tokenization.py (+1, -1)
  34. modelscope/models/nlp/space/configuration.py (+1, -1)
  35. modelscope/models/nlp/space/model/tokenization_space.py (+1, -1)
  36. modelscope/models/nlp/structbert/adv_utils.py (+1, -1)
  37. modelscope/models/nlp/structbert/backbone.py (+1, -1)
  38. modelscope/models/nlp/structbert/configuration.py (+1, -1)
  39. modelscope/models/nlp/structbert/fill_mask.py (+1, -1)
  40. modelscope/models/nlp/structbert/text_classification.py (+1, -1)
  41. modelscope/models/nlp/structbert/token_classification.py (+1, -1)
  42. modelscope/models/nlp/task_models/task_model.py (+1, -1)
  43. modelscope/models/nlp/veco/backbone.py (+1, -1)
  44. modelscope/models/nlp/veco/configuration.py (+1, -1)
  45. modelscope/pipelines/nlp/text_classification_pipeline.py (+1, -1)
  46. modelscope/preprocessors/base.py (+1, -1)
  47. modelscope/preprocessors/nlp/text_classification_preprocessor.py (+1, -1)
  48. modelscope/preprocessors/nlp/text_generation_preprocessor.py (+1, -1)
  49. modelscope/preprocessors/nlp/token_classification_preprocessor.py (+1, -1)
  50. modelscope/trainers/hooks/checkpoint_hook.py (+1, -1)
  51. modelscope/trainers/hooks/lr_scheduler_hook.py (+1, -1)
  52. modelscope/trainers/optimizer/child_tuning_adamw_optimizer.py (+1, -1)
  53. modelscope/utils/checkpoint.py (+1, -1)
  54. modelscope/utils/hub.py (+1, -1)
  55. modelscope/utils/import_utils.py (+0, -2)
  56. modelscope/utils/logger.py (+24, -3)
  57. modelscope/utils/test_utils.py (+2, -0)

modelscope/exporters/torch_model_exporter.py (+1, -1)

@@ -17,7 +17,7 @@ from modelscope.utils.regress_test_utils import (compare_arguments_nested,
                                                  numpify_tensor_nested)
 from .base import Exporter
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 
 class TorchModelExporter(Exporter):
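
A note on the 54 one-line changes in this commit: modelscope's own get_logger takes log_file as its first positional parameter (see the modelscope/utils/logger.py hunk near the end), so get_logger(__name__) quietly passed a module name where a log-file path was expected. Dropping the argument makes every module share one library-wide logger whose handlers and level are configured exactly once, which is what lets the rank-aware setup in logger.py suppress duplicate output. (Call sites that go through transformers' logging simply fall back to that library's root logger.) A minimal stdlib-only sketch, with illustrative names rather than repo code, of why a single shared logger removes duplicates:

import logging


def get_shared_logger() -> logging.Logger:
    logger = logging.getLogger('modelscope')  # one name for the whole library
    if not logger.handlers:  # configure handlers only on the first call
        handler = logging.StreamHandler()
        handler.setFormatter(
            logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
        logger.addHandler(handler)
        logger.setLevel(logging.INFO)
        logger.propagate = False  # keep records away from the root handler
    return logger


log_a = get_shared_logger()  # e.g. called from module A
log_b = get_shared_logger()  # e.g. called from module B
assert log_a is log_b  # same object, so no duplicated handlers
log_a.info('printed exactly once')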


modelscope/models/base/base_torch_head.py (+1, -1)

@@ -6,7 +6,7 @@ import torch
 from modelscope.models.base.base_head import Head
 from modelscope.utils.logger import get_logger
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 
 class TorchHead(Head, torch.nn.Module):


modelscope/models/base/base_torch_model.py (+1, -1)

@@ -10,7 +10,7 @@ from modelscope.utils.hub import parse_label_mapping
 from modelscope.utils.logger import get_logger
 from .base_model import Model
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 
 class TorchModel(Model, torch.nn.Module):


modelscope/models/multi_modal/mplug/configuration_mplug.py (+1, -1)

@@ -23,7 +23,7 @@ from transformers.utils import logging
 
 from modelscope.utils.constant import Tasks
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 
 class MPlugConfig(PretrainedConfig):


modelscope/models/multi_modal/mplug/modeling_mplug.py (+1, -1)

@@ -46,7 +46,7 @@ from modelscope.utils.constant import ModelFile
 
 transformers.logging.set_verbosity_error()
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 CONFIG_NAME = 'config.yaml'



modelscope/models/multi_modal/ofa/configuration_mmspeech.py (+1, -1)

@@ -17,7 +17,7 @@ import warnings
 from transformers import PretrainedConfig
 from transformers.utils import logging
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 
 class MMSpeechConfig(PretrainedConfig):


modelscope/models/multi_modal/ofa/configuration_ofa.py (+1, -1)

@@ -17,7 +17,7 @@ import warnings
 from transformers import PretrainedConfig
 from transformers.utils import logging
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 OFA_PRETRAINED_CONFIG_ARCHIVE_MAP = {
     'ofa-medium': 'https://huggingface.co/ofa-base/resolve/main/config.json',


modelscope/models/multi_modal/ofa/modeling_mmspeech.py (+1, -1)

@@ -44,7 +44,7 @@ from .generate import utils
 from .modeling_ofa import (Embedding, OFADecoder, OFAModel, OFAPreTrainedModel,
                            _expand_mask, shift_tokens_right)
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 _CHECKPOINT_FOR_DOC = 'mmspeech-base'
 _CONFIG_FOR_DOC = 'MMSpeechConfig'


modelscope/models/multi_modal/ofa/modeling_ofa.py (+1, -1)

@@ -38,7 +38,7 @@ from .resnet import ResNet
 from .utils.utils import DropPath
 from .vit import vit_base, vit_huge, vit_large, vit_large_336
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 _CHECKPOINT_FOR_DOC = 'ofa-base'
 _CONFIG_FOR_DOC = 'OFAConfig'


modelscope/models/multi_modal/ofa/tokenization_ofa.py (+1, -1)

@@ -24,7 +24,7 @@ from transformers.utils import logging
 
 from modelscope.utils.constant import ModelFile
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 VOCAB_FILES_NAMES = {'vocab_file': 'vocab.json', 'merges_file': 'merges.txt'}



modelscope/models/multi_modal/ofa/tokenization_ofa_fast.py (+1, -1)

@@ -23,7 +23,7 @@ from transformers.utils import logging
 from modelscope.utils.constant import ModelFile
 from .tokenization_ofa import OFATokenizer, OFATokenizerZH
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 VOCAB_FILES_NAMES = {
     'vocab_file': 'vocab.json',


modelscope/models/nlp/T5/backbone.py (+1, -1)

@@ -41,7 +41,7 @@ from modelscope.utils.constant import Tasks
 from modelscope.utils.logger import get_logger
 from .configuration import T5Config
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 
 ###################################################


modelscope/models/nlp/T5/configuration.py (+1, -1)

@@ -20,7 +20,7 @@ from transformers.onnx import OnnxSeq2SeqConfigWithPast
 
 from modelscope.utils.logger import get_logger
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 
 class T5Config(PretrainedConfig):


modelscope/models/nlp/T5/text2text_generation.py (+1, -1)

@@ -31,7 +31,7 @@ from modelscope.utils.logger import get_logger
 from .backbone import T5PreTrainedModel, T5Stack
 from .configuration import T5Config
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 # Warning message for FutureWarning: head_mask was separated into two input args - head_mask, decoder_head_mask
 __HEAD_MASK_WARNING_MSG = """


modelscope/models/nlp/bert/backbone.py (+1, -1)

@@ -36,7 +36,7 @@ from modelscope.utils.logger import get_logger
 from modelscope.utils.nlp.utils import parse_labels_in_order
 from .configuration import BertConfig
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 _CONFIG_FOR_DOC = 'BertConfig'



modelscope/models/nlp/bert/configuration.py (+1, -1)

@@ -22,7 +22,7 @@ from transformers.onnx import OnnxConfig
 
 from modelscope.utils.logger import get_logger
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 
 class BertConfig(PretrainedConfig):


modelscope/models/nlp/bert/fill_mask.py (+1, -1)

@@ -28,7 +28,7 @@ from modelscope.utils.constant import Tasks
 from .backbone import BertModel, BertPreTrainedModel
 from .configuration import BertConfig
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 
 class BertPredictionHeadTransform(nn.Module):


modelscope/models/nlp/bert/text_classification.py (+1, -1)

@@ -27,7 +27,7 @@ from modelscope.utils import logger as logging
 from modelscope.utils.constant import Tasks
 from .backbone import BertModel, BertPreTrainedModel
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 
 @MODELS.register_module(Tasks.text_classification, module_name=Models.bert)


modelscope/models/nlp/bert/text_ranking.py (+1, -1)

@@ -12,7 +12,7 @@ from modelscope.utils.constant import Tasks
 from .backbone import BertModel
 from .text_classification import BertForSequenceClassification
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 
 @MODELS.register_module(Tasks.text_ranking, module_name=Models.bert)


modelscope/models/nlp/bert/token_classification.py (+1, -1)

@@ -27,7 +27,7 @@ from modelscope.utils import logger as logging
 from modelscope.utils.constant import Tasks
 from .backbone import BertModel, BertPreTrainedModel
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 
 @MODELS.register_module(Tasks.token_classification, module_name=Models.bert)


modelscope/models/nlp/deberta_v2/backbone.py (+1, -1)

@@ -33,7 +33,7 @@ from modelscope.utils import logger as logging
 from modelscope.utils.constant import Tasks
 from .configuration import DebertaV2Config
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 
 # Copied from transformers.models.deberta.modeling_deberta.ContextPooler


modelscope/models/nlp/deberta_v2/configuration.py (+1, -1)

@@ -18,7 +18,7 @@ from transformers import PretrainedConfig
 
 from modelscope.utils import logger as logging
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 
 class DebertaV2Config(PretrainedConfig):


modelscope/models/nlp/deberta_v2/tokenization_fast.py (+1, -1)

@@ -28,7 +28,7 @@ if is_sentencepiece_available():
 else:
     DebertaV2Tokenizer = None
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 VOCAB_FILES_NAMES = {
     'vocab_file': 'spm.model',


modelscope/models/nlp/gpt3/configuration.py (+1, -1)

@@ -17,7 +17,7 @@ import torch
 from transformers.configuration_utils import PretrainedConfig
 from transformers.utils import logging
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 
 class GPT3Config(PretrainedConfig):


modelscope/models/nlp/gpt_moe/configuration.py (+1, -1)

@@ -17,7 +17,7 @@ import torch
 from transformers.configuration_utils import PretrainedConfig
 from transformers.utils import logging
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 
 class GPTMoEConfig(PretrainedConfig):


modelscope/models/nlp/palm_v2/configuration.py (+1, -1)

@@ -19,7 +19,7 @@ from transformers.configuration_utils import PretrainedConfig
 
 from modelscope.utils import logger as logging
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 
 class PalmConfig(PretrainedConfig):


modelscope/models/nlp/palm_v2/text_generation.py (+1, -1)

@@ -760,7 +760,7 @@ class Translator(object):
 
     def __init__(self, model, dataset: str = 'cnn'):
         super().__init__()
-        self.logger = logging.get_logger(__name__)
+        self.logger = logging.get_logger()
         self.args = model.config
         self.args.dataset = dataset
         self.model = model.palm


modelscope/models/nlp/plug/configuration.py (+1, -1)

@@ -21,7 +21,7 @@ from transformers import PretrainedConfig
 
 from modelscope.utils import logger as logging
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 
 class PlugNLUConfig(PretrainedConfig):


modelscope/models/nlp/plug/distributed_plug.py (+1, -1)

@@ -17,7 +17,7 @@ from modelscope.utils.torch_utils import set_random_seed_mpu
 from . import PlugModel
 from .configuration import PlugNLGConfig
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 
 class DistributedPlug(TorchModel):


modelscope/models/nlp/ponet/backbone.py (+1, -1)

@@ -36,7 +36,7 @@ from modelscope.utils.constant import Tasks
 from modelscope.utils.logger import get_logger
 from .configuration import PoNetConfig
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 is_pytorch_12plus = LooseVersion(torch.__version__) >= LooseVersion('1.12.0')



modelscope/models/nlp/ponet/configuration.py (+1, -1)

@@ -18,7 +18,7 @@ from transformers import PretrainedConfig
 
 from modelscope.utils import logger as logging
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 
 class PoNetConfig(PretrainedConfig):


modelscope/models/nlp/ponet/fill_mask.py (+1, -1)

@@ -26,7 +26,7 @@ from modelscope.utils.constant import Tasks
 from modelscope.utils.logger import get_logger
 from .backbone import PoNetModel, PoNetPreTrainedModel
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 
 class PoNetPredictionHeadTransform(nn.Module):


modelscope/models/nlp/ponet/tokenization.py (+1, -1)

@@ -24,7 +24,7 @@ from transformers.tokenization_utils import BatchEncoding, EncodedInput
 from modelscope.utils.constant import ModelFile
 from modelscope.utils.logger import get_logger
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 VOCAB_FILES_NAMES = {'vocab_file': ModelFile.VOCAB_FILE}



modelscope/models/nlp/space/configuration.py (+1, -1)

@@ -20,7 +20,7 @@
 from modelscope.models.nlp.structbert import SbertConfig
 from modelscope.utils import logger as logging
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 
 class SpaceConfig(SbertConfig):


modelscope/models/nlp/space/model/tokenization_space.py (+1, -1)

@@ -19,7 +19,7 @@ from transformers import BasicTokenizer, BertTokenizer, WordpieceTokenizer
 
 from modelscope.utils import logger as logging
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 
 class SpaceTokenizer(BertTokenizer):


modelscope/models/nlp/structbert/adv_utils.py (+1, -1)

@@ -18,7 +18,7 @@ from torch import nn
 
 from modelscope.utils.logger import get_logger
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 
 def _symmetric_kl_div(logits1, logits2, attention_mask=None):


modelscope/models/nlp/structbert/backbone.py (+1, -1)

@@ -39,7 +39,7 @@ from modelscope.utils.logger import get_logger
 from modelscope.utils.nlp.utils import parse_labels_in_order
 from .configuration import SbertConfig
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 
 class SbertEmbeddings(nn.Module):


modelscope/models/nlp/structbert/configuration.py (+1, -1)

@@ -19,7 +19,7 @@ from transformers import PretrainedConfig
 
 from modelscope.utils import logger as logging
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 
 class SbertConfig(PretrainedConfig):


modelscope/models/nlp/structbert/fill_mask.py (+1, -1)

@@ -29,7 +29,7 @@ from modelscope.utils.constant import Tasks
 from .backbone import SbertModel, SbertPreTrainedModel
 from .configuration import SbertConfig
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 
 class SbertPredictionHeadTransform(nn.Module):


modelscope/models/nlp/structbert/text_classification.py (+1, -1)

@@ -29,7 +29,7 @@ from .adv_utils import compute_adv_loss
 from .backbone import SbertModel, SbertPreTrainedModel
 from .configuration import SbertConfig
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 
 @MODELS.register_module(


modelscope/models/nlp/structbert/token_classification.py (+1, -1)

@@ -29,7 +29,7 @@ from .adv_utils import compute_adv_loss
 from .backbone import SbertModel, SbertPreTrainedModel
 from .configuration import SbertConfig
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 
 @MODELS.register_module(


modelscope/models/nlp/task_models/task_model.py (+1, -1)

@@ -15,7 +15,7 @@ from modelscope.utils.constant import Fields, Tasks
 from modelscope.utils.file_utils import func_receive_dict_inputs
 from modelscope.utils.logger import get_logger
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 __all__ = ['EncoderDecoderTaskModelBase', 'SingleBackboneTaskModelBase']



modelscope/models/nlp/veco/backbone.py (+1, -1)

@@ -26,7 +26,7 @@ from modelscope.utils import logger as logging
 from modelscope.utils.constant import Tasks
 from .configuration import VecoConfig
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 VECO_PRETRAINED_MODEL_ARCHIVE_LIST = []



modelscope/models/nlp/veco/configuration.py (+1, -1)

@@ -21,7 +21,7 @@ from transformers import RobertaConfig
 
 from modelscope.utils import logger as logging
 
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 
 
 class VecoConfig(RobertaConfig):


modelscope/pipelines/nlp/text_classification_pipeline.py (+1, -1)

@@ -12,7 +12,7 @@ from modelscope.preprocessors import Preprocessor
 from modelscope.utils.constant import Fields, Tasks
 from modelscope.utils.logger import get_logger
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 
 @PIPELINES.register_module(


modelscope/preprocessors/base.py (+1, -1)

@@ -13,7 +13,7 @@ from modelscope.utils.hub import read_config, snapshot_download
 from modelscope.utils.logger import get_logger
 from .builder import build_preprocessor
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 PREPROCESSOR_MAP = {
     # nlp


modelscope/preprocessors/nlp/text_classification_preprocessor.py (+1, -1)

@@ -14,7 +14,7 @@ from modelscope.utils.logger import get_logger
 from .transformers_tokenizer import NLPTokenizer
 from .utils import labels_to_id, parse_text_and_label
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 
 class TextClassificationPreprocessorBase(Preprocessor):


modelscope/preprocessors/nlp/text_generation_preprocessor.py (+1, -1)

@@ -15,7 +15,7 @@ from modelscope.utils.logger import get_logger
 from .transformers_tokenizer import NLPTokenizer
 from .utils import parse_text_and_label
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 
 class TextGenerationPreprocessorBase(Preprocessor):


modelscope/preprocessors/nlp/token_classification_preprocessor.py (+1, -1)

@@ -16,7 +16,7 @@ from modelscope.utils.type_assert import type_assert
 from .transformers_tokenizer import NLPTokenizer
 from .utils import parse_text_and_label
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 
 @PREPROCESSORS.register_module(


modelscope/trainers/hooks/checkpoint_hook.py (+1, -1)

@@ -70,7 +70,7 @@ class CheckpointHook(Hook):
             os.makedirs(self.save_dir)
 
         if not hasattr(trainer, 'logger'):
-            self.logger = get_logger(__name__)
+            self.logger = get_logger()
         else:
             self.logger = trainer.logger



modelscope/trainers/hooks/lr_scheduler_hook.py (+1, -1)

@@ -99,7 +99,7 @@ class PlateauLrSchedulerHook(LrSchedulerHook):
     def before_run(self, trainer):
         super().before_run(trainer)
         if not hasattr(trainer, 'logger'):
-            self.logger = get_logger(__name__)
+            self.logger = get_logger()
         else:
             self.logger = trainer.logger



modelscope/trainers/optimizer/child_tuning_adamw_optimizer.py (+1, -1)

@@ -24,7 +24,7 @@ from torch.optim import Optimizer
 from modelscope.utils.logger import get_logger
 from .builder import OPTIMIZERS, default_group
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 __all__ = ['calculate_fisher', 'ChildTuningAdamW']



modelscope/utils/checkpoint.py (+1, -1)

@@ -18,7 +18,7 @@ from modelscope.utils.config import JSONIteratorEncoder
 from modelscope.utils.constant import ConfigFields, ModelFile
 from modelscope.utils.logger import get_logger
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 storage = LocalStorage()



modelscope/utils/hub.py (+1, -1)

@@ -14,7 +14,7 @@ from modelscope.utils.constant import (DEFAULT_MODEL_REVISION, ConfigFields,
                                        ModelFile)
 from .logger import get_logger
 
-logger = get_logger(__name__)
+logger = get_logger()
 
 
 def create_model_if_not_exist(


modelscope/utils/import_utils.py (+0, -2)

@@ -21,8 +21,6 @@ from modelscope.utils.ast_utils import (INDEX_KEY, MODULE_KEY, REQUIREMENT_KEY,
 from modelscope.utils.error import *  # noqa
 from modelscope.utils.logger import get_logger
 
-logger = get_logger(__name__)
-
 if sys.version_info < (3, 8):
     import importlib_metadata
 else:


modelscope/utils/logger.py (+24, -3)

@@ -1,5 +1,6 @@
 # Copyright (c) Alibaba, Inc. and its affiliates.
 
+import importlib
 import logging
 from typing import Optional
 
@@ -24,11 +25,27 @@ def get_logger(log_file: Optional[str] = None,
     if logger_name in init_loggers:
         return logger
 
+    # handle duplicate logs to the console
+    # Starting in 1.8.0, PyTorch DDP attaches a StreamHandler <stderr> (NOTSET)
+    # to the root logger. As logger.propagate is True by default, this root
+    # level handler causes logging messages from rank>0 processes to
+    # unexpectedly show up on the console, creating much unwanted clutter.
+    # To fix this issue, we set the root logger's StreamHandler, if any, to log
+    # at the ERROR level.
+    for handler in logger.root.handlers:
+        if type(handler) is logging.StreamHandler:
+            handler.setLevel(logging.ERROR)
+
     stream_handler = logging.StreamHandler()
     handlers = [stream_handler]
 
-    # TODO @wenmeng.zwm add logger setting for distributed environment
-    if log_file is not None:
+    if importlib.util.find_spec('torch') is not None:
+        from modelscope.utils.torch_utils import is_master
+        is_worker0 = is_master()
+    else:
+        is_worker0 = True
+
+    if is_worker0 and log_file is not None:
         file_handler = logging.FileHandler(log_file, file_mode)
         handlers.append(file_handler)
 
@@ -39,7 +56,11 @@ def get_logger(log_file: Optional[str] = None,
         handler.setLevel(log_level)
         logger.addHandler(handler)
 
-    logger.setLevel(log_level)
+    if is_worker0:
+        logger.setLevel(log_level)
+    else:
+        logger.setLevel(logging.ERROR)
 
     init_loggers[logger_name] = True
 
     return logger
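
With this change, get_logger now (1) downgrades the stray root-level StreamHandler that PyTorch DDP attaches since 1.8.0, (2) creates the file handler on the master worker only, and (3) leaves non-master ranks logging at ERROR. A minimal sketch of the resulting behavior, assuming modelscope and torch are installed (demo_logging.py is a hypothetical script, launched with e.g. torchrun --nproc_per_node=2 demo_logging.py):

# demo_logging.py -- illustrative only, not part of this commit
import torch.distributed as dist

from modelscope.utils.logger import get_logger

dist.init_process_group(backend='gloo')  # backend choice is illustrative

logger = get_logger(log_file='train.log')  # file handler created on rank 0 only
logger.info('startup banner')  # shows up once, from the master process
logger.error('real problems still surface from every rank')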

modelscope/utils/test_utils.py (+2, -0)

@@ -230,6 +230,8 @@ class DistributedTestCase(unittest.TestCase):
         tmp_env = copy.deepcopy(os.environ)
         tmp_env['PYTHONPATH'] = ':'.join(
             (tmp_env.get('PYTHONPATH', ''), script_dir)).lstrip(':')
+        # avoid distributed test hang
+        tmp_env['NCCL_P2P_DISABLE'] = '1'
         script_params = '--save_all_ranks=%s --save_file=%s' % (save_all_ranks,
                                                                 tmp_res_file)
         script_cmd = '%s %s %s' % (dist_start_cmd, tmp_run_file, script_params)
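
NCCL_P2P_DISABLE=1 tells NCCL to skip peer-to-peer (NVLink/PCIe) transports between GPUs, a common workaround when multi-GPU tests hang on hosts with flaky P2P support. Outside the test harness, the same pattern might look like this sketch (script name and launcher flags are illustrative):

import copy
import os
import subprocess

env = copy.deepcopy(os.environ)
env['NCCL_P2P_DISABLE'] = '1'  # fall back to non-P2P transports to avoid hangs
subprocess.run(
    ['torchrun', '--nproc_per_node=2', 'train_script.py'],  # hypothetical script
    env=env,
    check=True)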

