Link: https://code.alibaba-inc.com/Ali-MaaS/MaaS-lib/codereview/10959078 (master^2)
@@ -17,7 +17,7 @@ from modelscope.utils.regress_test_utils import (compare_arguments_nested,
                                                   numpify_tensor_nested)
 from .base import Exporter
-logger = get_logger(__name__)
+logger = get_logger()
 class TorchModelExporter(Exporter):
@@ -6,7 +6,7 @@ import torch
 from modelscope.models.base.base_head import Head
 from modelscope.utils.logger import get_logger
-logger = get_logger(__name__)
+logger = get_logger()
 class TorchHead(Head, torch.nn.Module):
@@ -10,7 +10,7 @@ from modelscope.utils.hub import parse_label_mapping
 from modelscope.utils.logger import get_logger
 from .base_model import Model
-logger = get_logger(__name__)
+logger = get_logger()
 class TorchModel(Model, torch.nn.Module):
@@ -23,7 +23,7 @@ from transformers.utils import logging
 from modelscope.utils.constant import Tasks
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 class MPlugConfig(PretrainedConfig):
@@ -46,7 +46,7 @@ from modelscope.utils.constant import ModelFile
 transformers.logging.set_verbosity_error()
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 CONFIG_NAME = 'config.yaml'
@@ -17,7 +17,7 @@ import warnings
 from transformers import PretrainedConfig
 from transformers.utils import logging
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 class MMSpeechConfig(PretrainedConfig):
@@ -17,7 +17,7 @@ import warnings
 from transformers import PretrainedConfig
 from transformers.utils import logging
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 OFA_PRETRAINED_CONFIG_ARCHIVE_MAP = {
     'ofa-medium': 'https://huggingface.co/ofa-base/resolve/main/config.json',
@@ -44,7 +44,7 @@ from .generate import utils
 from .modeling_ofa import (Embedding, OFADecoder, OFAModel, OFAPreTrainedModel,
                            _expand_mask, shift_tokens_right)
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 _CHECKPOINT_FOR_DOC = 'mmspeech-base'
 _CONFIG_FOR_DOC = 'MMSpeechConfig'
@@ -38,7 +38,7 @@ from .resnet import ResNet
 from .utils.utils import DropPath
 from .vit import vit_base, vit_huge, vit_large, vit_large_336
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 _CHECKPOINT_FOR_DOC = 'ofa-base'
 _CONFIG_FOR_DOC = 'OFAConfig'
@@ -24,7 +24,7 @@ from transformers.utils import logging
 from modelscope.utils.constant import ModelFile
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 VOCAB_FILES_NAMES = {'vocab_file': 'vocab.json', 'merges_file': 'merges.txt'}
@@ -23,7 +23,7 @@ from transformers.utils import logging
 from modelscope.utils.constant import ModelFile
 from .tokenization_ofa import OFATokenizer, OFATokenizerZH
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 VOCAB_FILES_NAMES = {
     'vocab_file': 'vocab.json',
@@ -41,7 +41,7 @@ from modelscope.utils.constant import Tasks
 from modelscope.utils.logger import get_logger
 from .configuration import T5Config
-logger = get_logger(__name__)
+logger = get_logger()
 ###################################################
@@ -20,7 +20,7 @@ from transformers.onnx import OnnxSeq2SeqConfigWithPast
 from modelscope.utils.logger import get_logger
-logger = get_logger(__name__)
+logger = get_logger()
 class T5Config(PretrainedConfig):
@@ -31,7 +31,7 @@ from modelscope.utils.logger import get_logger
 from .backbone import T5PreTrainedModel, T5Stack
 from .configuration import T5Config
-logger = get_logger(__name__)
+logger = get_logger()
 # Warning message for FutureWarning: head_mask was separated into two input args - head_mask, decoder_head_mask
 __HEAD_MASK_WARNING_MSG = """
@@ -36,7 +36,7 @@ from modelscope.utils.logger import get_logger
 from modelscope.utils.nlp.utils import parse_labels_in_order
 from .configuration import BertConfig
-logger = get_logger(__name__)
+logger = get_logger()
 _CONFIG_FOR_DOC = 'BertConfig'
@@ -22,7 +22,7 @@ from transformers.onnx import OnnxConfig
 from modelscope.utils.logger import get_logger
-logger = get_logger(__name__)
+logger = get_logger()
 class BertConfig(PretrainedConfig):
@@ -28,7 +28,7 @@ from modelscope.utils.constant import Tasks
 from .backbone import BertModel, BertPreTrainedModel
 from .configuration import BertConfig
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 class BertPredictionHeadTransform(nn.Module):
@@ -27,7 +27,7 @@ from modelscope.utils import logger as logging
 from modelscope.utils.constant import Tasks
 from .backbone import BertModel, BertPreTrainedModel
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 @MODELS.register_module(Tasks.text_classification, module_name=Models.bert)
@@ -12,7 +12,7 @@ from modelscope.utils.constant import Tasks
 from .backbone import BertModel
 from .text_classification import BertForSequenceClassification
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 @MODELS.register_module(Tasks.text_ranking, module_name=Models.bert)
@@ -27,7 +27,7 @@ from modelscope.utils import logger as logging
 from modelscope.utils.constant import Tasks
 from .backbone import BertModel, BertPreTrainedModel
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 @MODELS.register_module(Tasks.token_classification, module_name=Models.bert)
@@ -33,7 +33,7 @@ from modelscope.utils import logger as logging
 from modelscope.utils.constant import Tasks
 from .configuration import DebertaV2Config
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 # Copied from transformers.models.deberta.modeling_deberta.ContextPooler
@@ -18,7 +18,7 @@ from transformers import PretrainedConfig
 from modelscope.utils import logger as logging
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 class DebertaV2Config(PretrainedConfig):
@@ -28,7 +28,7 @@ if is_sentencepiece_available():
 else:
     DebertaV2Tokenizer = None
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 VOCAB_FILES_NAMES = {
     'vocab_file': 'spm.model',
@@ -17,7 +17,7 @@ import torch
 from transformers.configuration_utils import PretrainedConfig
 from transformers.utils import logging
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 class GPT3Config(PretrainedConfig):
@@ -17,7 +17,7 @@ import torch
 from transformers.configuration_utils import PretrainedConfig
 from transformers.utils import logging
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 class GPTMoEConfig(PretrainedConfig):
@@ -19,7 +19,7 @@ from transformers.configuration_utils import PretrainedConfig
 from modelscope.utils import logger as logging
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 class PalmConfig(PretrainedConfig):
@@ -760,7 +760,7 @@ class Translator(object):
     def __init__(self, model, dataset: str = 'cnn'):
         super().__init__()
-        self.logger = logging.get_logger(__name__)
+        self.logger = logging.get_logger()
         self.args = model.config
         self.args.dataset = dataset
         self.model = model.palm
@@ -21,7 +21,7 @@ from transformers import PretrainedConfig
 from modelscope.utils import logger as logging
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 class PlugNLUConfig(PretrainedConfig):
@@ -17,7 +17,7 @@ from modelscope.utils.torch_utils import set_random_seed_mpu
 from . import PlugModel
 from .configuration import PlugNLGConfig
-logger = get_logger(__name__)
+logger = get_logger()
 class DistributedPlug(TorchModel):
@@ -36,7 +36,7 @@ from modelscope.utils.constant import Tasks
 from modelscope.utils.logger import get_logger
 from .configuration import PoNetConfig
-logger = get_logger(__name__)
+logger = get_logger()
 is_pytorch_12plus = LooseVersion(torch.__version__) >= LooseVersion('1.12.0')
@@ -18,7 +18,7 @@ from transformers import PretrainedConfig
 from modelscope.utils import logger as logging
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 class PoNetConfig(PretrainedConfig):
@@ -26,7 +26,7 @@ from modelscope.utils.constant import Tasks
 from modelscope.utils.logger import get_logger
 from .backbone import PoNetModel, PoNetPreTrainedModel
-logger = get_logger(__name__)
+logger = get_logger()
 class PoNetPredictionHeadTransform(nn.Module):
@@ -24,7 +24,7 @@ from transformers.tokenization_utils import BatchEncoding, EncodedInput
 from modelscope.utils.constant import ModelFile
 from modelscope.utils.logger import get_logger
-logger = get_logger(__name__)
+logger = get_logger()
 VOCAB_FILES_NAMES = {'vocab_file': ModelFile.VOCAB_FILE}
@@ -20,7 +20,7 @@
 from modelscope.models.nlp.structbert import SbertConfig
 from modelscope.utils import logger as logging
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 class SpaceConfig(SbertConfig):
@@ -19,7 +19,7 @@ from transformers import BasicTokenizer, BertTokenizer, WordpieceTokenizer
 from modelscope.utils import logger as logging
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 class SpaceTokenizer(BertTokenizer):
@@ -18,7 +18,7 @@ from torch import nn
 from modelscope.utils.logger import get_logger
-logger = get_logger(__name__)
+logger = get_logger()
 def _symmetric_kl_div(logits1, logits2, attention_mask=None):
@@ -39,7 +39,7 @@ from modelscope.utils.logger import get_logger
 from modelscope.utils.nlp.utils import parse_labels_in_order
 from .configuration import SbertConfig
-logger = get_logger(__name__)
+logger = get_logger()
 class SbertEmbeddings(nn.Module):
@@ -19,7 +19,7 @@ from transformers import PretrainedConfig
 from modelscope.utils import logger as logging
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 class SbertConfig(PretrainedConfig):
@@ -29,7 +29,7 @@ from modelscope.utils.constant import Tasks
 from .backbone import SbertModel, SbertPreTrainedModel
 from .configuration import SbertConfig
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 class SbertPredictionHeadTransform(nn.Module):
@@ -29,7 +29,7 @@ from .adv_utils import compute_adv_loss
 from .backbone import SbertModel, SbertPreTrainedModel
 from .configuration import SbertConfig
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 @MODELS.register_module(
@@ -29,7 +29,7 @@ from .adv_utils import compute_adv_loss
 from .backbone import SbertModel, SbertPreTrainedModel
 from .configuration import SbertConfig
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 @MODELS.register_module(
@@ -15,7 +15,7 @@ from modelscope.utils.constant import Fields, Tasks
 from modelscope.utils.file_utils import func_receive_dict_inputs
 from modelscope.utils.logger import get_logger
-logger = get_logger(__name__)
+logger = get_logger()
 __all__ = ['EncoderDecoderTaskModelBase', 'SingleBackboneTaskModelBase']
@@ -26,7 +26,7 @@ from modelscope.utils import logger as logging
 from modelscope.utils.constant import Tasks
 from .configuration import VecoConfig
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 VECO_PRETRAINED_MODEL_ARCHIVE_LIST = []
@@ -21,7 +21,7 @@ from transformers import RobertaConfig
 from modelscope.utils import logger as logging
-logger = logging.get_logger(__name__)
+logger = logging.get_logger()
 class VecoConfig(RobertaConfig):
@@ -12,7 +12,7 @@ from modelscope.preprocessors import Preprocessor
 from modelscope.utils.constant import Fields, Tasks
 from modelscope.utils.logger import get_logger
-logger = get_logger(__name__)
+logger = get_logger()
 @PIPELINES.register_module(
@@ -13,7 +13,7 @@ from modelscope.utils.hub import read_config, snapshot_download
 from modelscope.utils.logger import get_logger
 from .builder import build_preprocessor
-logger = get_logger(__name__)
+logger = get_logger()
 PREPROCESSOR_MAP = {
     # nlp
@@ -14,7 +14,7 @@ from modelscope.utils.logger import get_logger
 from .transformers_tokenizer import NLPTokenizer
 from .utils import labels_to_id, parse_text_and_label
-logger = get_logger(__name__)
+logger = get_logger()
 class TextClassificationPreprocessorBase(Preprocessor):
@@ -15,7 +15,7 @@ from modelscope.utils.logger import get_logger
 from .transformers_tokenizer import NLPTokenizer
 from .utils import parse_text_and_label
-logger = get_logger(__name__)
+logger = get_logger()
 class TextGenerationPreprocessorBase(Preprocessor):
@@ -16,7 +16,7 @@ from modelscope.utils.type_assert import type_assert
 from .transformers_tokenizer import NLPTokenizer
 from .utils import parse_text_and_label
-logger = get_logger(__name__)
+logger = get_logger()
 @PREPROCESSORS.register_module(
@@ -70,7 +70,7 @@ class CheckpointHook(Hook):
             os.makedirs(self.save_dir)
         if not hasattr(trainer, 'logger'):
-            self.logger = get_logger(__name__)
+            self.logger = get_logger()
         else:
             self.logger = trainer.logger
@@ -99,7 +99,7 @@ class PlateauLrSchedulerHook(LrSchedulerHook):
     def before_run(self, trainer):
         super().before_run(trainer)
         if not hasattr(trainer, 'logger'):
-            self.logger = get_logger(__name__)
+            self.logger = get_logger()
         else:
             self.logger = trainer.logger
@@ -24,7 +24,7 @@ from torch.optim import Optimizer
 from modelscope.utils.logger import get_logger
 from .builder import OPTIMIZERS, default_group
-logger = get_logger(__name__)
+logger = get_logger()
 __all__ = ['calculate_fisher', 'ChildTuningAdamW']
@@ -18,7 +18,7 @@ from modelscope.utils.config import JSONIteratorEncoder
 from modelscope.utils.constant import ConfigFields, ModelFile
 from modelscope.utils.logger import get_logger
-logger = get_logger(__name__)
+logger = get_logger()
 storage = LocalStorage()
@@ -14,7 +14,7 @@ from modelscope.utils.constant import (DEFAULT_MODEL_REVISION, ConfigFields,
                                        ModelFile)
 from .logger import get_logger
-logger = get_logger(__name__)
+logger = get_logger()
 def create_model_if_not_exist(
@@ -21,8 +21,6 @@ from modelscope.utils.ast_utils import (INDEX_KEY, MODULE_KEY, REQUIREMENT_KEY,
 from modelscope.utils.error import *  # noqa
 from modelscope.utils.logger import get_logger
-logger = get_logger(__name__)
 if sys.version_info < (3, 8):
     import importlib_metadata
 else:
@@ -1,5 +1,6 @@
 # Copyright (c) Alibaba, Inc. and its affiliates.
+import importlib
 import logging
 from typing import Optional
@@ -24,11 +25,27 @@ def get_logger(log_file: Optional[str] = None,
     if logger_name in init_loggers:
         return logger
+    # handle duplicate logs to the console
+    # Starting in 1.8.0, PyTorch DDP attaches a StreamHandler <stderr> (NOTSET)
+    # to the root logger. As logger.propagate is True by default, this root
+    # level handler causes logging messages from rank>0 processes to
+    # unexpectedly show up on the console, creating much unwanted clutter.
+    # To fix this issue, we set the root logger's StreamHandler, if any, to log
+    # at the ERROR level.
+    for handler in logger.root.handlers:
+        if type(handler) is logging.StreamHandler:
+            handler.setLevel(logging.ERROR)
     stream_handler = logging.StreamHandler()
     handlers = [stream_handler]
-    # TODO @wenmeng.zwm add logger setting for distributed environment
-    if log_file is not None:
+    if importlib.util.find_spec('torch') is not None:
+        from modelscope.utils.torch_utils import is_master
+        is_worker0 = is_master()
+    else:
+        is_worker0 = True
+    if is_worker0 and log_file is not None:
         file_handler = logging.FileHandler(log_file, file_mode)
         handlers.append(file_handler)
@@ -39,7 +56,11 @@ def get_logger(log_file: Optional[str] = None,
         handler.setLevel(log_level)
         logger.addHandler(handler)
-    logger.setLevel(log_level)
+    if is_worker0:
+        logger.setLevel(log_level)
+    else:
+        logger.setLevel(logging.ERROR)
     init_loggers[logger_name] = True
     return logger
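For context, a minimal usage sketch of the updated get_logger; the log file name and the launch command mentioned in the comment are illustrative assumptions, not part of this change:

from modelscope.utils.logger import get_logger

# get_logger() is now called without a module name. Under a distributed
# launch (e.g. `torchrun --nproc_per_node=2 train.py`), is_master() gates
# both the file handler and the logger level: the master rank logs at
# log_level and writes the log file, while every other rank is raised to
# ERROR, so its console output is suppressed.
logger = get_logger(log_file='train.log')
logger.info('emitted on the master rank; suppressed on other ranks')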
@@ -230,6 +230,8 @@ class DistributedTestCase(unittest.TestCase):
         tmp_env = copy.deepcopy(os.environ)
         tmp_env['PYTHONPATH'] = ':'.join(
             (tmp_env.get('PYTHONPATH', ''), script_dir)).lstrip(':')
+        # avoid distributed test hang
+        tmp_env['NCCL_P2P_DISABLE'] = '1'
         script_params = '--save_all_ranks=%s --save_file=%s' % (save_all_ranks,
                                                                 tmp_res_file)
         script_cmd = '%s %s %s' % (dist_start_cmd, tmp_run_file, script_params)
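For reference, a rough stand-alone equivalent of what the test helper now does for its subprocess; the script name and process count below are placeholders assumed for illustration only:

import os
import subprocess

# Copy the current environment and disable NCCL peer-to-peer transport,
# mirroring the tmp_env change above that avoids distributed test hangs.
env = dict(os.environ, NCCL_P2P_DISABLE='1')
subprocess.run(
    ['python', '-m', 'torch.distributed.run', '--nproc_per_node=2',
     'dist_test_script.py'],
    env=env,
    check=True)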