diff --git a/fastNLP/io/pipe/matching.py b/fastNLP/io/pipe/matching.py
index dbe69525..f58706fe 100644
--- a/fastNLP/io/pipe/matching.py
+++ b/fastNLP/io/pipe/matching.py
@@ -351,6 +351,9 @@ class MNLIPipe(MatchingPipe):
 
 
 class LCQMCPipe(MatchingPipe):
+    def __init__(self, tokenizer='cn-char'):
+        super().__init__(tokenizer=tokenizer)
+
     def process_from_file(self, paths=None):
         data_bundle = LCQMCLoader().load(paths)
         data_bundle = RenamePipe().process(data_bundle)
@@ -360,6 +363,9 @@ class LCQMCPipe(MatchingPipe):
 
 
 class CNXNLIPipe(MatchingPipe):
+    def __init__(self, tokenizer='cn-char'):
+        super().__init__(tokenizer=tokenizer)
+
     def process_from_file(self, paths=None):
         data_bundle = CNXNLILoader().load(paths)
         data_bundle = GranularizePipe(task='XNLI').process(data_bundle)
@@ -370,6 +376,9 @@ class CNXNLIPipe(MatchingPipe):
 
 
 class BQCorpusPipe(MatchingPipe):
+    def __init__(self, tokenizer='cn-char'):
+        super().__init__(tokenizer=tokenizer)
+
     def process_from_file(self, paths=None):
         data_bundle = BQCorpusLoader().load(paths)
         data_bundle = RenamePipe().process(data_bundle)
@@ -462,6 +471,9 @@ class MachingTruncatePipe(Pipe): # truncate sentence for bert, modify seq_len
 
 
 class LCQMCBertPipe(MatchingBertPipe):
+    def __init__(self, tokenizer='cn-char'):
+        super().__init__(tokenizer=tokenizer)
+
     def process_from_file(self, paths=None):
         data_bundle = LCQMCLoader().load(paths)
         data_bundle = RenamePipe(task='cn-nli-bert').process(data_bundle)
@@ -472,6 +484,9 @@ class LCQMCBertPipe(MatchingBertPipe):
 
 
 class BQCorpusBertPipe(MatchingBertPipe):
+    def __init__(self, tokenizer='cn-char'):
+        super().__init__(tokenizer=tokenizer)
+
     def process_from_file(self, paths=None):
         data_bundle = BQCorpusLoader().load(paths)
         data_bundle = RenamePipe(task='cn-nli-bert').process(data_bundle)
@@ -482,6 +497,9 @@
 
 
 class CNXNLIBertPipe(MatchingBertPipe):
+    def __init__(self, tokenizer='cn-char'):
+        super().__init__(tokenizer=tokenizer)
+
     def process_from_file(self, paths=None):
         data_bundle = CNXNLILoader().load(paths)
         data_bundle = GranularizePipe(task='XNLI').process(data_bundle)