Browse Source

Merge branch 'dev0.5.0' of https://github.com/fastnlp/fastNLP into dev0.5.0

tags/v0.5.0
yh 5 years ago
parent
commit
c843c56029
1 changed file with 18 additions and 0 deletions
  1. +18
    -0
      fastNLP/io/pipe/matching.py

+ 18
- 0
fastNLP/io/pipe/matching.py View File

@@ -351,6 +351,9 @@ class MNLIPipe(MatchingPipe):


class LCQMCPipe(MatchingPipe):
def __init__(self, tokenizer='cn-char'):
    """
    Initialize the LCQMC pipe.

    :param str tokenizer: tokenizer identifier forwarded to ``MatchingPipe``.
        Default fixed from the typo ``'cn=char'`` to ``'cn-char'``, matching
        every sibling pipe (``CNXNLIPipe``, ``BQCorpusPipe``, …) — presumably
        character-level Chinese tokenization; confirm against ``MatchingPipe``.
    """
    super().__init__(tokenizer=tokenizer)

def process_from_file(self, paths=None):
data_bundle = LCQMCLoader().load(paths)
data_bundle = RenamePipe().process(data_bundle)
@@ -360,6 +363,9 @@ class LCQMCPipe(MatchingPipe):


class CNXNLIPipe(MatchingPipe):
def __init__(self, tokenizer='cn-char'):
    """
    Initialize the Chinese XNLI pipe.

    :param str tokenizer: tokenizer identifier passed through to
        ``MatchingPipe`` unchanged; defaults to ``'cn-char'`` (presumably
        character-level Chinese tokenization — confirm in ``MatchingPipe``).
    """
    # Delegate all tokenizer handling to the shared matching-pipe base class.
    super().__init__(tokenizer=tokenizer)

def process_from_file(self, paths=None):
data_bundle = CNXNLILoader().load(paths)
data_bundle = GranularizePipe(task='XNLI').process(data_bundle)
@@ -370,6 +376,9 @@ class CNXNLIPipe(MatchingPipe):


class BQCorpusPipe(MatchingPipe):
def __init__(self, tokenizer='cn-char'):
    """
    Initialize the BQ Corpus pipe.

    :param str tokenizer: tokenizer identifier forwarded verbatim to
        ``MatchingPipe``; defaults to ``'cn-char'`` (presumably
        character-level Chinese tokenization — confirm in ``MatchingPipe``).
    """
    # No extra state here; the base class owns tokenizer configuration.
    super().__init__(tokenizer=tokenizer)

def process_from_file(self, paths=None):
data_bundle = BQCorpusLoader().load(paths)
data_bundle = RenamePipe().process(data_bundle)
@@ -462,6 +471,9 @@ class MachingTruncatePipe(Pipe): # truncate sentence for bert, modify seq_len


class LCQMCBertPipe(MatchingBertPipe):
def __init__(self, tokenizer='cn-char'):
    """
    Initialize the LCQMC BERT pipe.

    :param str tokenizer: tokenizer identifier forwarded to
        ``MatchingBertPipe``. Default fixed from the typo ``'cn=char'`` to
        ``'cn-char'``, matching every sibling BERT pipe (``BQCorpusBertPipe``,
        ``CNXNLIBertPipe``) — presumably character-level Chinese tokenization;
        confirm against ``MatchingBertPipe``.
    """
    super().__init__(tokenizer=tokenizer)

def process_from_file(self, paths=None):
data_bundle = LCQMCLoader().load(paths)
data_bundle = RenamePipe(task='cn-nli-bert').process(data_bundle)
@@ -472,6 +484,9 @@ class LCQMCBertPipe(MatchingBertPipe):


class BQCorpusBertPipe(MatchingBertPipe):
def __init__(self, tokenizer='cn-char'):
    """
    Initialize the BQ Corpus BERT pipe.

    :param str tokenizer: tokenizer identifier handed straight to
        ``MatchingBertPipe``; defaults to ``'cn-char'`` (presumably
        character-level Chinese tokenization — confirm in base class).
    """
    # All configuration lives in the BERT matching-pipe base class.
    super().__init__(tokenizer=tokenizer)

def process_from_file(self, paths=None):
data_bundle = BQCorpusLoader().load(paths)
data_bundle = RenamePipe(task='cn-nli-bert').process(data_bundle)
@@ -482,6 +497,9 @@ class BQCorpusBertPipe(MatchingBertPipe):


class CNXNLIBertPipe(MatchingBertPipe):
def __init__(self, tokenizer='cn-char'):
    """
    Initialize the Chinese XNLI BERT pipe.

    :param str tokenizer: tokenizer identifier passed through unchanged to
        ``MatchingBertPipe``; defaults to ``'cn-char'`` (presumably
        character-level Chinese tokenization — confirm in base class).
    """
    # Nothing pipe-specific to set up; defer entirely to the base class.
    super().__init__(tokenizer=tokenizer)

def process_from_file(self, paths=None):
data_bundle = CNXNLILoader().load(paths)
data_bundle = GranularizePipe(task='XNLI').process(data_bundle)


Loading…
Cancel
Save