From c24d01d50f78c96f1cf91a74d9cc0a8195899705 Mon Sep 17 00:00:00 2001
From: 2017alan <17210240044@fudan.edu.cn>
Date: Sat, 15 Sep 2018 17:15:25 +0800
Subject: [PATCH] fix a bug in label2index dict.

---
 fastNLP/core/preprocess.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/fastNLP/core/preprocess.py b/fastNLP/core/preprocess.py
index f8142c36..e049c762 100644
--- a/fastNLP/core/preprocess.py
+++ b/fastNLP/core/preprocess.py
@@ -239,7 +239,7 @@ class SeqLabelPreprocess(BasePreprocess):
         label2index: dict of {str, int}
         """
         # In seq labeling, both word seq and label seq need to be padded to the same length in a mini-batch.
-        label2index = DEFAULT_WORD_TO_INDEX.copy()
+        label2index = {} # DEFAULT_WORD_TO_INDEX.copy()
         word2index = DEFAULT_WORD_TO_INDEX.copy()
         for example in data:
             for word, label in zip(example[0], example[1]):
@@ -297,7 +297,7 @@ class ClassPreprocess(BasePreprocess):
 
         # build vocabulary from scratch if nothing exists
         word2index = DEFAULT_WORD_TO_INDEX.copy()
-        label2index = DEFAULT_WORD_TO_INDEX.copy()
+        label2index = {} # DEFAULT_WORD_TO_INDEX.copy()
 
         # collect every word and label
         for sent, label in data:
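
Note for reviewers (not part of the patch to apply): a minimal sketch of what the bug looks like in practice, assuming DEFAULT_WORD_TO_INDEX reserves special word tokens such as '<pad>' and '<unk>' (its exact contents are not shown in this diff), and using a hypothetical build_label_dict helper to stand in for the indexing loops in build_dict:

    # The DEFAULT_WORD_TO_INDEX contents below are an assumption for
    # illustration; build_label_dict is a hypothetical stand-in for the
    # label-indexing loops in SeqLabelPreprocess / ClassPreprocess.
    DEFAULT_WORD_TO_INDEX = {"<pad>": 0, "<unk>": 1}

    def build_label_dict(labels, seed_with_defaults):
        # Before the patch: start from a copy of the word-vocabulary defaults.
        # After the patch: start from an empty dict.
        label2index = DEFAULT_WORD_TO_INDEX.copy() if seed_with_defaults else {}
        for label in labels:
            if label not in label2index:
                label2index[label] = len(label2index)
        return label2index

    labels = ["B-PER", "I-PER", "O"]

    print(build_label_dict(labels, seed_with_defaults=True))
    # {'<pad>': 0, '<unk>': 1, 'B-PER': 2, 'I-PER': 3, 'O': 4}
    # -> the label vocabulary is inflated with word-vocabulary specials
    #    that never occur as labels in the data.

    print(build_label_dict(labels, seed_with_defaults=False))
    # {'B-PER': 0, 'I-PER': 1, 'O': 2}
    # -> only labels actually seen in the data are indexed.

Starting label2index from {} keeps the label vocabulary limited to labels that actually occur, so any downstream code that sizes an output layer from len(label2index) would no longer count the word-vocabulary specials as extra classes.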