From 58e73631147e5a4a495025c2cb6f2518d81951d2 Mon Sep 17 00:00:00 2001 From: ChenXin Date: Sun, 12 May 2019 17:55:54 +0800 Subject: [PATCH 01/19] =?UTF-8?q?=E4=BF=AE=E6=94=B9=E6=96=87=E6=A1=A3?= =?UTF-8?q?=E4=B8=AD=E7=9A=84=E4=B8=AD=E6=96=87=E5=90=8D=E7=A7=B0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/source/fastNLP.component.bert_tokenizer.rst | 7 ------- docs/source/fastNLP.component.rst | 15 --------------- docs/source/fastNLP.core.batch.rst | 4 ++-- docs/source/fastNLP.core.callback.rst | 4 ++-- docs/source/fastNLP.core.const.rst | 4 ++-- docs/source/fastNLP.core.dataset.rst | 4 ++-- docs/source/fastNLP.core.field.rst | 4 ++-- docs/source/fastNLP.core.instance.rst | 4 ++-- docs/source/fastNLP.core.losses.rst | 4 ++-- docs/source/fastNLP.core.metrics.rst | 4 ++-- docs/source/fastNLP.core.optimizer.rst | 4 ++-- docs/source/fastNLP.core.rst | 4 ++-- docs/source/fastNLP.core.sampler.rst | 4 ++-- docs/source/fastNLP.core.tester.rst | 4 ++-- docs/source/fastNLP.core.trainer.rst | 4 ++-- docs/source/fastNLP.core.utils.rst | 4 ++-- docs/source/fastNLP.core.vocabulary.rst | 4 ++-- docs/source/fastNLP.io.base_loader.rst | 4 ++-- docs/source/fastNLP.io.dataset_loader.rst | 4 ++-- docs/source/fastNLP.io.embed_loader.rst | 4 ++-- docs/source/fastNLP.io.model_io.rst | 4 ++-- docs/source/fastNLP.io.rst | 4 ++-- docs/source/fastNLP.models.base_model.rst | 4 ++-- docs/source/fastNLP.models.bert.rst | 4 ++-- docs/source/fastNLP.models.biaffine_parser.rst | 4 ++-- .../fastNLP.models.cnn_text_classification.rst | 4 ++-- docs/source/fastNLP.models.enas_controller.rst | 4 ++-- docs/source/fastNLP.models.enas_model.rst | 4 ++-- docs/source/fastNLP.models.enas_trainer.rst | 4 ++-- docs/source/fastNLP.models.enas_utils.rst | 4 ++-- docs/source/fastNLP.models.rst | 4 ++-- docs/source/fastNLP.models.sequence_labeling.rst | 4 ++-- docs/source/fastNLP.models.snli.rst | 4 ++-- docs/source/fastNLP.models.star_transformer.rst | 4 ++-- .../fastNLP.modules.aggregator.attention.rst | 4 ++-- .../source/fastNLP.modules.aggregator.pooling.rst | 4 ++-- docs/source/fastNLP.modules.aggregator.rst | 6 +++--- docs/source/fastNLP.modules.decoder.CRF.rst | 4 ++-- docs/source/fastNLP.modules.decoder.MLP.rst | 4 ++-- docs/source/fastNLP.modules.decoder.rst | 6 +++--- docs/source/fastNLP.modules.decoder.utils.rst | 4 ++-- docs/source/fastNLP.modules.encoder.bert.rst | 4 ++-- .../fastNLP.modules.encoder.char_encoder.rst | 4 ++-- .../fastNLP.modules.encoder.conv_maxpool.rst | 4 ++-- docs/source/fastNLP.modules.encoder.embedding.rst | 4 ++-- docs/source/fastNLP.modules.encoder.lstm.rst | 4 ++-- docs/source/fastNLP.modules.encoder.rst | 6 +++--- .../fastNLP.modules.encoder.star_transformer.rst | 4 ++-- .../fastNLP.modules.encoder.transformer.rst | 4 ++-- .../fastNLP.modules.encoder.variational_rnn.rst | 4 ++-- docs/source/fastNLP.modules.rst | 4 ++-- docs/source/fastNLP.rst | 8 ++++---- docs/source/index.rst | 7 +------ 53 files changed, 106 insertions(+), 133 deletions(-) delete mode 100644 docs/source/fastNLP.component.bert_tokenizer.rst delete mode 100644 docs/source/fastNLP.component.rst diff --git a/docs/source/fastNLP.component.bert_tokenizer.rst b/docs/source/fastNLP.component.bert_tokenizer.rst deleted file mode 100644 index ccfc50c6..00000000 --- a/docs/source/fastNLP.component.bert_tokenizer.rst +++ /dev/null @@ -1,7 +0,0 @@ -fastNLP.component.bert\_tokenizer module -======================================== - -.. 
automodule:: fastNLP.component.bert_tokenizer - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/fastNLP.component.rst b/docs/source/fastNLP.component.rst deleted file mode 100644 index 3e15fa47..00000000 --- a/docs/source/fastNLP.component.rst +++ /dev/null @@ -1,15 +0,0 @@ -fastNLP.component package -========================= - -.. automodule:: fastNLP.component - :members: - :undoc-members: - :show-inheritance: - -Submodules ----------- - -.. toctree:: - - fastNLP.component.bert_tokenizer - diff --git a/docs/source/fastNLP.core.batch.rst b/docs/source/fastNLP.core.batch.rst index b0294bac..33a5b730 100644 --- a/docs/source/fastNLP.core.batch.rst +++ b/docs/source/fastNLP.core.batch.rst @@ -1,5 +1,5 @@ -fastNLP.core.batch module -========================= +fastNLP.core.batch +================== .. automodule:: fastNLP.core.batch :members: diff --git a/docs/source/fastNLP.core.callback.rst b/docs/source/fastNLP.core.callback.rst index 075712e8..31ec627b 100644 --- a/docs/source/fastNLP.core.callback.rst +++ b/docs/source/fastNLP.core.callback.rst @@ -1,5 +1,5 @@ -fastNLP.core.callback module -============================ +fastNLP.core.callback +===================== .. automodule:: fastNLP.core.callback :members: diff --git a/docs/source/fastNLP.core.const.rst b/docs/source/fastNLP.core.const.rst index 288fcf50..c9e3bd97 100644 --- a/docs/source/fastNLP.core.const.rst +++ b/docs/source/fastNLP.core.const.rst @@ -1,5 +1,5 @@ -fastNLP.core.const module -========================= +fastNLP.core.const +================== .. automodule:: fastNLP.core.const :members: diff --git a/docs/source/fastNLP.core.dataset.rst b/docs/source/fastNLP.core.dataset.rst index 24e1ab4e..b377cb0f 100644 --- a/docs/source/fastNLP.core.dataset.rst +++ b/docs/source/fastNLP.core.dataset.rst @@ -1,5 +1,5 @@ -fastNLP.core.dataset module -=========================== +fastNLP.core.dataset +==================== .. automodule:: fastNLP.core.dataset :members: diff --git a/docs/source/fastNLP.core.field.rst b/docs/source/fastNLP.core.field.rst index 23eb47bc..7686e79a 100644 --- a/docs/source/fastNLP.core.field.rst +++ b/docs/source/fastNLP.core.field.rst @@ -1,5 +1,5 @@ -fastNLP.core.field module -========================= +fastNLP.core.field +================== .. automodule:: fastNLP.core.field :members: diff --git a/docs/source/fastNLP.core.instance.rst b/docs/source/fastNLP.core.instance.rst index db731fe9..14393a91 100644 --- a/docs/source/fastNLP.core.instance.rst +++ b/docs/source/fastNLP.core.instance.rst @@ -1,5 +1,5 @@ -fastNLP.core.instance module -============================ +fastNLP.core.instance +===================== .. automodule:: fastNLP.core.instance :members: diff --git a/docs/source/fastNLP.core.losses.rst b/docs/source/fastNLP.core.losses.rst index 7f4e02b6..d2dd492b 100644 --- a/docs/source/fastNLP.core.losses.rst +++ b/docs/source/fastNLP.core.losses.rst @@ -1,5 +1,5 @@ -fastNLP.core.losses module -========================== +fastNLP.core.losses +=================== .. automodule:: fastNLP.core.losses :members: diff --git a/docs/source/fastNLP.core.metrics.rst b/docs/source/fastNLP.core.metrics.rst index d700a525..69afff36 100644 --- a/docs/source/fastNLP.core.metrics.rst +++ b/docs/source/fastNLP.core.metrics.rst @@ -1,5 +1,5 @@ -fastNLP.core.metrics module -=========================== +fastNLP.core.metrics +==================== .. 
automodule:: fastNLP.core.metrics :members: diff --git a/docs/source/fastNLP.core.optimizer.rst b/docs/source/fastNLP.core.optimizer.rst index 737fc430..e2100d2e 100644 --- a/docs/source/fastNLP.core.optimizer.rst +++ b/docs/source/fastNLP.core.optimizer.rst @@ -1,5 +1,5 @@ -fastNLP.core.optimizer module -============================= +fastNLP.core.optimizer +====================== .. automodule:: fastNLP.core.optimizer :members: diff --git a/docs/source/fastNLP.core.rst b/docs/source/fastNLP.core.rst index 01c59e21..932c6b21 100644 --- a/docs/source/fastNLP.core.rst +++ b/docs/source/fastNLP.core.rst @@ -1,4 +1,4 @@ -fastNLP.core package +fastNLP.core 模块 ==================== .. automodule:: fastNLP.core @@ -6,7 +6,7 @@ fastNLP.core package :undoc-members: :show-inheritance: -Submodules +子模块 ---------- .. toctree:: diff --git a/docs/source/fastNLP.core.sampler.rst b/docs/source/fastNLP.core.sampler.rst index a827d49c..1810d59c 100644 --- a/docs/source/fastNLP.core.sampler.rst +++ b/docs/source/fastNLP.core.sampler.rst @@ -1,5 +1,5 @@ -fastNLP.core.sampler module -=========================== +fastNLP.core.sampler +==================== .. automodule:: fastNLP.core.sampler :members: diff --git a/docs/source/fastNLP.core.tester.rst b/docs/source/fastNLP.core.tester.rst index 30cebe28..a9e7e09f 100644 --- a/docs/source/fastNLP.core.tester.rst +++ b/docs/source/fastNLP.core.tester.rst @@ -1,5 +1,5 @@ -fastNLP.core.tester module -========================== +fastNLP.core.tester +=================== .. automodule:: fastNLP.core.tester :members: diff --git a/docs/source/fastNLP.core.trainer.rst b/docs/source/fastNLP.core.trainer.rst index 648feb9d..9e518d4b 100644 --- a/docs/source/fastNLP.core.trainer.rst +++ b/docs/source/fastNLP.core.trainer.rst @@ -1,5 +1,5 @@ -fastNLP.core.trainer module -=========================== +fastNLP.core.trainer +==================== .. automodule:: fastNLP.core.trainer :members: diff --git a/docs/source/fastNLP.core.utils.rst b/docs/source/fastNLP.core.utils.rst index 2bec7f62..fcd3f50c 100644 --- a/docs/source/fastNLP.core.utils.rst +++ b/docs/source/fastNLP.core.utils.rst @@ -1,5 +1,5 @@ -fastNLP.core.utils module -========================= +fastNLP.core.utils +================== .. automodule:: fastNLP.core.utils :members: diff --git a/docs/source/fastNLP.core.vocabulary.rst b/docs/source/fastNLP.core.vocabulary.rst index 98d8d24d..b3bf4bac 100644 --- a/docs/source/fastNLP.core.vocabulary.rst +++ b/docs/source/fastNLP.core.vocabulary.rst @@ -1,5 +1,5 @@ -fastNLP.core.vocabulary module -============================== +fastNLP.core.vocabulary +======================= .. automodule:: fastNLP.core.vocabulary :members: diff --git a/docs/source/fastNLP.io.base_loader.rst b/docs/source/fastNLP.io.base_loader.rst index b3375f74..c1f9ac14 100644 --- a/docs/source/fastNLP.io.base_loader.rst +++ b/docs/source/fastNLP.io.base_loader.rst @@ -1,5 +1,5 @@ -fastNLP.io.base\_loader module -============================== +fastNLP.io.base\_loader +======================= .. automodule:: fastNLP.io.base_loader :members: diff --git a/docs/source/fastNLP.io.dataset_loader.rst b/docs/source/fastNLP.io.dataset_loader.rst index 89f9b165..d6663e59 100644 --- a/docs/source/fastNLP.io.dataset_loader.rst +++ b/docs/source/fastNLP.io.dataset_loader.rst @@ -1,5 +1,5 @@ -fastNLP.io.dataset\_loader module -================================= +fastNLP.io.dataset\_loader +========================== .. 
automodule:: fastNLP.io.dataset_loader :members: diff --git a/docs/source/fastNLP.io.embed_loader.rst b/docs/source/fastNLP.io.embed_loader.rst index 1f135155..7a8e730c 100644 --- a/docs/source/fastNLP.io.embed_loader.rst +++ b/docs/source/fastNLP.io.embed_loader.rst @@ -1,5 +1,5 @@ -fastNLP.io.embed\_loader module -=============================== +fastNLP.io.embed\_loader +======================== .. automodule:: fastNLP.io.embed_loader :members: diff --git a/docs/source/fastNLP.io.model_io.rst b/docs/source/fastNLP.io.model_io.rst index 75f1df69..50d4c25a 100644 --- a/docs/source/fastNLP.io.model_io.rst +++ b/docs/source/fastNLP.io.model_io.rst @@ -1,5 +1,5 @@ -fastNLP.io.model\_io module -=========================== +fastNLP.io.model\_io +==================== .. automodule:: fastNLP.io.model_io :members: diff --git a/docs/source/fastNLP.io.rst b/docs/source/fastNLP.io.rst index 610b1ce6..ae28573d 100644 --- a/docs/source/fastNLP.io.rst +++ b/docs/source/fastNLP.io.rst @@ -1,4 +1,4 @@ -fastNLP.io package +fastNLP.io 模块 ================== .. automodule:: fastNLP.io @@ -6,7 +6,7 @@ fastNLP.io package :undoc-members: :show-inheritance: -Submodules +子模块 ---------- .. toctree:: diff --git a/docs/source/fastNLP.models.base_model.rst b/docs/source/fastNLP.models.base_model.rst index 2537f75f..e1d4d64f 100644 --- a/docs/source/fastNLP.models.base_model.rst +++ b/docs/source/fastNLP.models.base_model.rst @@ -1,5 +1,5 @@ -fastNLP.models.base\_model module -================================= +fastNLP.models.base\_model +========================== .. automodule:: fastNLP.models.base_model :members: diff --git a/docs/source/fastNLP.models.bert.rst b/docs/source/fastNLP.models.bert.rst index 7ac64ad7..bba323df 100644 --- a/docs/source/fastNLP.models.bert.rst +++ b/docs/source/fastNLP.models.bert.rst @@ -1,5 +1,5 @@ -fastNLP.models.bert module -========================== +fastNLP.models.bert +=================== .. automodule:: fastNLP.models.bert :members: diff --git a/docs/source/fastNLP.models.biaffine_parser.rst b/docs/source/fastNLP.models.biaffine_parser.rst index 448dff09..a3dd1836 100644 --- a/docs/source/fastNLP.models.biaffine_parser.rst +++ b/docs/source/fastNLP.models.biaffine_parser.rst @@ -1,5 +1,5 @@ -fastNLP.models.biaffine\_parser module -====================================== +fastNLP.models.biaffine\_parser +=============================== .. automodule:: fastNLP.models.biaffine_parser :members: diff --git a/docs/source/fastNLP.models.cnn_text_classification.rst b/docs/source/fastNLP.models.cnn_text_classification.rst index 31807494..a935d0bf 100644 --- a/docs/source/fastNLP.models.cnn_text_classification.rst +++ b/docs/source/fastNLP.models.cnn_text_classification.rst @@ -1,5 +1,5 @@ -fastNLP.models.cnn\_text\_classification module -=============================================== +fastNLP.models.cnn\_text\_classification +======================================== .. automodule:: fastNLP.models.cnn_text_classification :members: diff --git a/docs/source/fastNLP.models.enas_controller.rst b/docs/source/fastNLP.models.enas_controller.rst index 7977de81..28655bd7 100644 --- a/docs/source/fastNLP.models.enas_controller.rst +++ b/docs/source/fastNLP.models.enas_controller.rst @@ -1,5 +1,5 @@ -fastNLP.models.enas\_controller module -====================================== +fastNLP.models.enas\_controller +=============================== .. 
automodule:: fastNLP.models.enas_controller :members: diff --git a/docs/source/fastNLP.models.enas_model.rst b/docs/source/fastNLP.models.enas_model.rst index 518f56b7..35fbe495 100644 --- a/docs/source/fastNLP.models.enas_model.rst +++ b/docs/source/fastNLP.models.enas_model.rst @@ -1,5 +1,5 @@ -fastNLP.models.enas\_model module -================================= +fastNLP.models.enas\_model +========================== .. automodule:: fastNLP.models.enas_model :members: diff --git a/docs/source/fastNLP.models.enas_trainer.rst b/docs/source/fastNLP.models.enas_trainer.rst index 45f77f31..7e0ef462 100644 --- a/docs/source/fastNLP.models.enas_trainer.rst +++ b/docs/source/fastNLP.models.enas_trainer.rst @@ -1,5 +1,5 @@ -fastNLP.models.enas\_trainer module -=================================== +fastNLP.models.enas\_trainer +============================ .. automodule:: fastNLP.models.enas_trainer :members: diff --git a/docs/source/fastNLP.models.enas_utils.rst b/docs/source/fastNLP.models.enas_utils.rst index 5f05a4fc..0a049706 100644 --- a/docs/source/fastNLP.models.enas_utils.rst +++ b/docs/source/fastNLP.models.enas_utils.rst @@ -1,5 +1,5 @@ -fastNLP.models.enas\_utils module -================================= +fastNLP.models.enas\_utils +========================== .. automodule:: fastNLP.models.enas_utils :members: diff --git a/docs/source/fastNLP.models.rst b/docs/source/fastNLP.models.rst index accfc3bb..c1be3a4c 100644 --- a/docs/source/fastNLP.models.rst +++ b/docs/source/fastNLP.models.rst @@ -1,4 +1,4 @@ -fastNLP.models package +fastNLP.models 模块 ====================== .. automodule:: fastNLP.models @@ -6,7 +6,7 @@ fastNLP.models package :undoc-members: :show-inheritance: -Submodules +子模块 ---------- .. toctree:: diff --git a/docs/source/fastNLP.models.sequence_labeling.rst b/docs/source/fastNLP.models.sequence_labeling.rst index b8568be7..6d569fe1 100644 --- a/docs/source/fastNLP.models.sequence_labeling.rst +++ b/docs/source/fastNLP.models.sequence_labeling.rst @@ -1,5 +1,5 @@ -fastNLP.models.sequence\_labeling module -======================================== +fastNLP.models.sequence\_labeling +================================= .. automodule:: fastNLP.models.sequence_labeling :members: diff --git a/docs/source/fastNLP.models.snli.rst b/docs/source/fastNLP.models.snli.rst index b24bc196..24c2cc53 100644 --- a/docs/source/fastNLP.models.snli.rst +++ b/docs/source/fastNLP.models.snli.rst @@ -1,5 +1,5 @@ -fastNLP.models.snli module -========================== +fastNLP.models.snli +=================== .. automodule:: fastNLP.models.snli :members: diff --git a/docs/source/fastNLP.models.star_transformer.rst b/docs/source/fastNLP.models.star_transformer.rst index f2185935..c93fb8cd 100644 --- a/docs/source/fastNLP.models.star_transformer.rst +++ b/docs/source/fastNLP.models.star_transformer.rst @@ -1,5 +1,5 @@ -fastNLP.models.star\_transformer module -======================================= +fastNLP.models.star\_transformer +================================ .. automodule:: fastNLP.models.star_transformer :members: diff --git a/docs/source/fastNLP.modules.aggregator.attention.rst b/docs/source/fastNLP.modules.aggregator.attention.rst index 46251e73..dc9c2b53 100644 --- a/docs/source/fastNLP.modules.aggregator.attention.rst +++ b/docs/source/fastNLP.modules.aggregator.attention.rst @@ -1,5 +1,5 @@ -fastNLP.modules.aggregator.attention module -=========================================== +fastNLP.modules.aggregator.attention +==================================== .. 
automodule:: fastNLP.modules.aggregator.attention :members: diff --git a/docs/source/fastNLP.modules.aggregator.pooling.rst b/docs/source/fastNLP.modules.aggregator.pooling.rst index f6730430..162f889d 100644 --- a/docs/source/fastNLP.modules.aggregator.pooling.rst +++ b/docs/source/fastNLP.modules.aggregator.pooling.rst @@ -1,5 +1,5 @@ -fastNLP.modules.aggregator.pooling module -========================================= +fastNLP.modules.aggregator.pooling +================================== .. automodule:: fastNLP.modules.aggregator.pooling :members: diff --git a/docs/source/fastNLP.modules.aggregator.rst b/docs/source/fastNLP.modules.aggregator.rst index b9b331c3..4f8baabd 100644 --- a/docs/source/fastNLP.modules.aggregator.rst +++ b/docs/source/fastNLP.modules.aggregator.rst @@ -1,12 +1,12 @@ -fastNLP.modules.aggregator package -================================== +fastNLP.modules.aggregator +========================== .. automodule:: fastNLP.modules.aggregator :members: :undoc-members: :show-inheritance: -Submodules +子模块 ---------- .. toctree:: diff --git a/docs/source/fastNLP.modules.decoder.CRF.rst b/docs/source/fastNLP.modules.decoder.CRF.rst index 8d980bbd..fc643fef 100644 --- a/docs/source/fastNLP.modules.decoder.CRF.rst +++ b/docs/source/fastNLP.modules.decoder.CRF.rst @@ -1,5 +1,5 @@ -fastNLP.modules.decoder.CRF module -================================== +fastNLP.modules.decoder.CRF +=========================== .. automodule:: fastNLP.modules.decoder.CRF :members: diff --git a/docs/source/fastNLP.modules.decoder.MLP.rst b/docs/source/fastNLP.modules.decoder.MLP.rst index 787a3c33..feb5c228 100644 --- a/docs/source/fastNLP.modules.decoder.MLP.rst +++ b/docs/source/fastNLP.modules.decoder.MLP.rst @@ -1,5 +1,5 @@ -fastNLP.modules.decoder.MLP module -================================== +fastNLP.modules.decoder.MLP +=========================== .. automodule:: fastNLP.modules.decoder.MLP :members: diff --git a/docs/source/fastNLP.modules.decoder.rst b/docs/source/fastNLP.modules.decoder.rst index 10fdbd90..fbda11d9 100644 --- a/docs/source/fastNLP.modules.decoder.rst +++ b/docs/source/fastNLP.modules.decoder.rst @@ -1,12 +1,12 @@ -fastNLP.modules.decoder package -=============================== +fastNLP.modules.decoder +======================= .. automodule:: fastNLP.modules.decoder :members: :undoc-members: :show-inheritance: -Submodules +子模块 ---------- .. toctree:: diff --git a/docs/source/fastNLP.modules.decoder.utils.rst b/docs/source/fastNLP.modules.decoder.utils.rst index b829baf7..da979d99 100644 --- a/docs/source/fastNLP.modules.decoder.utils.rst +++ b/docs/source/fastNLP.modules.decoder.utils.rst @@ -1,5 +1,5 @@ -fastNLP.modules.decoder.utils module -==================================== +fastNLP.modules.decoder.utils +============================= .. automodule:: fastNLP.modules.decoder.utils :members: diff --git a/docs/source/fastNLP.modules.encoder.bert.rst b/docs/source/fastNLP.modules.encoder.bert.rst index 6f811792..66bd0bbd 100644 --- a/docs/source/fastNLP.modules.encoder.bert.rst +++ b/docs/source/fastNLP.modules.encoder.bert.rst @@ -1,5 +1,5 @@ -fastNLP.modules.encoder.bert module -=================================== +fastNLP.modules.encoder.bert +============================ .. 
automodule:: fastNLP.modules.encoder.bert :members: diff --git a/docs/source/fastNLP.modules.encoder.char_encoder.rst b/docs/source/fastNLP.modules.encoder.char_encoder.rst index 12431c70..61ea3340 100644 --- a/docs/source/fastNLP.modules.encoder.char_encoder.rst +++ b/docs/source/fastNLP.modules.encoder.char_encoder.rst @@ -1,5 +1,5 @@ -fastNLP.modules.encoder.char\_encoder module -============================================ +fastNLP.modules.encoder.char\_encoder +===================================== .. automodule:: fastNLP.modules.encoder.char_encoder :members: diff --git a/docs/source/fastNLP.modules.encoder.conv_maxpool.rst b/docs/source/fastNLP.modules.encoder.conv_maxpool.rst index c40a5f39..7058a723 100644 --- a/docs/source/fastNLP.modules.encoder.conv_maxpool.rst +++ b/docs/source/fastNLP.modules.encoder.conv_maxpool.rst @@ -1,5 +1,5 @@ -fastNLP.modules.encoder.conv\_maxpool module -============================================ +fastNLP.modules.encoder.conv\_maxpool +===================================== .. automodule:: fastNLP.modules.encoder.conv_maxpool :members: diff --git a/docs/source/fastNLP.modules.encoder.embedding.rst b/docs/source/fastNLP.modules.encoder.embedding.rst index abdd5fd2..4427b3bf 100644 --- a/docs/source/fastNLP.modules.encoder.embedding.rst +++ b/docs/source/fastNLP.modules.encoder.embedding.rst @@ -1,5 +1,5 @@ -fastNLP.modules.encoder.embedding module -======================================== +fastNLP.modules.encoder.embedding +================================= .. automodule:: fastNLP.modules.encoder.embedding :members: diff --git a/docs/source/fastNLP.modules.encoder.lstm.rst b/docs/source/fastNLP.modules.encoder.lstm.rst index 897e7a5f..f9cbea88 100644 --- a/docs/source/fastNLP.modules.encoder.lstm.rst +++ b/docs/source/fastNLP.modules.encoder.lstm.rst @@ -1,5 +1,5 @@ -fastNLP.modules.encoder.lstm module -=================================== +fastNLP.modules.encoder.lstm +============================ .. automodule:: fastNLP.modules.encoder.lstm :members: diff --git a/docs/source/fastNLP.modules.encoder.rst b/docs/source/fastNLP.modules.encoder.rst index e571331d..9a11fe74 100644 --- a/docs/source/fastNLP.modules.encoder.rst +++ b/docs/source/fastNLP.modules.encoder.rst @@ -1,12 +1,12 @@ -fastNLP.modules.encoder package -=============================== +fastNLP.modules.encoder +======================= .. automodule:: fastNLP.modules.encoder :members: :undoc-members: :show-inheritance: -Submodules +子模块 ---------- .. toctree:: diff --git a/docs/source/fastNLP.modules.encoder.star_transformer.rst b/docs/source/fastNLP.modules.encoder.star_transformer.rst index 57cd6dcf..0c406782 100644 --- a/docs/source/fastNLP.modules.encoder.star_transformer.rst +++ b/docs/source/fastNLP.modules.encoder.star_transformer.rst @@ -1,5 +1,5 @@ -fastNLP.modules.encoder.star\_transformer module -================================================ +fastNLP.modules.encoder.star\_transformer +========================================= .. automodule:: fastNLP.modules.encoder.star_transformer :members: diff --git a/docs/source/fastNLP.modules.encoder.transformer.rst b/docs/source/fastNLP.modules.encoder.transformer.rst index 4b63686e..6a40c597 100644 --- a/docs/source/fastNLP.modules.encoder.transformer.rst +++ b/docs/source/fastNLP.modules.encoder.transformer.rst @@ -1,5 +1,5 @@ -fastNLP.modules.encoder.transformer module -========================================== +fastNLP.modules.encoder.transformer +=================================== .. 
automodule:: fastNLP.modules.encoder.transformer :members: diff --git a/docs/source/fastNLP.modules.encoder.variational_rnn.rst b/docs/source/fastNLP.modules.encoder.variational_rnn.rst index 4472d5c2..348fb3d8 100644 --- a/docs/source/fastNLP.modules.encoder.variational_rnn.rst +++ b/docs/source/fastNLP.modules.encoder.variational_rnn.rst @@ -1,5 +1,5 @@ -fastNLP.modules.encoder.variational\_rnn module -=============================================== +fastNLP.modules.encoder.variational\_rnn +======================================== .. automodule:: fastNLP.modules.encoder.variational_rnn :members: diff --git a/docs/source/fastNLP.modules.rst b/docs/source/fastNLP.modules.rst index 2057613a..263005f0 100644 --- a/docs/source/fastNLP.modules.rst +++ b/docs/source/fastNLP.modules.rst @@ -1,4 +1,4 @@ -fastNLP.modules package +fastNLP.modules 模块 ======================= .. automodule:: fastNLP.modules @@ -6,7 +6,7 @@ fastNLP.modules package :undoc-members: :show-inheritance: -Subpackages +子模块 ----------- .. toctree:: diff --git a/docs/source/fastNLP.rst b/docs/source/fastNLP.rst index 30f405f0..eaa06ea8 100644 --- a/docs/source/fastNLP.rst +++ b/docs/source/fastNLP.rst @@ -1,4 +1,4 @@ -fastNLP package +用户 API =============== .. automodule:: fastNLP @@ -6,14 +6,14 @@ fastNLP package :undoc-members: :show-inheritance: -Subpackages +内部模块 ----------- .. toctree:: + :maxdepth: 3 - fastNLP.component fastNLP.core fastNLP.io - fastNLP.models fastNLP.modules + fastNLP.models diff --git a/docs/source/index.rst b/docs/source/index.rst index d441601e..4bb44773 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -46,11 +46,6 @@ For example: (TODO) -快速入门 -------------- - -TODO - 用户手册 --------------- @@ -60,7 +55,7 @@ TODO 安装指南 快速入门 - 详细指南 + 详细指南 科研向导 From ce8d629fb1d21edc7e5f36d4ef0d1b964431f464 Mon Sep 17 00:00:00 2001 From: ChenXin Date: Sun, 12 May 2019 22:10:11 +0800 Subject: [PATCH 02/19] =?UTF-8?q?=E5=AE=8C=E6=88=90=E4=BA=86=E8=AF=A6?= =?UTF-8?q?=E7=BB=86=E4=BB=8B=E7=BB=8D=E6=96=87=E6=A1=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/source/index.rst | 13 +- .../tutorials/fastnlp_10tmin_tutorial.rst | 376 ------------------ .../tutorials/fastnlp_1_minute_tutorial.rst | 113 ------ .../tutorials/fastnlp_advanced_tutorial.rst | 5 - .../tutorials/fastnlp_developer_guide.rst | 5 - docs/source/user/quickstart.rst | 2 +- docs/source/user/task1.rst | 3 - docs/source/user/task2.rst | 3 - docs/source/user/tutorial_one.rst | 371 +++++++++++++++++ docs/source/user/with_fitlog.rst | 5 + 10 files changed, 383 insertions(+), 513 deletions(-) delete mode 100644 docs/source/tutorials/fastnlp_10tmin_tutorial.rst delete mode 100644 docs/source/tutorials/fastnlp_1_minute_tutorial.rst delete mode 100644 docs/source/tutorials/fastnlp_advanced_tutorial.rst delete mode 100644 docs/source/tutorials/fastnlp_developer_guide.rst delete mode 100644 docs/source/user/task1.rst delete mode 100644 docs/source/user/task2.rst create mode 100644 docs/source/user/tutorial_one.rst create mode 100644 docs/source/user/with_fitlog.rst diff --git a/docs/source/index.rst b/docs/source/index.rst index 4bb44773..10bab0eb 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -24,7 +24,7 @@ fastNLP 是一款轻量级的 NLP 处理套件。你既可以使用它快速地 | module type | functionality | example | +=======================+=======================+=======================+ | encoder | 将输入编码为具有具 | embedding, RNN, CNN, | -| | 有表示能力的向量 | transformer | +| | 有表示能力的向量 | transformer | 
+-----------------------+-----------------------+-----------------------+ | aggregator | 从多个向量中聚合信息 | self-attention, | | | | max-pooling | @@ -39,16 +39,15 @@ For example: .. image:: figures/text_classification.png +.. todo:: + 各个任务上的结果 - -各个任务上的结果 ------------------------ - -(TODO) +内置的模型 +---------------- 用户手册 ---------------- +---------------- .. toctree:: :maxdepth: 1 diff --git a/docs/source/tutorials/fastnlp_10tmin_tutorial.rst b/docs/source/tutorials/fastnlp_10tmin_tutorial.rst deleted file mode 100644 index 4c5fc65e..00000000 --- a/docs/source/tutorials/fastnlp_10tmin_tutorial.rst +++ /dev/null @@ -1,376 +0,0 @@ -fastNLP 10分钟上手教程 -=============== - -教程原文见 https://github.com/fastnlp/fastNLP/blob/master/tutorials/fastnlp_10min_tutorial.ipynb - -fastNLP提供方便的数据预处理,训练和测试模型的功能 - -DataSet & Instance ------------------- - -fastNLP用DataSet和Instance保存和处理数据。每个DataSet表示一个数据集,每个Instance表示一个数据样本。一个DataSet存有多个Instance,每个Instance可以自定义存哪些内容。 - -有一些read\_\*方法,可以轻松从文件读取数据,存成DataSet。 - -.. code:: ipython3 - - from fastNLP import DataSet - from fastNLP import Instance - - # 从csv读取数据到DataSet - win_path = "C:\\Users\zyfeng\Desktop\FudanNLP\\fastNLP\\test\\data_for_tests\\tutorial_sample_dataset.csv" - dataset = DataSet.read_csv(win_path, headers=('raw_sentence', 'label'), sep='\t') - print(dataset[0]) - - -.. parsed-literal:: - - {'raw_sentence': A series of escapades demonstrating the adage that what is good for the goose is also good for the gander , some of which occasionally amuses but none of which amounts to much of a story ., - 'label': 1} - - -.. code:: ipython3 - - # DataSet.append(Instance)加入新数据 - - dataset.append(Instance(raw_sentence='fake data', label='0')) - dataset[-1] - - - - -.. parsed-literal:: - - {'raw_sentence': fake data, - 'label': 0} - - - -.. code:: ipython3 - - # DataSet.apply(func, new_field_name)对数据预处理 - - # 将所有数字转为小写 - dataset.apply(lambda x: x['raw_sentence'].lower(), new_field_name='raw_sentence') - # label转int - dataset.apply(lambda x: int(x['label']), new_field_name='label_seq', is_target=True) - # 使用空格分割句子 - dataset.drop(lambda x: len(x['raw_sentence'].split()) == 0) - def split_sent(ins): - return ins['raw_sentence'].split() - dataset.apply(split_sent, new_field_name='words', is_input=True) - -.. code:: ipython3 - - # DataSet.drop(func)筛除数据 - # 删除低于某个长度的词语 - dataset.drop(lambda x: len(x['words']) <= 3) - -.. code:: ipython3 - - # 分出测试集、训练集 - - test_data, train_data = dataset.split(0.3) - print("Train size: ", len(test_data)) - print("Test size: ", len(train_data)) - - -.. parsed-literal:: - - Train size: 54 - Test size: - -Vocabulary ----------- - -fastNLP中的Vocabulary轻松构建词表,将词转成数字 - -.. code:: ipython3 - - from fastNLP import Vocabulary - - # 构建词表, Vocabulary.add(word) - vocab = Vocabulary(min_freq=2) - train_data.apply(lambda x: [vocab.add(word) for word in x['words']]) - vocab.build_vocab() - - # index句子, Vocabulary.to_index(word) - train_data.apply(lambda x: [vocab.to_index(word) for word in x['words']], new_field_name='word_seq', is_input=True) - test_data.apply(lambda x: [vocab.to_index(word) for word in x['words']], new_field_name='word_seq', is_input=True) - - - print(test_data[0]) - - -.. parsed-literal:: - - {'raw_sentence': the plot is romantic comedy boilerplate from start to finish ., - 'label': 2, - 'label_seq': 2, - 'words': ['the', 'plot', 'is', 'romantic', 'comedy', 'boilerplate', 'from', 'start', 'to', 'finish', '.'], - 'word_seq': [2, 13, 9, 24, 25, 26, 15, 27, 11, 28, 3]} - - -.. 
code:: ipython3 - - # 假设你们需要做强化学习或者gan之类的项目,也许你们可以使用这里的dataset - from fastNLP.core.batch import Batch - from fastNLP.core.sampler import RandomSampler - - batch_iterator = Batch(dataset=train_data, batch_size=2, sampler=RandomSampler()) - for batch_x, batch_y in batch_iterator: - print("batch_x has: ", batch_x) - print("batch_y has: ", batch_y) - break - - -.. parsed-literal:: - - batch_x has: {'words': array([list(['this', 'kind', 'of', 'hands-on', 'storytelling', 'is', 'ultimately', 'what', 'makes', 'shanghai', 'ghetto', 'move', 'beyond', 'a', 'good', ',', 'dry', ',', 'reliable', 'textbook', 'and', 'what', 'allows', 'it', 'to', 'rank', 'with', 'its', 'worthy', 'predecessors', '.']), - list(['the', 'entire', 'movie', 'is', 'filled', 'with', 'deja', 'vu', 'moments', '.'])], - dtype=object), 'word_seq': tensor([[ 19, 184, 6, 1, 481, 9, 206, 50, 91, 1210, 1609, 1330, - 495, 5, 63, 4, 1269, 4, 1, 1184, 7, 50, 1050, 10, - 8, 1611, 16, 21, 1039, 1, 2], - [ 3, 711, 22, 9, 1282, 16, 2482, 2483, 200, 2, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0]])} - batch_y has: {'label_seq': tensor([3, 2])} - - -Model ------ - -.. code:: ipython3 - - # 定义一个简单的Pytorch模型 - - from fastNLP.models import CNNText - model = CNNText(embed_num=len(vocab), embed_dim=50, num_classes=5, padding=2, dropout=0.1) - model - - - - -.. parsed-literal:: - - CNNText( - (embed): Embedding( - (embed): Embedding(77, 50, padding_idx=0) - (dropout): Dropout(p=0.0) - ) - (conv_pool): ConvMaxpool( - (convs): ModuleList( - (0): Conv1d(50, 3, kernel_size=(3,), stride=(1,), padding=(2,)) - (1): Conv1d(50, 4, kernel_size=(4,), stride=(1,), padding=(2,)) - (2): Conv1d(50, 5, kernel_size=(5,), stride=(1,), padding=(2,)) - ) - ) - (dropout): Dropout(p=0.1) - (fc): Linear( - (linear): Linear(in_features=12, out_features=5, bias=True) - ) - ) - - - -Trainer & Tester ----------------- - -使用fastNLP的Trainer训练模型 - -.. code:: ipython3 - - from fastNLP import Trainer - from copy import deepcopy - from fastNLP import CrossEntropyLoss - from fastNLP import AccuracyMetric - -.. code:: ipython3 - - # 进行overfitting测试 - copy_model = deepcopy(model) - overfit_trainer = Trainer(model=copy_model, - train_data=test_data, - dev_data=test_data, - loss=CrossEntropyLoss(pred="output", target="label_seq"), - metrics=AccuracyMetric(), - n_epochs=10, - save_path=None) - overfit_trainer.train() - - -.. parsed-literal:: - - training epochs started 2018-12-07 14:07:20 - - - - -.. parsed-literal:: - - HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=20), HTML(value='')), layout=Layout(display='… - - - -.. parsed-literal:: - - Epoch 1/10. Step:2/20. AccuracyMetric: acc=0.037037 - Epoch 2/10. Step:4/20. AccuracyMetric: acc=0.296296 - Epoch 3/10. Step:6/20. AccuracyMetric: acc=0.333333 - Epoch 4/10. Step:8/20. AccuracyMetric: acc=0.555556 - Epoch 5/10. Step:10/20. AccuracyMetric: acc=0.611111 - Epoch 6/10. Step:12/20. AccuracyMetric: acc=0.481481 - Epoch 7/10. Step:14/20. AccuracyMetric: acc=0.62963 - Epoch 8/10. Step:16/20. AccuracyMetric: acc=0.685185 - Epoch 9/10. Step:18/20. AccuracyMetric: acc=0.722222 - Epoch 10/10. Step:20/20. AccuracyMetric: acc=0.777778 - - -.. code:: ipython3 - - # 实例化Trainer,传入模型和数据,进行训练 - trainer = Trainer(model=model, - train_data=train_data, - dev_data=test_data, - loss=CrossEntropyLoss(pred="output", target="label_seq"), - metrics=AccuracyMetric(), - n_epochs=5) - trainer.train() - print('Train finished!') - - -.. parsed-literal:: - - training epochs started 2018-12-07 14:08:10 - - - - -.. 
parsed-literal:: - - HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=5), HTML(value='')), layout=Layout(display='i… - - - -.. parsed-literal:: - - Epoch 1/5. Step:1/5. AccuracyMetric: acc=0.037037 - Epoch 2/5. Step:2/5. AccuracyMetric: acc=0.037037 - Epoch 3/5. Step:3/5. AccuracyMetric: acc=0.037037 - Epoch 4/5. Step:4/5. AccuracyMetric: acc=0.185185 - Epoch 5/5. Step:5/5. AccuracyMetric: acc=0.240741 - Train finished! - - -.. code:: ipython3 - - from fastNLP import Tester - - tester = Tester(data=test_data, model=model, metrics=AccuracyMetric()) - acc = tester.test() - - -.. parsed-literal:: - - [tester] - AccuracyMetric: acc=0.240741 - - -In summary ----------- - -fastNLP Trainer的伪代码逻辑 ---------------------------- - -1. 准备DataSet,假设DataSet中共有如下的fields -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -:: - - ['raw_sentence', 'word_seq1', 'word_seq2', 'raw_label','label'] - 通过 - DataSet.set_input('word_seq1', word_seq2', flag=True)将'word_seq1', 'word_seq2'设置为input - 通过 - DataSet.set_target('label', flag=True)将'label'设置为target - -2. 初始化模型 -~~~~~~~~~~~~~ - -:: - - class Model(nn.Module): - def __init__(self): - xxx - def forward(self, word_seq1, word_seq2): - # (1) 这里使用的形参名必须和DataSet中的input field的名称对应。因为我们是通过形参名, 进行赋值的 - # (2) input field的数量可以多于这里的形参数量。但是不能少于。 - xxxx - # 输出必须是一个dict - -3. Trainer的训练过程 -~~~~~~~~~~~~~~~~~~~~ - -:: - - (1) 从DataSet中按照batch_size取出一个batch,调用Model.forward - (2) 将 Model.forward的结果 与 标记为target的field 传入Losser当中。 - 由于每个人写的Model.forward的output的dict可能key并不一样,比如有人是{'pred':xxx}, {'output': xxx}; - 另外每个人将target可能也会设置为不同的名称, 比如有人是label, 有人设置为target; - 为了解决以上的问题,我们的loss提供映射机制 - 比如CrossEntropyLosser的需要的输入是(prediction, target)。但是forward的output是{'output': xxx}; 'label'是target - 那么初始化losser的时候写为CrossEntropyLosser(prediction='output', target='label')即可 - (3) 对于Metric是同理的 - Metric计算也是从 forward的结果中取值 与 设置target的field中取值。 也是可以通过映射找到对应的值 - -一些问题. ---------- - -1. DataSet中为什么需要设置input和target -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -:: - - 只有被设置为input或者target的数据才会在train的过程中被取出来 - (1.1) 我们只会在设置为input的field中寻找传递给Model.forward的参数。 - (1.2) 我们在传递值给losser或者metric的时候会使用来自: - (a)Model.forward的output - (b)被设置为target的field - - -2. 我们是通过forwad中的形参名将DataSet中的field赋值给对应的参数 -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -:: - - (1.1) 构建模型过程中, - 例如: - DataSet中x,seq_lens是input,那么forward就应该是 - def forward(self, x, seq_lens): - pass - 我们是通过形参名称进行匹配的field的 - - -1. 加载数据到DataSet -~~~~~~~~~~~~~~~~~~~~ - -2. 使用apply操作对DataSet进行预处理 -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -:: - - (2.1) 处理过程中将某些field设置为input,某些field设置为target - -3. 构建模型 -~~~~~~~~~~~ - -:: - - (3.1) 构建模型过程中,需要注意forward函数的形参名需要和DataSet中设置为input的field名称是一致的。 - 例如: - DataSet中x,seq_lens是input,那么forward就应该是 - def forward(self, x, seq_lens): - pass - 我们是通过形参名称进行匹配的field的 - (3.2) 模型的forward的output需要是dict类型的。 - 建议将输出设置为{"pred": xx}. - diff --git a/docs/source/tutorials/fastnlp_1_minute_tutorial.rst b/docs/source/tutorials/fastnlp_1_minute_tutorial.rst deleted file mode 100644 index b4c6c8c4..00000000 --- a/docs/source/tutorials/fastnlp_1_minute_tutorial.rst +++ /dev/null @@ -1,113 +0,0 @@ - -FastNLP 1分钟上手教程 -===================== - -教程原文见 https://github.com/fastnlp/fastNLP/blob/master/tutorials/fastnlp_1min_tutorial.ipynb - -step 1 ------- - -读取数据集 - -.. 
code:: ipython3 - - from fastNLP import DataSet - # linux_path = "../test/data_for_tests/tutorial_sample_dataset.csv" - win_path = "C:\\Users\zyfeng\Desktop\FudanNLP\\fastNLP\\test\\data_for_tests\\tutorial_sample_dataset.csv" - ds = DataSet.read_csv(win_path, headers=('raw_sentence', 'label'), sep='\t') - -step 2 ------- - -数据预处理 1. 类型转换 2. 切分验证集 3. 构建词典 - -.. code:: ipython3 - - # 将所有数字转为小写 - ds.apply(lambda x: x['raw_sentence'].lower(), new_field_name='raw_sentence') - # label转int - ds.apply(lambda x: int(x['label']), new_field_name='label_seq', is_target=True) - - def split_sent(ins): - return ins['raw_sentence'].split() - ds.apply(split_sent, new_field_name='words', is_input=True) - - -.. code:: ipython3 - - # 分割训练集/验证集 - train_data, dev_data = ds.split(0.3) - print("Train size: ", len(train_data)) - print("Test size: ", len(dev_data)) - - -.. parsed-literal:: - - Train size: 54 - Test size: 23 - - -.. code:: ipython3 - - from fastNLP import Vocabulary - vocab = Vocabulary(min_freq=2) - train_data.apply(lambda x: [vocab.add(word) for word in x['words']]) - - # index句子, Vocabulary.to_index(word) - train_data.apply(lambda x: [vocab.to_index(word) for word in x['words']], new_field_name='word_seq', is_input=True) - dev_data.apply(lambda x: [vocab.to_index(word) for word in x['words']], new_field_name='word_seq', is_input=True) - - -step 3 ------- - -定义模型 - -.. code:: ipython3 - - from fastNLP.models import CNNText - model = CNNText(embed_num=len(vocab), embed_dim=50, num_classes=5, padding=2, dropout=0.1) - - -step 4 ------- - -开始训练 - -.. code:: ipython3 - - from fastNLP import Trainer, CrossEntropyLoss, AccuracyMetric - trainer = Trainer(model=model, - train_data=train_data, - dev_data=dev_data, - loss=CrossEntropyLoss(), - metrics=AccuracyMetric() - ) - trainer.train() - print('Train finished!') - - - -.. parsed-literal:: - - training epochs started 2018-12-07 14:03:41 - - - - -.. parsed-literal:: - - HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=6), HTML(value='')), layout=Layout(display='i… - - - -.. parsed-literal:: - - Epoch 1/3. Step:2/6. AccuracyMetric: acc=0.26087 - Epoch 2/3. Step:4/6. AccuracyMetric: acc=0.347826 - Epoch 3/3. Step:6/6. AccuracyMetric: acc=0.608696 - Train finished! - - -本教程结束。更多操作请参考进阶教程。 -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/docs/source/tutorials/fastnlp_advanced_tutorial.rst b/docs/source/tutorials/fastnlp_advanced_tutorial.rst deleted file mode 100644 index d788e9d6..00000000 --- a/docs/source/tutorials/fastnlp_advanced_tutorial.rst +++ /dev/null @@ -1,5 +0,0 @@ -fastNLP 进阶教程 -=============== - -教程原文见 https://github.com/fastnlp/fastNLP/blob/master/tutorials/fastnlp_advanced_tutorial/advance_tutorial.ipynb - diff --git a/docs/source/tutorials/fastnlp_developer_guide.rst b/docs/source/tutorials/fastnlp_developer_guide.rst deleted file mode 100644 index 73b75f02..00000000 --- a/docs/source/tutorials/fastnlp_developer_guide.rst +++ /dev/null @@ -1,5 +0,0 @@ -fastNLP 开发者指南 -=============== - -原文见 https://github.com/fastnlp/fastNLP/blob/master/tutorials/tutorial_for_developer.md - diff --git a/docs/source/user/quickstart.rst b/docs/source/user/quickstart.rst index 0e5c053e..43056a26 100644 --- a/docs/source/user/quickstart.rst +++ b/docs/source/user/quickstart.rst @@ -121,4 +121,4 @@ In Epoch:6/Step:12, got best dev performance:AccuracyMetric: acc=0.8 Reloaded the best model. 
-这份教程只是简单地介绍了使用 fastNLP 工作的流程,具体的细节分析见 :doc:`/user/tutorials` \ No newline at end of file +这份教程只是简单地介绍了使用 fastNLP 工作的流程,具体的细节分析见 :doc:`/user/tutorial_one` \ No newline at end of file diff --git a/docs/source/user/task1.rst b/docs/source/user/task1.rst deleted file mode 100644 index 0c346999..00000000 --- a/docs/source/user/task1.rst +++ /dev/null @@ -1,3 +0,0 @@ -===================== -用 fastNLP 分类 -===================== \ No newline at end of file diff --git a/docs/source/user/task2.rst b/docs/source/user/task2.rst deleted file mode 100644 index 73ee014b..00000000 --- a/docs/source/user/task2.rst +++ /dev/null @@ -1,3 +0,0 @@ -===================== -用 fastNLP 分词 -===================== \ No newline at end of file diff --git a/docs/source/user/tutorial_one.rst b/docs/source/user/tutorial_one.rst new file mode 100644 index 00000000..0c7be77d --- /dev/null +++ b/docs/source/user/tutorial_one.rst @@ -0,0 +1,371 @@ +=============== +详细指南 +=============== + +我们使用和 :doc:`/user/quickstart` 中一样的任务来进行详细的介绍。给出一段文字,预测它的标签是0~4中的哪一个 +(数据来源 `kaggle `_ )。 + +-------------- +数据处理 +-------------- + +数据读入 + 我们可以使用 fastNLP :mod:`fastNLP.io` 模块中的 :class:`~fastNLP.io.CSVLoader` 类,轻松地从 csv 文件读取我们的数据。 + 这里的 dataset 是 fastNLP 中 :class:`~fastNLP.DataSet` 类的对象 + + .. code-block:: python + + from fastNLP.io import CSVLoader + + loader = CSVLoader(headers=('raw_sentence', 'label'), sep='\t') + dataset = loader.load("./sample_data/tutorial_sample_dataset.csv") + + 除了读取数据外,fastNLP 还提供了读取其它文件类型的 Loader 类、读取 Embedding的 Loader 等。详见 :doc:`/fastNLP.io` 。 + +Instance 和 DataSet + fastNLP 中的 :class:`~fastNLP.DataSet` 类对象类似于二维表格,它的每一列是一个 :mod:`~fastNLP.core.field` + 每一行是一个 :mod:`~fastNLP.core.instance` 。我们可以手动向数据集中添加 :class:`~fastNLP.Instance` 类的对象 + + .. code-block:: python + + from fastNLP import Instance + + dataset.append(Instance(raw_sentence='fake data', label='0')) + + 此时的 ``dataset[-1]`` 的值如下,可以看到,数据集中的每个数据包含 ``raw_sentence`` 和 ``label`` 两个 + :mod:`~fastNLP.core.field` ,他们的类型都是 ``str`` :: + + {'raw_sentence': fake data type=str, 'label': 0 type=str} + +field 的修改 + 我们使用 :class:`~fastNLP.DataSet` 类的 :meth:`~fastNLP.DataSet.apply` 方法将 ``raw_sentence`` 中字母变成小写,并将句子分词。 + 同时也将 ``label`` :mod:`~fastNLP.core.field` 转化为整数并改名为 ``target`` + + .. code-block:: python + + dataset.apply(lambda x: x['raw_sentence'].lower(), new_field_name='sentence') + dataset.apply_field(lambda x: x.split(), field_name='sentence', new_field_name='words') + dataset.apply(lambda x: int(x['label']), new_field_name='target') + + ``words`` 和 ``target`` 已经足够用于 :class:`~fastNLP.models.CNNText` 的训练了,但我们从其文档 + :class:`~fastNLP.models.CNNText` 中看到,在 :meth:`~fastNLP.models.CNNText.forward` 的时候,还可以传入可选参数 ``seq_len`` 。 + 所以,我们再使用 :meth:`~fastNLP.DataSet.apply_field` 方法增加一个名为 ``seq_len`` 的 :mod:`~fastNLP.core.field` 。 + + .. code-block:: python + + dataset.apply_field(lambda x: len(x), field_name='words', new_field_name='seq_len') + + 观察可知: :meth:`~fastNLP.DataSet.apply_field` 与 :meth:`~fastNLP.DataSet.apply` 类似, + 但所传入的 `lambda` 函数是针对一个 :class:`~fastNLP.Instance` 中的一个 :mod:`~fastNLP.core.field` 的; + 而 :meth:`~fastNLP.DataSet.apply` 所传入的 `lambda` 函数是针对整个 :class:`~fastNLP.Instance` 的。 + + .. note:: + `lambda` 函数即匿名函数,是 Python 的重要特性。 ``lambda x: len(x)`` 和下面的这个函数的作用相同:: + + def func_lambda(x): + return len(x) + + 你也可以编写复杂的函数做为 :meth:`~fastNLP.DataSet.apply_field` 与 :meth:`~fastNLP.DataSet.apply` 的参数 + +Vocabulary 的使用 + 我们再用 :class:`~fastNLP.Vocabulary` 类来统计数据中出现的单词,并使用 :meth:`~fastNLP.Vocabularyindex_dataset` + 将单词序列转化为训练可用的数字序列。 + + .. 
code-block:: python + + from fastNLP import Vocabulary + + vocab = Vocabulary(min_freq=2).from_dataset(dataset, field_name='words') + vocab.index_dataset(dataset, field_name='words',new_field_name='words') + +数据集分割 + 除了修改 :mod:`~fastNLP.core.field` 之外,我们还可以对 :class:`~fastNLP.DataSet` 进行分割,以供训练、开发和测试使用。 + 下面这段代码展示了 :meth:`~fastNLP.DataSet.split` 的使用方法(但实际应该放在后面两段改名和设置输入的代码之后) + + .. code-block:: python + + train_dev_data, test_data = dataset.split(0.1) + train_data, dev_data = train_dev_data.split(0.1) + len(train_data), len(dev_data), len(test_data) + +--------------------- +使用内置模型训练 +--------------------- + +内置模型的输入输出命名 + fastNLP内置了一些完整的神经网络模型,详见 :doc:`/fastNLP.models` , 我们使用其中的 :class:`~fastNLP.models.CNNText` 模型进行训练。 + 为了使用内置的 :class:`~fastNLP.models.CNNText`,我们必须修改 :class:`~fastNLP.DataSet` 中 :mod:`~fastNLP.core.field` 的名称。 + 在这个例子中模型输入 (forward方法的参数) 为 ``words`` 和 ``seq_len`` ; 预测输出为 ``pred`` ;标准答案为 ``target`` 。 + 具体的命名规范可以参考 :doc:`/fastNLP.core.const` 。 + + 如果不想查看文档,您也可以使用 :class:`~fastNLP.Const` 类进行命名。下面的代码展示了给 :class:`~fastNLP.DataSet` 中 + :mod:`~fastNLP.core.field` 改名的 :meth:`~fastNLP.DataSet.rename_field` 方法,以及 :class:`~fastNLP.Const` 类的使用方法。 + + .. code-block:: python + + from fastNLP import Const + + dataset.rename_field('words', Const.INPUT) + dataset.rename_field('seq_len', Const.INPUT_LEN) + dataset.rename_field('target', Const.TARGET) + + 在给 :class:`~fastNLP.DataSet` 中 :mod:`~fastNLP.core.field` 改名后,我们还需要设置训练所需的输入和目标,这里使用的是 + :meth:`~fastNLP.DataSet.set_input` 和 :meth:`~fastNLP.DataSet.set_target` 两个函数。 + + .. code-block:: python + + dataset.set_input(Const.INPUT, Const.INPUT_LEN) + dataset.set_target(Const.TARGET) + +快速训练 + 现在我们可以导入 fastNLP 内置的文本分类模型 :class:`~fastNLP.models.CNNText` ,并使用 :class:`~fastNLP.Trainer` 进行训练了 + (其中 ``loss`` 和 ``metrics`` 的定义,我们将在后续两段代码中给出)。 + + .. code-block:: python + + from fastNLP.models import CNNText + from fastNLP import Trainer + + model = CNNText((len(vocab),50), num_classes=5, padding=2, dropout=0.1) + + trainer = Trainer(model=model_cnn, train_data=train_data, dev_data=dev_data, + loss=loss, metrics=metrics) + trainer.train() + + 训练过程的输出如下:: + + input fields after batch(if batch size is 2): + words: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 26]) + target fields after batch(if batch size is 2): + target: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) + + training epochs started 2019-05-09-10-59-39 + Evaluation at Epoch 1/10. Step:2/20. AccuracyMetric: acc=0.333333 + + Evaluation at Epoch 2/10. Step:4/20. AccuracyMetric: acc=0.533333 + + Evaluation at Epoch 3/10. Step:6/20. AccuracyMetric: acc=0.533333 + + Evaluation at Epoch 4/10. Step:8/20. AccuracyMetric: acc=0.533333 + + Evaluation at Epoch 5/10. Step:10/20. AccuracyMetric: acc=0.6 + + Evaluation at Epoch 6/10. Step:12/20. AccuracyMetric: acc=0.8 + + Evaluation at Epoch 7/10. Step:14/20. AccuracyMetric: acc=0.8 + + Evaluation at Epoch 8/10. Step:16/20. AccuracyMetric: acc=0.733333 + + Evaluation at Epoch 9/10. Step:18/20. AccuracyMetric: acc=0.733333 + + Evaluation at Epoch 10/10. Step:20/20. AccuracyMetric: acc=0.733333 + + + In Epoch:6/Step:12, got best dev performance:AccuracyMetric: acc=0.8 + Reloaded the best model. 
+ +损失函数 + 训练模型需要提供一个损失函数, 下面提供了一个在分类问题中常用的交叉熵损失。注意它的 **初始化参数** 。 + ``pred`` 参数对应的是模型的 forward 方法返回的 dict 中的一个 key 的名字。 + ``target`` 参数对应的是 :class:`~fastNLP.DataSet` 中作为标签的 :mod:`~fastNLP.core.field` 的名字。 + 这里我们用 :class:`~fastNLP.Const` 来辅助命名,如果你自己编写模型中 forward 方法的返回值或 + 数据集中 :mod:`~fastNLP.core.field` 的名字与本例不同, 你可以把 ``pred`` 参数和 ``target`` 参数设定符合自己代码的值。 + + .. code-block:: python + + from fastNLP import CrossEntropyLoss + + # loss = CrossEntropyLoss() 在本例中与下面这行代码等价 + loss = CrossEntropyLoss(pred=Const.OUTPUT, target=Const.TARGET) + +评价指标 + 训练模型需要提供一个评价指标。这里使用准确率做为评价指标。参数的 `命名规则` 跟上面类似。 + ``pred`` 参数对应的是模型的 forward 方法返回的 dict 中的一个 key 的名字。 + ``target`` 参数对应的是 :class:`~fastNLP.DataSet` 中作为标签的 :mod:`~fastNLP.core.field` 的名字。 + + .. code-block:: python + + from fastNLP import AccuracyMetric + + # metrics=AccuracyMetric() 在本例中与下面这行代码等价 + metrics=AccuracyMetric(pred=Const.OUTPUT, target=Const.TARGET) + +快速测试 + 与 :class:`~fastNLP.Trainer` 对应,fastNLP 也提供了 :class:`~fastNLP.Tester` 用于快速测试,用法如下 + + .. code-block:: python + + from fastNLP import Tester + + tester = Tester(test_data, model_cnn, metrics=AccuracyMetric()) + tester.test() + +--------------------- +编写自己的模型 +--------------------- + +因为 fastNLP 是基于 `PyTorch `_ 开发的框架,所以我们可以基于 PyTorch 模型编写自己的神经网络模型。 +与标准的 PyTorch 模型不同,fastNLP 模型中 forward 方法返回的是一个字典,字典中至少需要包含 "pred" 这个字段。 +而 forward 方法的参数名称必须与 :class:`~fastNLP.DataSet` 中用 :meth:`~fastNLP.DataSet.set_input` 设定的名称一致。 +模型定义的代码如下: + +.. code-block:: python + + import torch + import torch.nn as nn + + class LSTMText(nn.Module): + def __init__(self, vocab_size, embedding_dim, output_dim, hidden_dim=64, num_layers=2, dropout=0.5): + super().__init__() + + self.embedding = nn.Embedding(vocab_size, embedding_dim) + self.lstm = nn.LSTM(embedding_dim, hidden_dim, num_layers=num_layers, bidirectional=True, dropout=dropout) + self.fc = nn.Linear(hidden_dim * 2, output_dim) + self.dropout = nn.Dropout(dropout) + + def forward(self, words): + # (input) words : (batch_size, seq_len) + words = words.permute(1,0) + # words : (seq_len, batch_size) + + embedded = self.dropout(self.embedding(words)) + # embedded : (seq_len, batch_size, embedding_dim) + output, (hidden, cell) = self.lstm(embedded) + # output: (seq_len, batch_size, hidden_dim * 2) + # hidden: (num_layers * 2, batch_size, hidden_dim) + # cell: (num_layers * 2, batch_size, hidden_dim) + + hidden = torch.cat((hidden[-2, :, :], hidden[-1, :, :]), dim=1) + hidden = self.dropout(hidden) + # hidden: (batch_size, hidden_dim * 2) + + pred = self.fc(hidden.squeeze(0)) + # result: (batch_size, output_dim) + return {"pred":pred} + +模型的使用方法与内置模型 :class:`~fastNLP.models.CNNText` 一致 + +.. code-block:: python + + model_lstm = LSTMText(len(vocab),50,5) + + trainer = Trainer(model=model_lstm, train_data=train_data, dev_data=dev_data, + loss=loss, metrics=metrics) + trainer.train() + + tester = Tester(test_data, model_lstm, metrics=AccuracyMetric()) + tester.test() + +.. todo:: + 使用 :doc:`/fastNLP.modules` 编写模型 + +-------------------------- +自己编写训练过程 +-------------------------- + +如果你想用类似 PyTorch 的使用方法,自己编写训练过程,你可以参考下面这段代码。其中使用了 fastNLP 提供的 :class:`~fastNLP.Batch` +来获得小批量训练的小批量数据,使用 :class:`~fastNLP.BucketSampler` 做为 :class:`~fastNLP.Batch` 的参数来选择采样的方式。 +这段代码中使用了 PyTorch 的 `torch.optim.Adam` 优化器 和 `torch.nn.CrossEntropyLoss` 损失函数,并自己计算了正确率 + +.. 
code-block:: python + + from fastNLP import BucketSampler + from fastNLP import Batch + import torch + import time + + model = CNNText((len(vocab),50), num_classes=5, padding=2, dropout=0.1) + + def train(epoch, data): + optim = torch.optim.Adam(model.parameters(), lr=0.001) + lossfunc = torch.nn.CrossEntropyLoss() + batch_size = 32 + + train_sampler = BucketSampler(batch_size=batch_size, seq_len_field_name='seq_len') + train_batch = Batch(batch_size=batch_size, dataset=data, sampler=train_sampler) + + start_time = time.time() + for i in range(epoch): + loss_list = [] + for batch_x, batch_y in train_batch: + optim.zero_grad() + output = model(batch_x['words']) + loss = lossfunc(output['pred'], batch_y['target']) + loss.backward() + optim.step() + loss_list.append(loss.item()) + print('Epoch {:d} Avg Loss: {:.2f}'.format(i, sum(loss_list) / len(loss_list)),end=" ") + print('{:d}ms'.format(round((time.time()-start_time)*1000))) + loss_list.clear() + + train(10, train_data) + + tester = Tester(test_data, model, metrics=AccuracyMetric()) + tester.test() + +这段代码的输出如下:: + + Epoch 0 Avg Loss: 2.76 17ms + Epoch 1 Avg Loss: 2.55 29ms + Epoch 2 Avg Loss: 2.37 41ms + Epoch 3 Avg Loss: 2.30 53ms + Epoch 4 Avg Loss: 2.12 65ms + Epoch 5 Avg Loss: 2.16 76ms + Epoch 6 Avg Loss: 1.88 88ms + Epoch 7 Avg Loss: 1.84 99ms + Epoch 8 Avg Loss: 1.71 111ms + Epoch 9 Avg Loss: 1.62 122ms + [tester] + AccuracyMetric: acc=0.142857 + +---------------------------------- +使用 Callback 增强 Trainer +---------------------------------- + +如果你不想自己实现繁琐的训练过程,只希望在训练过程中实现一些自己的功能(比如:输出从训练开始到当前 batch 结束的总时间), +你可以使用 fastNLP 提供的 :class:`~fastNLP.Callback` 类。下面的例子中,我们继承 :class:`~fastNLP.Callback` 类实现了这个功能。 + +.. code-block:: python + + from fastNLP import Callback + + start_time = time.time() + + class MyCallback(Callback): + def on_epoch_end(self): + print('Sum Time: {:d}ms\n\n'.format(round((time.time()-start_time)*1000))) + + + model = CNNText((len(vocab),50), num_classes=5, padding=2, dropout=0.1) + trainer = Trainer(model=model, train_data=train_data, dev_data=dev_data, + loss=CrossEntropyLoss(), metrics=AccuracyMetric(), callbacks=[MyCallback()]) + trainer.train() + +训练输出如下:: + + input fields after batch(if batch size is 2): + words: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 16]) + seq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) + target fields after batch(if batch size is 2): + target: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) + + training epochs started 2019-05-12-21-38-40 + Evaluation at Epoch 1/10. Step:2/20. AccuracyMetric: acc=0.285714 + + Sum Time: 51ms + + + ………………………… + + + Evaluation at Epoch 10/10. Step:20/20. AccuracyMetric: acc=0.857143 + + Sum Time: 212ms + + + + In Epoch:10/Step:20, got best dev performance:AccuracyMetric: acc=0.857143 + Reloaded the best model. 
+ +这个例子只是介绍了 :class:`~fastNLP.Callback` 类的使用方法。实际应用(比如:负采样、Learning Rate Decay、Early Stop 等)中 +很多功能已经被 fastNLP 实现了。你可以直接 import 它们使用,详细请查看文档 :doc:`/fastNLP.core.callback` 。 \ No newline at end of file diff --git a/docs/source/user/with_fitlog.rst b/docs/source/user/with_fitlog.rst new file mode 100644 index 00000000..97c3ea71 --- /dev/null +++ b/docs/source/user/with_fitlog.rst @@ -0,0 +1,5 @@ +================= +科研向导 +================= + +本文介绍使用 fastNLP 和 fitlog 进行科学研究的方法 \ No newline at end of file From 674c3d10281d1f5e0587d59808f58f779d87f055 Mon Sep 17 00:00:00 2001 From: ChenXin Date: Sun, 12 May 2019 22:11:22 +0800 Subject: [PATCH 03/19] =?UTF-8?q?tutorial=20=E7=9A=84=20ipynb=20=E6=96=87?= =?UTF-8?q?=E4=BB=B6?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tutorials/fastnlp_1min_tutorial.ipynb | 1775 ------------------------- tutorials/quickstart.ipynb | 280 ++++ tutorials/tutorial_one.ipynb | 831 ++++++++++++ 3 files changed, 1111 insertions(+), 1775 deletions(-) delete mode 100644 tutorials/fastnlp_1min_tutorial.ipynb create mode 100644 tutorials/quickstart.ipynb create mode 100644 tutorials/tutorial_one.ipynb diff --git a/tutorials/fastnlp_1min_tutorial.ipynb b/tutorials/fastnlp_1min_tutorial.ipynb deleted file mode 100644 index 64d57bc4..00000000 --- a/tutorials/fastnlp_1min_tutorial.ipynb +++ /dev/null @@ -1,1775 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "collapsed": true - }, - "source": [ - "# fastNLP 1分钟上手教程" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## step 1\n", - "读取数据集" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP import DataSet\n", - " \n", - "data_path = \"./sample_data/tutorial_sample_dataset.csv\"\n", - "ds = DataSet.read_csv(data_path, headers=('raw_sentence', 'label'), sep='\\t')" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'raw_sentence': This quiet , introspective and entertaining independent is worth seeking . type=str,\n", - "'label': 4 type=str}" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "ds[1]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## step 2\n", - "数据预处理\n", - "1. 类型转换\n", - "2. 切分验证集\n", - "3. 
构建词典" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[['a',\n", - " 'series',\n", - " 'of',\n", - " 'escapades',\n", - " 'demonstrating',\n", - " 'the',\n", - " 'adage',\n", - " 'that',\n", - " 'what',\n", - " 'is',\n", - " 'good',\n", - " 'for',\n", - " 'the',\n", - " 'goose',\n", - " 'is',\n", - " 'also',\n", - " 'good',\n", - " 'for',\n", - " 'the',\n", - " 'gander',\n", - " ',',\n", - " 'some',\n", - " 'of',\n", - " 'which',\n", - " 'occasionally',\n", - " 'amuses',\n", - " 'but',\n", - " 'none',\n", - " 'of',\n", - " 'which',\n", - " 'amounts',\n", - " 'to',\n", - " 'much',\n", - " 'of',\n", - " 'a',\n", - " 'story',\n", - " '.'],\n", - " ['this',\n", - " 'quiet',\n", - " ',',\n", - " 'introspective',\n", - " 'and',\n", - " 'entertaining',\n", - " 'independent',\n", - " 'is',\n", - " 'worth',\n", - " 'seeking',\n", - " '.'],\n", - " ['even',\n", - " 'fans',\n", - " 'of',\n", - " 'ismail',\n", - " 'merchant',\n", - " \"'s\",\n", - " 'work',\n", - " ',',\n", - " 'i',\n", - " 'suspect',\n", - " ',',\n", - " 'would',\n", - " 'have',\n", - " 'a',\n", - " 'hard',\n", - " 'time',\n", - " 'sitting',\n", - " 'through',\n", - " 'this',\n", - " 'one',\n", - " '.'],\n", - " ['a',\n", - " 'positively',\n", - " 'thrilling',\n", - " 'combination',\n", - " 'of',\n", - " 'ethnography',\n", - " 'and',\n", - " 'all',\n", - " 'the',\n", - " 'intrigue',\n", - " ',',\n", - " 'betrayal',\n", - " ',',\n", - " 'deceit',\n", - " 'and',\n", - " 'murder',\n", - " 'of',\n", - " 'a',\n", - " 'shakespearean',\n", - " 'tragedy',\n", - " 'or',\n", - " 'a',\n", - " 'juicy',\n", - " 'soap',\n", - " 'opera',\n", - " '.'],\n", - " ['aggressive',\n", - " 'self-glorification',\n", - " 'and',\n", - " 'a',\n", - " 'manipulative',\n", - " 'whitewash',\n", - " '.'],\n", - " ['a',\n", - " 'comedy-drama',\n", - " 'of',\n", - " 'nearly',\n", - " 'epic',\n", - " 'proportions',\n", - " 'rooted',\n", - " 'in',\n", - " 'a',\n", - " 'sincere',\n", - " 'performance',\n", - " 'by',\n", - " 'the',\n", - " 'title',\n", - " 'character',\n", - " 'undergoing',\n", - " 'midlife',\n", - " 'crisis',\n", - " '.'],\n", - " ['narratively',\n", - " ',',\n", - " 'trouble',\n", - " 'every',\n", - " 'day',\n", - " 'is',\n", - " 'a',\n", - " 'plodding',\n", - " 'mess',\n", - " '.'],\n", - " ['the',\n", - " 'importance',\n", - " 'of',\n", - " 'being',\n", - " 'earnest',\n", - " ',',\n", - " 'so',\n", - " 'thick',\n", - " 'with',\n", - " 'wit',\n", - " 'it',\n", - " 'plays',\n", - " 'like',\n", - " 'a',\n", - " 'reading',\n", - " 'from',\n", - " 'bartlett',\n", - " \"'s\",\n", - " 'familiar',\n", - " 'quotations'],\n", - " ['but', 'it', 'does', \"n't\", 'leave', 'you', 'with', 'much', '.'],\n", - " ['you', 'could', 'hate', 'it', 'for', 'the', 'same', 'reason', '.'],\n", - " ['there',\n", - " \"'s\",\n", - " 'little',\n", - " 'to',\n", - " 'recommend',\n", - " 'snow',\n", - " 'dogs',\n", - " ',',\n", - " 'unless',\n", - " 'one',\n", - " 'considers',\n", - " 'cliched',\n", - " 'dialogue',\n", - " 'and',\n", - " 'perverse',\n", - " 'escapism',\n", - " 'a',\n", - " 'source',\n", - " 'of',\n", - " 'high',\n", - " 'hilarity',\n", - " '.'],\n", - " ['kung',\n", - " 'pow',\n", - " 'is',\n", - " 'oedekerk',\n", - " \"'s\",\n", - " 'realization',\n", - " 'of',\n", - " 'his',\n", - " 'childhood',\n", - " 'dream',\n", - " 'to',\n", - " 'be',\n", - " 'in',\n", - " 'a',\n", - " 'martial-arts',\n", - " 'flick',\n", - " ',',\n", - " 'and',\n", - " 'proves',\n", - " 'that',\n", - " 
'sometimes',\n", - " 'the',\n", - " 'dreams',\n", - " 'of',\n", - " 'youth',\n", - " 'should',\n", - " 'remain',\n", - " 'just',\n", - " 'that',\n", - " '.'],\n", - " ['the', 'performances', 'are', 'an', 'absolute', 'joy', '.'],\n", - " ['fresnadillo',\n", - " 'has',\n", - " 'something',\n", - " 'serious',\n", - " 'to',\n", - " 'say',\n", - " 'about',\n", - " 'the',\n", - " 'ways',\n", - " 'in',\n", - " 'which',\n", - " 'extravagant',\n", - " 'chance',\n", - " 'can',\n", - " 'distort',\n", - " 'our',\n", - " 'perspective',\n", - " 'and',\n", - " 'throw',\n", - " 'us',\n", - " 'off',\n", - " 'the',\n", - " 'path',\n", - " 'of',\n", - " 'good',\n", - " 'sense',\n", - " '.'],\n", - " ['i',\n", - " 'still',\n", - " 'like',\n", - " 'moonlight',\n", - " 'mile',\n", - " ',',\n", - " 'better',\n", - " 'judgment',\n", - " 'be',\n", - " 'damned',\n", - " '.'],\n", - " ['a',\n", - " 'welcome',\n", - " 'relief',\n", - " 'from',\n", - " 'baseball',\n", - " 'movies',\n", - " 'that',\n", - " 'try',\n", - " 'too',\n", - " 'hard',\n", - " 'to',\n", - " 'be',\n", - " 'mythic',\n", - " ',',\n", - " 'this',\n", - " 'one',\n", - " 'is',\n", - " 'a',\n", - " 'sweet',\n", - " 'and',\n", - " 'modest',\n", - " 'and',\n", - " 'ultimately',\n", - " 'winning',\n", - " 'story',\n", - " '.'],\n", - " ['a',\n", - " 'bilingual',\n", - " 'charmer',\n", - " ',',\n", - " 'just',\n", - " 'like',\n", - " 'the',\n", - " 'woman',\n", - " 'who',\n", - " 'inspired',\n", - " 'it'],\n", - " ['like',\n", - " 'a',\n", - " 'less',\n", - " 'dizzily',\n", - " 'gorgeous',\n", - " 'companion',\n", - " 'to',\n", - " 'mr.',\n", - " 'wong',\n", - " \"'s\",\n", - " 'in',\n", - " 'the',\n", - " 'mood',\n", - " 'for',\n", - " 'love',\n", - " '--',\n", - " 'very',\n", - " 'much',\n", - " 'a',\n", - " 'hong',\n", - " 'kong',\n", - " 'movie',\n", - " 'despite',\n", - " 'its',\n", - " 'mainland',\n", - " 'setting',\n", - " '.'],\n", - " ['as',\n", - " 'inept',\n", - " 'as',\n", - " 'big-screen',\n", - " 'remakes',\n", - " 'of',\n", - " 'the',\n", - " 'avengers',\n", - " 'and',\n", - " 'the',\n", - " 'wild',\n", - " 'wild',\n", - " 'west',\n", - " '.'],\n", - " ['it',\n", - " \"'s\",\n", - " 'everything',\n", - " 'you',\n", - " \"'d\",\n", - " 'expect',\n", - " '--',\n", - " 'but',\n", - " 'nothing',\n", - " 'more',\n", - " '.'],\n", - " ['best', 'indie', 'of', 'the', 'year', ',', 'so', 'far', '.'],\n", - " ['hatfield',\n", - " 'and',\n", - " 'hicks',\n", - " 'make',\n", - " 'the',\n", - " 'oddest',\n", - " 'of',\n", - " 'couples',\n", - " ',',\n", - " 'and',\n", - " 'in',\n", - " 'this',\n", - " 'sense',\n", - " 'the',\n", - " 'movie',\n", - " 'becomes',\n", - " 'a',\n", - " 'study',\n", - " 'of',\n", - " 'the',\n", - " 'gambles',\n", - " 'of',\n", - " 'the',\n", - " 'publishing',\n", - " 'world',\n", - " ',',\n", - " 'offering',\n", - " 'a',\n", - " 'case',\n", - " 'study',\n", - " 'that',\n", - " 'exists',\n", - " 'apart',\n", - " 'from',\n", - " 'all',\n", - " 'the',\n", - " 'movie',\n", - " \"'s\",\n", - " 'political',\n", - " 'ramifications',\n", - " '.'],\n", - " ['it',\n", - " \"'s\",\n", - " 'like',\n", - " 'going',\n", - " 'to',\n", - " 'a',\n", - " 'house',\n", - " 'party',\n", - " 'and',\n", - " 'watching',\n", - " 'the',\n", - " 'host',\n", - " 'defend',\n", - " 'himself',\n", - " 'against',\n", - " 'a',\n", - " 'frothing',\n", - " 'ex-girlfriend',\n", - " '.'],\n", - " ['that',\n", - " 'the',\n", - " 'chuck',\n", - " 'norris',\n", - " '``',\n", - " 'grenade',\n", - " 'gag',\n", - " \"''\",\n", - " 'occurs',\n", - " 'about',\n", - " 
'7',\n", - " 'times',\n", - " 'during',\n", - " 'windtalkers',\n", - " 'is',\n", - " 'a',\n", - " 'good',\n", - " 'indication',\n", - " 'of',\n", - " 'how',\n", - " 'serious-minded',\n", - " 'the',\n", - " 'film',\n", - " 'is',\n", - " '.'],\n", - " ['the',\n", - " 'plot',\n", - " 'is',\n", - " 'romantic',\n", - " 'comedy',\n", - " 'boilerplate',\n", - " 'from',\n", - " 'start',\n", - " 'to',\n", - " 'finish',\n", - " '.'],\n", - " ['it',\n", - " 'arrives',\n", - " 'with',\n", - " 'an',\n", - " 'impeccable',\n", - " 'pedigree',\n", - " ',',\n", - " 'mongrel',\n", - " 'pep',\n", - " ',',\n", - " 'and',\n", - " 'almost',\n", - " 'indecipherable',\n", - " 'plot',\n", - " 'complications',\n", - " '.'],\n", - " ['a',\n", - " 'film',\n", - " 'that',\n", - " 'clearly',\n", - " 'means',\n", - " 'to',\n", - " 'preach',\n", - " 'exclusively',\n", - " 'to',\n", - " 'the',\n", - " 'converted',\n", - " '.'],\n", - " ['while',\n", - " 'the',\n", - " 'importance',\n", - " 'of',\n", - " 'being',\n", - " 'earnest',\n", - " 'offers',\n", - " 'opportunities',\n", - " 'for',\n", - " 'occasional',\n", - " 'smiles',\n", - " 'and',\n", - " 'chuckles',\n", - " ',',\n", - " 'it',\n", - " 'does',\n", - " \"n't\",\n", - " 'give',\n", - " 'us',\n", - " 'a',\n", - " 'reason',\n", - " 'to',\n", - " 'be',\n", - " 'in',\n", - " 'the',\n", - " 'theater',\n", - " 'beyond',\n", - " 'wilde',\n", - " \"'s\",\n", - " 'wit',\n", - " 'and',\n", - " 'the',\n", - " 'actors',\n", - " \"'\",\n", - " 'performances',\n", - " '.'],\n", - " ['the',\n", - " 'latest',\n", - " 'vapid',\n", - " 'actor',\n", - " \"'s\",\n", - " 'exercise',\n", - " 'to',\n", - " 'appropriate',\n", - " 'the',\n", - " 'structure',\n", - " 'of',\n", - " 'arthur',\n", - " 'schnitzler',\n", - " \"'s\",\n", - " 'reigen',\n", - " '.'],\n", - " ['more',\n", - " 'vaudeville',\n", - " 'show',\n", - " 'than',\n", - " 'well-constructed',\n", - " 'narrative',\n", - " ',',\n", - " 'but',\n", - " 'on',\n", - " 'those',\n", - " 'terms',\n", - " 'it',\n", - " \"'s\",\n", - " 'inoffensive',\n", - " 'and',\n", - " 'actually',\n", - " 'rather',\n", - " 'sweet',\n", - " '.'],\n", - " ['nothing', 'more', 'than', 'a', 'run-of-the-mill', 'action', 'flick', '.'],\n", - " ['hampered',\n", - " '--',\n", - " 'no',\n", - " ',',\n", - " 'paralyzed',\n", - " '--',\n", - " 'by',\n", - " 'a',\n", - " 'self-indulgent',\n", - " 'script',\n", - " '...',\n", - " 'that',\n", - " 'aims',\n", - " 'for',\n", - " 'poetry',\n", - " 'and',\n", - " 'ends',\n", - " 'up',\n", - " 'sounding',\n", - " 'like',\n", - " 'satire',\n", - " '.'],\n", - " ['ice',\n", - " 'age',\n", - " 'is',\n", - " 'the',\n", - " 'first',\n", - " 'computer-generated',\n", - " 'feature',\n", - " 'cartoon',\n", - " 'to',\n", - " 'feel',\n", - " 'like',\n", - " 'other',\n", - " 'movies',\n", - " ',',\n", - " 'and',\n", - " 'that',\n", - " 'makes',\n", - " 'for',\n", - " 'some',\n", - " 'glacial',\n", - " 'pacing',\n", - " 'early',\n", - " 'on',\n", - " '.'],\n", - " ['there',\n", - " \"'s\",\n", - " 'very',\n", - " 'little',\n", - " 'sense',\n", - " 'to',\n", - " 'what',\n", - " \"'s\",\n", - " 'going',\n", - " 'on',\n", - " 'here',\n", - " ',',\n", - " 'but',\n", - " 'the',\n", - " 'makers',\n", - " 'serve',\n", - " 'up',\n", - " 'the',\n", - " 'cliches',\n", - " 'with',\n", - " 'considerable',\n", - " 'dash',\n", - " '.'],\n", - " ['cattaneo',\n", - " 'should',\n", - " 'have',\n", - " 'followed',\n", - " 'the',\n", - " 'runaway',\n", - " 'success',\n", - " 'of',\n", - " 'his',\n", - " 'first',\n", - " 'film',\n", - " ',',\n", - " 
'the',\n", - " 'full',\n", - " 'monty',\n", - " ',',\n", - " 'with',\n", - " 'something',\n", - " 'different',\n", - " '.'],\n", - " ['they',\n", - " \"'re\",\n", - " 'the',\n", - " 'unnamed',\n", - " ',',\n", - " 'easily',\n", - " 'substitutable',\n", - " 'forces',\n", - " 'that',\n", - " 'serve',\n", - " 'as',\n", - " 'whatever',\n", - " 'terror',\n", - " 'the',\n", - " 'heroes',\n", - " 'of',\n", - " 'horror',\n", - " 'movies',\n", - " 'try',\n", - " 'to',\n", - " 'avoid',\n", - " '.'],\n", - " ['it',\n", - " 'almost',\n", - " 'feels',\n", - " 'as',\n", - " 'if',\n", - " 'the',\n", - " 'movie',\n", - " 'is',\n", - " 'more',\n", - " 'interested',\n", - " 'in',\n", - " 'entertaining',\n", - " 'itself',\n", - " 'than',\n", - " 'in',\n", - " 'amusing',\n", - " 'us',\n", - " '.'],\n", - " ['the',\n", - " 'movie',\n", - " \"'s\",\n", - " 'progression',\n", - " 'into',\n", - " 'rambling',\n", - " 'incoherence',\n", - " 'gives',\n", - " 'new',\n", - " 'meaning',\n", - " 'to',\n", - " 'the',\n", - " 'phrase',\n", - " '`',\n", - " 'fatal',\n", - " 'script',\n", - " 'error',\n", - " '.',\n", - " \"'\"],\n", - " ['i',\n", - " 'still',\n", - " 'like',\n", - " 'moonlight',\n", - " 'mile',\n", - " ',',\n", - " 'better',\n", - " 'judgment',\n", - " 'be',\n", - " 'damned',\n", - " '.'],\n", - " ['a',\n", - " 'welcome',\n", - " 'relief',\n", - " 'from',\n", - " 'baseball',\n", - " 'movies',\n", - " 'that',\n", - " 'try',\n", - " 'too',\n", - " 'hard',\n", - " 'to',\n", - " 'be',\n", - " 'mythic',\n", - " ',',\n", - " 'this',\n", - " 'one',\n", - " 'is',\n", - " 'a',\n", - " 'sweet',\n", - " 'and',\n", - " 'modest',\n", - " 'and',\n", - " 'ultimately',\n", - " 'winning',\n", - " 'story',\n", - " '.'],\n", - " ['a',\n", - " 'bilingual',\n", - " 'charmer',\n", - " ',',\n", - " 'just',\n", - " 'like',\n", - " 'the',\n", - " 'woman',\n", - " 'who',\n", - " 'inspired',\n", - " 'it'],\n", - " ['like',\n", - " 'a',\n", - " 'less',\n", - " 'dizzily',\n", - " 'gorgeous',\n", - " 'companion',\n", - " 'to',\n", - " 'mr.',\n", - " 'wong',\n", - " \"'s\",\n", - " 'in',\n", - " 'the',\n", - " 'mood',\n", - " 'for',\n", - " 'love',\n", - " '--',\n", - " 'very',\n", - " 'much',\n", - " 'a',\n", - " 'hong',\n", - " 'kong',\n", - " 'movie',\n", - " 'despite',\n", - " 'its',\n", - " 'mainland',\n", - " 'setting',\n", - " '.'],\n", - " ['as',\n", - " 'inept',\n", - " 'as',\n", - " 'big-screen',\n", - " 'remakes',\n", - " 'of',\n", - " 'the',\n", - " 'avengers',\n", - " 'and',\n", - " 'the',\n", - " 'wild',\n", - " 'wild',\n", - " 'west',\n", - " '.'],\n", - " ['it',\n", - " \"'s\",\n", - " 'everything',\n", - " 'you',\n", - " \"'d\",\n", - " 'expect',\n", - " '--',\n", - " 'but',\n", - " 'nothing',\n", - " 'more',\n", - " '.'],\n", - " ['best', 'indie', 'of', 'the', 'year', ',', 'so', 'far', '.'],\n", - " ['hatfield',\n", - " 'and',\n", - " 'hicks',\n", - " 'make',\n", - " 'the',\n", - " 'oddest',\n", - " 'of',\n", - " 'couples',\n", - " ',',\n", - " 'and',\n", - " 'in',\n", - " 'this',\n", - " 'sense',\n", - " 'the',\n", - " 'movie',\n", - " 'becomes',\n", - " 'a',\n", - " 'study',\n", - " 'of',\n", - " 'the',\n", - " 'gambles',\n", - " 'of',\n", - " 'the',\n", - " 'publishing',\n", - " 'world',\n", - " ',',\n", - " 'offering',\n", - " 'a',\n", - " 'case',\n", - " 'study',\n", - " 'that',\n", - " 'exists',\n", - " 'apart',\n", - " 'from',\n", - " 'all',\n", - " 'the',\n", - " 'movie',\n", - " \"'s\",\n", - " 'political',\n", - " 'ramifications',\n", - " '.'],\n", - " ['it',\n", - " \"'s\",\n", - " 'like',\n", - " 
'going',\n", - " 'to',\n", - " 'a',\n", - " 'house',\n", - " 'party',\n", - " 'and',\n", - " 'watching',\n", - " 'the',\n", - " 'host',\n", - " 'defend',\n", - " 'himself',\n", - " 'against',\n", - " 'a',\n", - " 'frothing',\n", - " 'ex-girlfriend',\n", - " '.'],\n", - " ['that',\n", - " 'the',\n", - " 'chuck',\n", - " 'norris',\n", - " '``',\n", - " 'grenade',\n", - " 'gag',\n", - " \"''\",\n", - " 'occurs',\n", - " 'about',\n", - " '7',\n", - " 'times',\n", - " 'during',\n", - " 'windtalkers',\n", - " 'is',\n", - " 'a',\n", - " 'good',\n", - " 'indication',\n", - " 'of',\n", - " 'how',\n", - " 'serious-minded',\n", - " 'the',\n", - " 'film',\n", - " 'is',\n", - " '.'],\n", - " ['the',\n", - " 'plot',\n", - " 'is',\n", - " 'romantic',\n", - " 'comedy',\n", - " 'boilerplate',\n", - " 'from',\n", - " 'start',\n", - " 'to',\n", - " 'finish',\n", - " '.'],\n", - " ['it',\n", - " 'arrives',\n", - " 'with',\n", - " 'an',\n", - " 'impeccable',\n", - " 'pedigree',\n", - " ',',\n", - " 'mongrel',\n", - " 'pep',\n", - " ',',\n", - " 'and',\n", - " 'almost',\n", - " 'indecipherable',\n", - " 'plot',\n", - " 'complications',\n", - " '.'],\n", - " ['a',\n", - " 'film',\n", - " 'that',\n", - " 'clearly',\n", - " 'means',\n", - " 'to',\n", - " 'preach',\n", - " 'exclusively',\n", - " 'to',\n", - " 'the',\n", - " 'converted',\n", - " '.'],\n", - " ['i',\n", - " 'still',\n", - " 'like',\n", - " 'moonlight',\n", - " 'mile',\n", - " ',',\n", - " 'better',\n", - " 'judgment',\n", - " 'be',\n", - " 'damned',\n", - " '.'],\n", - " ['a',\n", - " 'welcome',\n", - " 'relief',\n", - " 'from',\n", - " 'baseball',\n", - " 'movies',\n", - " 'that',\n", - " 'try',\n", - " 'too',\n", - " 'hard',\n", - " 'to',\n", - " 'be',\n", - " 'mythic',\n", - " ',',\n", - " 'this',\n", - " 'one',\n", - " 'is',\n", - " 'a',\n", - " 'sweet',\n", - " 'and',\n", - " 'modest',\n", - " 'and',\n", - " 'ultimately',\n", - " 'winning',\n", - " 'story',\n", - " '.'],\n", - " ['a',\n", - " 'bilingual',\n", - " 'charmer',\n", - " ',',\n", - " 'just',\n", - " 'like',\n", - " 'the',\n", - " 'woman',\n", - " 'who',\n", - " 'inspired',\n", - " 'it'],\n", - " ['like',\n", - " 'a',\n", - " 'less',\n", - " 'dizzily',\n", - " 'gorgeous',\n", - " 'companion',\n", - " 'to',\n", - " 'mr.',\n", - " 'wong',\n", - " \"'s\",\n", - " 'in',\n", - " 'the',\n", - " 'mood',\n", - " 'for',\n", - " 'love',\n", - " '--',\n", - " 'very',\n", - " 'much',\n", - " 'a',\n", - " 'hong',\n", - " 'kong',\n", - " 'movie',\n", - " 'despite',\n", - " 'its',\n", - " 'mainland',\n", - " 'setting',\n", - " '.'],\n", - " ['as',\n", - " 'inept',\n", - " 'as',\n", - " 'big-screen',\n", - " 'remakes',\n", - " 'of',\n", - " 'the',\n", - " 'avengers',\n", - " 'and',\n", - " 'the',\n", - " 'wild',\n", - " 'wild',\n", - " 'west',\n", - " '.'],\n", - " ['it',\n", - " \"'s\",\n", - " 'everything',\n", - " 'you',\n", - " \"'d\",\n", - " 'expect',\n", - " '--',\n", - " 'but',\n", - " 'nothing',\n", - " 'more',\n", - " '.'],\n", - " ['best', 'indie', 'of', 'the', 'year', ',', 'so', 'far', '.'],\n", - " ['hatfield',\n", - " 'and',\n", - " 'hicks',\n", - " 'make',\n", - " 'the',\n", - " 'oddest',\n", - " 'of',\n", - " 'couples',\n", - " ',',\n", - " 'and',\n", - " 'in',\n", - " 'this',\n", - " 'sense',\n", - " 'the',\n", - " 'movie',\n", - " 'becomes',\n", - " 'a',\n", - " 'study',\n", - " 'of',\n", - " 'the',\n", - " 'gambles',\n", - " 'of',\n", - " 'the',\n", - " 'publishing',\n", - " 'world',\n", - " ',',\n", - " 'offering',\n", - " 'a',\n", - " 'case',\n", - " 'study',\n", - " 'that',\n", - 
" 'exists',\n", - " 'apart',\n", - " 'from',\n", - " 'all',\n", - " 'the',\n", - " 'movie',\n", - " \"'s\",\n", - " 'political',\n", - " 'ramifications',\n", - " '.'],\n", - " ['it',\n", - " \"'s\",\n", - " 'like',\n", - " 'going',\n", - " 'to',\n", - " 'a',\n", - " 'house',\n", - " 'party',\n", - " 'and',\n", - " 'watching',\n", - " 'the',\n", - " 'host',\n", - " 'defend',\n", - " 'himself',\n", - " 'against',\n", - " 'a',\n", - " 'frothing',\n", - " 'ex-girlfriend',\n", - " '.'],\n", - " ['that',\n", - " 'the',\n", - " 'chuck',\n", - " 'norris',\n", - " '``',\n", - " 'grenade',\n", - " 'gag',\n", - " \"''\",\n", - " 'occurs',\n", - " 'about',\n", - " '7',\n", - " 'times',\n", - " 'during',\n", - " 'windtalkers',\n", - " 'is',\n", - " 'a',\n", - " 'good',\n", - " 'indication',\n", - " 'of',\n", - " 'how',\n", - " 'serious-minded',\n", - " 'the',\n", - " 'film',\n", - " 'is',\n", - " '.'],\n", - " ['the',\n", - " 'plot',\n", - " 'is',\n", - " 'romantic',\n", - " 'comedy',\n", - " 'boilerplate',\n", - " 'from',\n", - " 'start',\n", - " 'to',\n", - " 'finish',\n", - " '.'],\n", - " ['it',\n", - " 'arrives',\n", - " 'with',\n", - " 'an',\n", - " 'impeccable',\n", - " 'pedigree',\n", - " ',',\n", - " 'mongrel',\n", - " 'pep',\n", - " ',',\n", - " 'and',\n", - " 'almost',\n", - " 'indecipherable',\n", - " 'plot',\n", - " 'complications',\n", - " '.'],\n", - " ['a',\n", - " 'film',\n", - " 'that',\n", - " 'clearly',\n", - " 'means',\n", - " 'to',\n", - " 'preach',\n", - " 'exclusively',\n", - " 'to',\n", - " 'the',\n", - " 'converted',\n", - " '.'],\n", - " ['i',\n", - " 'still',\n", - " 'like',\n", - " 'moonlight',\n", - " 'mile',\n", - " ',',\n", - " 'better',\n", - " 'judgment',\n", - " 'be',\n", - " 'damned',\n", - " '.'],\n", - " ['a',\n", - " 'welcome',\n", - " 'relief',\n", - " 'from',\n", - " 'baseball',\n", - " 'movies',\n", - " 'that',\n", - " 'try',\n", - " 'too',\n", - " 'hard',\n", - " 'to',\n", - " 'be',\n", - " 'mythic',\n", - " ',',\n", - " 'this',\n", - " 'one',\n", - " 'is',\n", - " 'a',\n", - " 'sweet',\n", - " 'and',\n", - " 'modest',\n", - " 'and',\n", - " 'ultimately',\n", - " 'winning',\n", - " 'story',\n", - " '.'],\n", - " ['a',\n", - " 'bilingual',\n", - " 'charmer',\n", - " ',',\n", - " 'just',\n", - " 'like',\n", - " 'the',\n", - " 'woman',\n", - " 'who',\n", - " 'inspired',\n", - " 'it'],\n", - " ['like',\n", - " 'a',\n", - " 'less',\n", - " 'dizzily',\n", - " 'gorgeous',\n", - " 'companion',\n", - " 'to',\n", - " 'mr.',\n", - " 'wong',\n", - " \"'s\",\n", - " 'in',\n", - " 'the',\n", - " 'mood',\n", - " 'for',\n", - " 'love',\n", - " '--',\n", - " 'very',\n", - " 'much',\n", - " 'a',\n", - " 'hong',\n", - " 'kong',\n", - " 'movie',\n", - " 'despite',\n", - " 'its',\n", - " 'mainland',\n", - " 'setting',\n", - " '.'],\n", - " ['as',\n", - " 'inept',\n", - " 'as',\n", - " 'big-screen',\n", - " 'remakes',\n", - " 'of',\n", - " 'the',\n", - " 'avengers',\n", - " 'and',\n", - " 'the',\n", - " 'wild',\n", - " 'wild',\n", - " 'west',\n", - " '.'],\n", - " ['it',\n", - " \"'s\",\n", - " 'everything',\n", - " 'you',\n", - " \"'d\",\n", - " 'expect',\n", - " '--',\n", - " 'but',\n", - " 'nothing',\n", - " 'more',\n", - " '.'],\n", - " ['best', 'indie', 'of', 'the', 'year', ',', 'so', 'far', '.'],\n", - " ['hatfield',\n", - " 'and',\n", - " 'hicks',\n", - " 'make',\n", - " 'the',\n", - " 'oddest',\n", - " 'of',\n", - " 'couples',\n", - " ',',\n", - " 'and',\n", - " 'in',\n", - " 'this',\n", - " 'sense',\n", - " 'the',\n", - " 'movie',\n", - " 'becomes',\n", - " 'a',\n", - " 
'study',\n", - " 'of',\n", - " 'the',\n", - " 'gambles',\n", - " 'of',\n", - " 'the',\n", - " 'publishing',\n", - " 'world',\n", - " ',',\n", - " 'offering',\n", - " 'a',\n", - " 'case',\n", - " 'study',\n", - " 'that',\n", - " 'exists',\n", - " 'apart',\n", - " 'from',\n", - " 'all',\n", - " 'the',\n", - " 'movie',\n", - " \"'s\",\n", - " 'political',\n", - " 'ramifications',\n", - " '.'],\n", - " ['it',\n", - " \"'s\",\n", - " 'like',\n", - " 'going',\n", - " 'to',\n", - " 'a',\n", - " 'house',\n", - " 'party',\n", - " 'and',\n", - " 'watching',\n", - " 'the',\n", - " 'host',\n", - " 'defend',\n", - " 'himself',\n", - " 'against',\n", - " 'a',\n", - " 'frothing',\n", - " 'ex-girlfriend',\n", - " '.'],\n", - " ['that',\n", - " 'the',\n", - " 'chuck',\n", - " 'norris',\n", - " '``',\n", - " 'grenade',\n", - " 'gag',\n", - " \"''\",\n", - " 'occurs',\n", - " 'about',\n", - " '7',\n", - " 'times',\n", - " 'during',\n", - " 'windtalkers',\n", - " 'is',\n", - " 'a',\n", - " 'good',\n", - " 'indication',\n", - " 'of',\n", - " 'how',\n", - " 'serious-minded',\n", - " 'the',\n", - " 'film',\n", - " 'is',\n", - " '.'],\n", - " ['the',\n", - " 'plot',\n", - " 'is',\n", - " 'romantic',\n", - " 'comedy',\n", - " 'boilerplate',\n", - " 'from',\n", - " 'start',\n", - " 'to',\n", - " 'finish',\n", - " '.'],\n", - " ['it',\n", - " 'arrives',\n", - " 'with',\n", - " 'an',\n", - " 'impeccable',\n", - " 'pedigree',\n", - " ',',\n", - " 'mongrel',\n", - " 'pep',\n", - " ',',\n", - " 'and',\n", - " 'almost',\n", - " 'indecipherable',\n", - " 'plot',\n", - " 'complications',\n", - " '.'],\n", - " ['a',\n", - " 'film',\n", - " 'that',\n", - " 'clearly',\n", - " 'means',\n", - " 'to',\n", - " 'preach',\n", - " 'exclusively',\n", - " 'to',\n", - " 'the',\n", - " 'converted',\n", - " '.']]" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 将所有数字转为小写\n", - "ds.apply(lambda x: x['raw_sentence'].lower(), new_field_name='raw_sentence')\n", - "# label转int\n", - "ds.apply(lambda x: int(x['label']), new_field_name='label_seq', is_target=True)\n", - "\n", - "def split_sent(ins):\n", - " return ins['raw_sentence'].split()\n", - "ds.apply(split_sent, new_field_name='words', is_input=True)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Train size: 54\n", - "Test size: 23\n" - ] - } - ], - "source": [ - "# 分割训练集/验证集\n", - "train_data, dev_data = ds.split(0.3)\n", - "print(\"Train size: \", len(train_data))\n", - "print(\"Test size: \", len(dev_data))" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[[120, 121, 6, 2, 122, 5, 72, 123, 3],\n", - " [14,\n", - " 4,\n", - " 152,\n", - " 153,\n", - " 154,\n", - " 155,\n", - " 8,\n", - " 156,\n", - " 157,\n", - " 9,\n", - " 16,\n", - " 2,\n", - " 158,\n", - " 21,\n", - " 159,\n", - " 30,\n", - " 98,\n", - " 57,\n", - " 4,\n", - " 160,\n", - " 161,\n", - " 13,\n", - " 162,\n", - " 163,\n", - " 164,\n", - " 165,\n", - " 3],\n", - " [4,\n", - " 112,\n", - " 113,\n", - " 15,\n", - " 114,\n", - " 35,\n", - " 10,\n", - " 68,\n", - " 115,\n", - " 69,\n", - " 8,\n", - " 23,\n", - " 116,\n", - " 5,\n", - " 18,\n", - " 36,\n", - " 11,\n", - " 4,\n", - " 70,\n", - " 7,\n", - " 117,\n", - " 7,\n", - " 118,\n", - " 119,\n", - " 71,\n", - " 3],\n", - " [4, 1, 1, 5, 138, 14, 2, 1, 1, 1, 12],\n", - " [2, 27, 11, 139, 140, 141, 15, 142, 8, 143, 
3],\n", - " [12, 9, 14, 32, 8, 4, 59, 60, 7, 61, 2, 62, 63, 64, 65, 4, 66, 67, 3],\n", - " [97, 145, 14, 146, 147, 5, 148, 149, 23, 150, 3],\n", - " [4, 1, 1, 5, 138, 14, 2, 1, 1, 1, 12],\n", - " [4, 1, 1, 5, 138, 14, 2, 1, 1, 1, 12],\n", - " [14,\n", - " 4,\n", - " 152,\n", - " 153,\n", - " 154,\n", - " 155,\n", - " 8,\n", - " 156,\n", - " 157,\n", - " 9,\n", - " 16,\n", - " 2,\n", - " 158,\n", - " 21,\n", - " 159,\n", - " 30,\n", - " 98,\n", - " 57,\n", - " 4,\n", - " 160,\n", - " 161,\n", - " 13,\n", - " 162,\n", - " 163,\n", - " 164,\n", - " 165,\n", - " 3],\n", - " [10,\n", - " 2,\n", - " 82,\n", - " 83,\n", - " 84,\n", - " 85,\n", - " 86,\n", - " 87,\n", - " 88,\n", - " 89,\n", - " 90,\n", - " 91,\n", - " 92,\n", - " 93,\n", - " 11,\n", - " 4,\n", - " 28,\n", - " 94,\n", - " 6,\n", - " 95,\n", - " 96,\n", - " 2,\n", - " 17,\n", - " 11,\n", - " 3],\n", - " [12, 73, 20, 33, 74, 75, 5, 76, 77, 5, 7, 78, 79, 27, 80, 3],\n", - " [12, 78, 1, 24, 1, 2, 13, 11, 31, 1, 16, 1, 1, 133, 16, 1, 1, 3],\n", - " [24, 107, 24, 108, 109, 6, 2, 110, 7, 2, 34, 34, 111, 3],\n", - " [2, 27, 11, 139, 140, 141, 15, 142, 8, 143, 3],\n", - " [24, 107, 24, 108, 109, 6, 2, 110, 7, 2, 34, 34, 111, 3],\n", - " [97, 145, 14, 146, 147, 5, 148, 149, 23, 150, 3],\n", - " [4,\n", - " 112,\n", - " 113,\n", - " 15,\n", - " 114,\n", - " 35,\n", - " 10,\n", - " 68,\n", - " 115,\n", - " 69,\n", - " 8,\n", - " 23,\n", - " 116,\n", - " 5,\n", - " 18,\n", - " 36,\n", - " 11,\n", - " 4,\n", - " 70,\n", - " 7,\n", - " 117,\n", - " 7,\n", - " 118,\n", - " 119,\n", - " 71,\n", - " 3],\n", - " [12, 9, 99, 29, 100, 101, 30, 22, 58, 31, 3],\n", - " [12, 9, 99, 29, 100, 101, 30, 22, 58, 31, 3],\n", - " [120, 121, 6, 2, 122, 5, 72, 123, 3],\n", - " [1, 30, 1, 5, 1, 30, 1, 4, 1, 1, 1, 10, 1, 21, 1, 7, 1, 1, 1, 14, 1, 3],\n", - " [1,\n", - " 1,\n", - " 1,\n", - " 1,\n", - " 8,\n", - " 1,\n", - " 89,\n", - " 2,\n", - " 1,\n", - " 16,\n", - " 151,\n", - " 1,\n", - " 1,\n", - " 1,\n", - " 1,\n", - " 1,\n", - " 1,\n", - " 7,\n", - " 1,\n", - " 1,\n", - " 1,\n", - " 2,\n", - " 1,\n", - " 6,\n", - " 28,\n", - " 25,\n", - " 3]]" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from fastNLP import Vocabulary\n", - "vocab = Vocabulary(min_freq=2)\n", - "train_data.apply(lambda x: [vocab.add(word) for word in x['words']])\n", - "\n", - "# index句子, Vocabulary.to_index(word)\n", - "train_data.apply(lambda x: [vocab.to_index(word) for word in x['words']], new_field_name='word_seq', is_input=True)\n", - "dev_data.apply(lambda x: [vocab.to_index(word) for word in x['words']], new_field_name='word_seq', is_input=True)\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## step 3\n", - " 定义模型" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "from fastNLP.models import CNNText\n", - "model = CNNText((len(vocab),50), num_classes=5, padding=2, dropout=0.1)\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## step 4\n", - "开始训练" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "input fields after batch(if batch size is 2):\n", - "\twords: (1)type:numpy.ndarray (2)dtype:object, (3)shape:(2,) \n", - "\tword_seq: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 11]) \n", - "target fields after batch(if batch size is 2):\n", - "\tlabel_seq: (1)type:torch.Tensor 
(2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\n" - ] - }, - { - "ename": "AttributeError", - "evalue": "'numpy.ndarray' object has no attribute 'contiguous'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mdev_data\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mdev_data\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mloss\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mCrossEntropyLoss\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 6\u001b[0;31m \u001b[0mmetrics\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mAccuracyMetric\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 7\u001b[0m )\n\u001b[1;32m 8\u001b[0m \u001b[0mtrainer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/core/trainer.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, train_data, model, optimizer, loss, batch_size, sampler, update_every, n_epochs, print_every, dev_data, metrics, metric_key, validate_every, save_path, prefetch, use_tqdm, device, callbacks, check_code_level)\u001b[0m\n\u001b[1;32m 447\u001b[0m _check_code(dataset=train_data, model=model, losser=losser, metrics=metrics, dev_data=dev_data,\n\u001b[1;32m 448\u001b[0m \u001b[0mmetric_key\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mmetric_key\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcheck_level\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcheck_code_level\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 449\u001b[0;31m batch_size=min(batch_size, DEFAULT_CHECK_BATCH_SIZE))\n\u001b[0m\u001b[1;32m 450\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 451\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtrain_data\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/core/trainer.py\u001b[0m in \u001b[0;36m_check_code\u001b[0;34m(dataset, model, losser, metrics, batch_size, dev_data, metric_key, check_level)\u001b[0m\n\u001b[1;32m 811\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 812\u001b[0m \u001b[0mrefined_batch_x\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_build_args\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mbatch_x\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 813\u001b[0;31m \u001b[0mpred_dict\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mrefined_batch_x\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 814\u001b[0m \u001b[0mfunc_signature\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0m_get_func_signature\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 815\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpred_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m 489\u001b[0m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_slow_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 490\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 491\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 492\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mhook\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_forward_hooks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 493\u001b[0m \u001b[0mhook_result\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mhook\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/models/cnn_text_classification.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, words, seq_len)\u001b[0m\n\u001b[1;32m 58\u001b[0m \u001b[0;34m:\u001b[0m\u001b[0;32mreturn\u001b[0m \u001b[0moutput\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mdict\u001b[0m \u001b[0mof\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mLongTensor\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mbatch_size\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnum_classes\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 59\u001b[0m \"\"\"\n\u001b[0;32m---> 60\u001b[0;31m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0membed\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mwords\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# [N,L] -> [N,L,C]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 61\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconv_pool\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# [N,L,C] -> [N,C]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 62\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdropout\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m 489\u001b[0m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_slow_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 490\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 491\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 492\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mhook\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_forward_hooks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 493\u001b[0m \u001b[0mhook_result\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mhook\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/modules/encoder/embedding.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m 33\u001b[0m \u001b[0;34m:\u001b[0m\u001b[0;32mreturn\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTensor\u001b[0m \u001b[0;34m:\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mbatch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mseq_len\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0membed_dim\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 34\u001b[0m \"\"\"\n\u001b[0;32m---> 35\u001b[0;31m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msuper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 36\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdropout\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/torch/nn/modules/sparse.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 106\u001b[0m return F.embedding(\n\u001b[1;32m 107\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mweight\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpadding_idx\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmax_norm\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 108\u001b[0;31m self.norm_type, self.scale_grad_by_freq, self.sparse)\n\u001b[0m\u001b[1;32m 109\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 110\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mextra_repr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/torch/nn/functional.py\u001b[0m in \u001b[0;36membedding\u001b[0;34m(input, weight, padding_idx, max_norm, norm_type, scale_grad_by_freq, sparse)\u001b[0m\n\u001b[1;32m 1062\u001b[0m [ 0.6262, 0.2438, 0.7471]]])\n\u001b[1;32m 1063\u001b[0m \"\"\"\n\u001b[0;32m-> 1064\u001b[0;31m \u001b[0minput\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcontiguous\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1065\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mpadding_idx\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1066\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mpadding_idx\u001b[0m \u001b[0;34m>\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mAttributeError\u001b[0m: 'numpy.ndarray' object has no attribute 'contiguous'" - ] - } - ], - "source": [ - "from fastNLP import Trainer, CrossEntropyLoss, AccuracyMetric\n", - "trainer = Trainer(model=model, \n", - " train_data=train_data, \n", - " dev_data=dev_data,\n", - " loss=CrossEntropyLoss(),\n", - " metrics=AccuracyMetric()\n", - " )\n", - "trainer.train()\n", - "print('Train finished!')\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 本教程结束。更多操作请参考进阶教程。" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.7" - } - }, - "nbformat": 4, - "nbformat_minor": 1 -} diff --git a/tutorials/quickstart.ipynb b/tutorials/quickstart.ipynb new file mode 100644 index 00000000..00c30c93 --- /dev/null +++ b/tutorials/quickstart.ipynb @@ -0,0 +1,280 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# 快速入门" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'raw_sentence': A series of escapades demonstrating the adage that what is good for the goose is also good for the gander , some of which occasionally amuses but none of which amounts to much of a story . 
type=str,\n", + "'label': 1 type=str}" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from fastNLP.io import CSVLoader\n", + "\n", + "loader = CSVLoader(headers=('raw_sentence', 'label'), sep='\\t')\n", + "dataset = loader.load(\"./sample_data/tutorial_sample_dataset.csv\")\n", + "dataset[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'raw_sentence': A series of escapades demonstrating the adage that what is good for the goose is also good for the gander , some of which occasionally amuses but none of which amounts to much of a story . type=str,\n", + "'label': 1 type=str,\n", + "'sentence': a series of escapades demonstrating the adage that what is good for the goose is also good for the gander , some of which occasionally amuses but none of which amounts to much of a story . type=str,\n", + "'words': ['a', 'series', 'of', 'escapades', 'demonstrating', 'the', 'adage', 'that', 'what', 'is', 'good', 'for', 'the', 'goose', 'is', 'also', 'good', 'for', 'the', 'gander', ',', 'some', 'of', 'which', 'occasionally', 'amuses', 'but', 'none', 'of', 'which', 'amounts', 'to', 'much', 'of', 'a', 'story', '.'] type=list}" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# 将所有字母转为小写, 并所有句子变成单词序列\n", + "dataset.apply(lambda x: x['raw_sentence'].lower(), new_field_name='sentence')\n", + "dataset.apply(lambda x: x['sentence'].split(), new_field_name='words', is_input=True)\n", + "dataset[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'raw_sentence': A series of escapades demonstrating the adage that what is good for the goose is also good for the gander , some of which occasionally amuses but none of which amounts to much of a story . type=str,\n", + "'label': 1 type=str,\n", + "'sentence': a series of escapades demonstrating the adage that what is good for the goose is also good for the gander , some of which occasionally amuses but none of which amounts to much of a story . type=str,\n", + "'words': [4, 1, 6, 1, 1, 2, 1, 11, 153, 10, 28, 17, 2, 1, 10, 1, 28, 17, 2, 1, 5, 154, 6, 149, 1, 1, 23, 1, 6, 149, 1, 8, 30, 6, 4, 35, 3] type=list}" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from fastNLP import Vocabulary\n", + "\n", + "# 使用Vocabulary类统计单词,并将单词序列转化为数字序列\n", + "vocab = Vocabulary(min_freq=2).from_dataset(dataset, field_name='words')\n", + "vocab.index_dataset(dataset, field_name='words',new_field_name='words')\n", + "dataset[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'raw_sentence': A series of escapades demonstrating the adage that what is good for the goose is also good for the gander , some of which occasionally amuses but none of which amounts to much of a story . type=str,\n", + "'label': 1 type=str,\n", + "'sentence': a series of escapades demonstrating the adage that what is good for the goose is also good for the gander , some of which occasionally amuses but none of which amounts to much of a story . 
type=str,\n", + "'words': [4, 1, 6, 1, 1, 2, 1, 11, 153, 10, 28, 17, 2, 1, 10, 1, 28, 17, 2, 1, 5, 154, 6, 149, 1, 1, 23, 1, 6, 149, 1, 8, 30, 6, 4, 35, 3] type=list,\n", + "'target': 1 type=int}" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# 将label转为整数,并设置为 target\n", + "dataset.apply(lambda x: int(x['label']), new_field_name='target', is_target=True)\n", + "dataset[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "CNNText(\n", + " (embed): Embedding(\n", + " 177, 50\n", + " (dropout): Dropout(p=0.0)\n", + " )\n", + " (conv_pool): ConvMaxpool(\n", + " (convs): ModuleList(\n", + " (0): Conv1d(50, 3, kernel_size=(3,), stride=(1,), padding=(2,))\n", + " (1): Conv1d(50, 4, kernel_size=(4,), stride=(1,), padding=(2,))\n", + " (2): Conv1d(50, 5, kernel_size=(5,), stride=(1,), padding=(2,))\n", + " )\n", + " )\n", + " (dropout): Dropout(p=0.1)\n", + " (fc): Linear(in_features=12, out_features=5, bias=True)\n", + ")" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from fastNLP.models import CNNText\n", + "model = CNNText((len(vocab),50), num_classes=5, padding=2, dropout=0.1)\n", + "model" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(62, 15)" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# 分割训练集/验证集\n", + "train_data, dev_data = dataset.split(0.2)\n", + "len(train_data), len(dev_data)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "input fields after batch(if batch size is 2):\n", + "\twords: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 26]) \n", + "target fields after batch(if batch size is 2):\n", + "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", + "\n", + "training epochs started 2019-05-09-10-59-39\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=20), HTML(value='')), layout=Layout(display='…" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Evaluation at Epoch 1/10. Step:2/20. AccuracyMetric: acc=0.333333\n", + "\n", + "Evaluation at Epoch 2/10. Step:4/20. AccuracyMetric: acc=0.533333\n", + "\n", + "Evaluation at Epoch 3/10. Step:6/20. AccuracyMetric: acc=0.533333\n", + "\n", + "Evaluation at Epoch 4/10. Step:8/20. AccuracyMetric: acc=0.533333\n", + "\n", + "Evaluation at Epoch 5/10. Step:10/20. AccuracyMetric: acc=0.6\n", + "\n", + "Evaluation at Epoch 6/10. Step:12/20. AccuracyMetric: acc=0.8\n", + "\n", + "Evaluation at Epoch 7/10. Step:14/20. AccuracyMetric: acc=0.8\n", + "\n", + "Evaluation at Epoch 8/10. Step:16/20. AccuracyMetric: acc=0.733333\n", + "\n", + "Evaluation at Epoch 9/10. Step:18/20. AccuracyMetric: acc=0.733333\n", + "\n", + "Evaluation at Epoch 10/10. Step:20/20. 
AccuracyMetric: acc=0.733333\n", + "\n", + "\n", + "In Epoch:6/Step:12, got best dev performance:AccuracyMetric: acc=0.8\n", + "Reloaded the best model.\n" + ] + }, + { + "data": { + "text/plain": [ + "{'best_eval': {'AccuracyMetric': {'acc': 0.8}},\n", + " 'best_epoch': 6,\n", + " 'best_step': 12,\n", + " 'seconds': 0.22}" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from fastNLP import Trainer, CrossEntropyLoss, AccuracyMetric\n", + "\n", + "# 定义trainer并进行训练\n", + "trainer = Trainer(model=model, train_data=train_data, dev_data=dev_data,\n", + " loss=CrossEntropyLoss(), metrics=AccuracyMetric())\n", + "trainer.train()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.7" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/tutorials/tutorial_one.ipynb b/tutorials/tutorial_one.ipynb new file mode 100644 index 00000000..db302238 --- /dev/null +++ b/tutorials/tutorial_one.ipynb @@ -0,0 +1,831 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "# 详细指南" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 数据读入" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'raw_sentence': A series of escapades demonstrating the adage that what is good for the goose is also good for the gander , some of which occasionally amuses but none of which amounts to much of a story . type=str,\n", + "'label': 1 type=str}" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from fastNLP.io import CSVLoader\n", + "\n", + "loader = CSVLoader(headers=('raw_sentence', 'label'), sep='\\t')\n", + "dataset = loader.load(\"./sample_data/tutorial_sample_dataset.csv\")\n", + "dataset[0]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Instance表示一个样本,由一个或多个field(域,属性,特征)组成,每个field有名字和值。\n", + "\n", + "在初始化Instance时即可定义它包含的域,使用 \"field_name=field_value\"的写法。" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'raw_sentence': fake data type=str,\n", + "'label': 0 type=str}" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from fastNLP import Instance\n", + "\n", + "dataset.append(Instance(raw_sentence='fake data', label='0'))\n", + "dataset[-1]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 数据处理" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'raw_sentence': A series of escapades demonstrating the adage that what is good for the goose is also good for the gander , some of which occasionally amuses but none of which amounts to much of a story . type=str,\n", + "'label': 1 type=str,\n", + "'sentence': a series of escapades demonstrating the adage that what is good for the goose is also good for the gander , some of which occasionally amuses but none of which amounts to much of a story . 
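Because Instance fields are plain keyword arguments, a DataSet can also be assembled entirely by hand; a minimal sketch (the two toy sentences below are made up for illustration):

    from fastNLP import DataSet, Instance

    # Each Instance is one sample; append() adds it to the DataSet.
    ds = DataSet()
    ds.append(Instance(raw_sentence='fastNLP makes prototyping simple .', label='1'))
    ds.append(Instance(raw_sentence='this sentence is only a placeholder .', label='0'))
    print(len(ds), ds[0])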
type=str,\n", + "'words': [4, 1, 6, 1, 1, 2, 1, 11, 153, 10, 28, 17, 2, 1, 10, 1, 28, 17, 2, 1, 5, 154, 6, 149, 1, 1, 23, 1, 6, 149, 1, 8, 30, 6, 4, 35, 3] type=list,\n", + "'target': 1 type=int}" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from fastNLP import Vocabulary\n", + "\n", + "# 将所有字母转为小写, 并所有句子变成单词序列\n", + "dataset.apply(lambda x: x['raw_sentence'].lower(), new_field_name='sentence')\n", + "dataset.apply_field(lambda x: x.split(), field_name='sentence', new_field_name='words')\n", + "\n", + "# 使用Vocabulary类统计单词,并将单词序列转化为数字序列\n", + "vocab = Vocabulary(min_freq=2).from_dataset(dataset, field_name='words')\n", + "vocab.index_dataset(dataset, field_name='words',new_field_name='words')\n", + "\n", + "# 将label转为整数\n", + "dataset.apply(lambda x: int(x['label']), new_field_name='target')\n", + "dataset[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'raw_sentence': A series of escapades demonstrating the adage that what is good for the goose is also good for the gander , some of which occasionally amuses but none of which amounts to much of a story . type=str,\n", + "'label': 1 type=str,\n", + "'sentence': a series of escapades demonstrating the adage that what is good for the goose is also good for the gander , some of which occasionally amuses but none of which amounts to much of a story . type=str,\n", + "'words': [4, 1, 6, 1, 1, 2, 1, 11, 153, 10, 28, 17, 2, 1, 10, 1, 28, 17, 2, 1, 5, 154, 6, 149, 1, 1, 23, 1, 6, 149, 1, 8, 30, 6, 4, 35, 3] type=list,\n", + "'target': 1 type=int,\n", + "'seq_len': 37 type=int}\n" + ] + } + ], + "source": [ + "# 增加长度信息\n", + "dataset.apply_field(lambda x: len(x), field_name='words', new_field_name='seq_len')\n", + "print(dataset[0])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 使用内置模块CNNText\n", + "设置为符合内置模块的名称" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "CNNText(\n", + " (embed): Embedding(\n", + " 177, 50\n", + " (dropout): Dropout(p=0.0)\n", + " )\n", + " (conv_pool): ConvMaxpool(\n", + " (convs): ModuleList(\n", + " (0): Conv1d(50, 3, kernel_size=(3,), stride=(1,), padding=(2,))\n", + " (1): Conv1d(50, 4, kernel_size=(4,), stride=(1,), padding=(2,))\n", + " (2): Conv1d(50, 5, kernel_size=(5,), stride=(1,), padding=(2,))\n", + " )\n", + " )\n", + " (dropout): Dropout(p=0.1)\n", + " (fc): Linear(in_features=12, out_features=5, bias=True)\n", + ")" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from fastNLP.models import CNNText\n", + "\n", + "model_cnn = CNNText((len(vocab),50), num_classes=5, padding=2, dropout=0.1)\n", + "model_cnn" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "我们在使用内置模块的时候,还应该使用应该注意把 field 设定成符合内置模型输入输出的名字。" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "words\n", + "seq_len\n", + "target\n" + ] + } + ], + "source": [ + "from fastNLP import Const\n", + "\n", + "dataset.rename_field('words', Const.INPUT)\n", + "dataset.rename_field('seq_len', Const.INPUT_LEN)\n", + "dataset.rename_field('target', Const.TARGET)\n", + "\n", + "dataset.set_input(Const.INPUT, Const.INPUT_LEN)\n", + "dataset.set_target(Const.TARGET)\n", + "\n", + 
"print(Const.INPUT)\n", + "print(Const.INPUT_LEN)\n", + "print(Const.TARGET)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 分割训练集/验证集/测试集" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(64, 7, 7)" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "train_dev_data, test_data = dataset.split(0.1)\n", + "train_data, dev_data = train_dev_data.split(0.1)\n", + "len(train_data), len(dev_data), len(test_data)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 训练(model_cnn)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### loss\n", + "训练模型需要提供一个损失函数\n", + "\n", + "下面提供了一个在分类问题中常用的交叉熵损失。注意它的**初始化参数**。\n", + "\n", + "pred参数对应的是模型的forward返回的dict的一个key的名字,这里是\"output\"。\n", + "\n", + "target参数对应的是dataset作为标签的field的名字,这里是\"label_seq\"。" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "from fastNLP import CrossEntropyLoss\n", + "\n", + "# loss = CrossEntropyLoss()\n", + "# 等价于\n", + "loss = CrossEntropyLoss(pred=Const.OUTPUT, target=Const.TARGET)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Metric\n", + "定义评价指标\n", + "\n", + "这里使用准确率。参数的“命名规则”跟上面类似。\n", + "\n", + "pred参数对应的是模型的predict方法返回的dict的一个key的名字,这里是\"predict\"。\n", + "\n", + "target参数对应的是dataset作为标签的field的名字,这里是\"label_seq\"。" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "from fastNLP import AccuracyMetric\n", + "\n", + "# metrics=AccuracyMetric()\n", + "# 等价于\n", + "metrics=AccuracyMetric(pred=Const.OUTPUT, target=Const.TARGET)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "input fields after batch(if batch size is 2):\n", + "\twords: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 16]) \n", + "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", + "target fields after batch(if batch size is 2):\n", + "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", + "\n", + "training epochs started 2019-05-12-21-38-34\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=20), HTML(value='')), layout=Layout(display='…" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Evaluation at Epoch 1/10. Step:2/20. AccuracyMetric: acc=0.285714\n", + "\n", + "Evaluation at Epoch 2/10. Step:4/20. AccuracyMetric: acc=0.428571\n", + "\n", + "Evaluation at Epoch 3/10. Step:6/20. AccuracyMetric: acc=0.428571\n", + "\n", + "Evaluation at Epoch 4/10. Step:8/20. AccuracyMetric: acc=0.428571\n", + "\n", + "Evaluation at Epoch 5/10. Step:10/20. AccuracyMetric: acc=0.428571\n", + "\n", + "Evaluation at Epoch 6/10. Step:12/20. AccuracyMetric: acc=0.428571\n", + "\n", + "Evaluation at Epoch 7/10. Step:14/20. AccuracyMetric: acc=0.428571\n", + "\n", + "Evaluation at Epoch 8/10. Step:16/20. AccuracyMetric: acc=0.857143\n", + "\n", + "Evaluation at Epoch 9/10. Step:18/20. 
AccuracyMetric: acc=0.857143\n", + "\n", + "Evaluation at Epoch 10/10. Step:20/20. AccuracyMetric: acc=0.857143\n", + "\n", + "\n", + "In Epoch:8/Step:16, got best dev performance:AccuracyMetric: acc=0.857143\n", + "Reloaded the best model.\n" + ] + }, + { + "data": { + "text/plain": [ + "{'best_eval': {'AccuracyMetric': {'acc': 0.857143}},\n", + " 'best_epoch': 8,\n", + " 'best_step': 16,\n", + " 'seconds': 0.21}" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from fastNLP import Trainer\n", + "\n", + "trainer = Trainer(model=model_cnn, train_data=train_data, dev_data=dev_data, loss=loss, metrics=metrics)\n", + "trainer.train()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 测试(model_cnn)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[tester] \n", + "AccuracyMetric: acc=0.857143\n" + ] + }, + { + "data": { + "text/plain": [ + "{'AccuracyMetric': {'acc': 0.857143}}" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from fastNLP import Tester\n", + "\n", + "tester = Tester(test_data, model_cnn, metrics=AccuracyMetric())\n", + "tester.test()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 编写自己的模型\n", + "\n", + "完全支持 pytorch 的模型,与 pytorch 唯一不同的是返回结果是一个字典,字典中至少需要包含 \"pred\" 这个字段" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "import torch.nn as nn\n", + "\n", + "class LSTMText(nn.Module):\n", + " def __init__(self, vocab_size, embedding_dim, output_dim, hidden_dim=64, num_layers=2, dropout=0.5):\n", + " super().__init__()\n", + "\n", + " self.embedding = nn.Embedding(vocab_size, embedding_dim)\n", + " self.lstm = nn.LSTM(embedding_dim, hidden_dim, num_layers=num_layers, bidirectional=True, dropout=dropout)\n", + " self.fc = nn.Linear(hidden_dim * 2, output_dim)\n", + " self.dropout = nn.Dropout(dropout)\n", + "\n", + " def forward(self, words):\n", + " # (input) words : (batch_size, seq_len)\n", + " words = words.permute(1,0)\n", + " # words : (seq_len, batch_size)\n", + "\n", + " embedded = self.dropout(self.embedding(words))\n", + " # embedded : (seq_len, batch_size, embedding_dim)\n", + " output, (hidden, cell) = self.lstm(embedded)\n", + " # output: (seq_len, batch_size, hidden_dim * 2)\n", + " # hidden: (num_layers * 2, batch_size, hidden_dim)\n", + " # cell: (num_layers * 2, batch_size, hidden_dim)\n", + "\n", + " hidden = torch.cat((hidden[-2, :, :], hidden[-1, :, :]), dim=1)\n", + " hidden = self.dropout(hidden)\n", + " # hidden: (batch_size, hidden_dim * 2)\n", + "\n", + " pred = self.fc(hidden.squeeze(0))\n", + " # result: (batch_size, output_dim)\n", + " return {\"pred\":pred}" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "input fields after batch(if batch size is 2):\n", + "\twords: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 16]) \n", + "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", + "target fields after batch(if batch size is 2):\n", + "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", + "\n", + "training epochs started 2019-05-12-21-38-36\n" + ] + }, + { + "data": { + 
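The LSTMText class above shows a full example of the contract; a bare-bones sketch of the same contract with a hypothetical model (not part of the notebook): forward's parameter names must match the input field names, and the returned dict must contain the key 'pred':

    import torch.nn as nn

    class BagOfWordsText(nn.Module):
        """Hypothetical minimal classifier obeying the fastNLP model contract."""
        def __init__(self, vocab_size, embed_dim, num_classes):
            super().__init__()
            self.embed = nn.Embedding(vocab_size, embed_dim)
            self.fc = nn.Linear(embed_dim, num_classes)

        def forward(self, words):
            # words: (batch_size, seq_len) word indices produced by the DataSet above
            x = self.embed(words).mean(dim=1)  # naive mean pooling over tokens
            return {'pred': self.fc(x)}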
"application/vnd.jupyter.widget-view+json": { + "model_id": "", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=20), HTML(value='')), layout=Layout(display='…" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Evaluation at Epoch 1/10. Step:2/20. AccuracyMetric: acc=0.571429\n", + "\n", + "Evaluation at Epoch 2/10. Step:4/20. AccuracyMetric: acc=0.571429\n", + "\n", + "Evaluation at Epoch 3/10. Step:6/20. AccuracyMetric: acc=0.571429\n", + "\n", + "Evaluation at Epoch 4/10. Step:8/20. AccuracyMetric: acc=0.571429\n", + "\n", + "Evaluation at Epoch 5/10. Step:10/20. AccuracyMetric: acc=0.714286\n", + "\n", + "Evaluation at Epoch 6/10. Step:12/20. AccuracyMetric: acc=0.857143\n", + "\n", + "Evaluation at Epoch 7/10. Step:14/20. AccuracyMetric: acc=0.857143\n", + "\n", + "Evaluation at Epoch 8/10. Step:16/20. AccuracyMetric: acc=0.857143\n", + "\n", + "Evaluation at Epoch 9/10. Step:18/20. AccuracyMetric: acc=0.857143\n", + "\n", + "Evaluation at Epoch 10/10. Step:20/20. AccuracyMetric: acc=0.857143\n", + "\n", + "\n", + "In Epoch:6/Step:12, got best dev performance:AccuracyMetric: acc=0.857143\n", + "Reloaded the best model.\n" + ] + }, + { + "data": { + "text/plain": [ + "{'best_eval': {'AccuracyMetric': {'acc': 0.857143}},\n", + " 'best_epoch': 6,\n", + " 'best_step': 12,\n", + " 'seconds': 2.15}" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model_lstm = LSTMText(len(vocab),50,5)\n", + "trainer = Trainer(model=model_lstm, train_data=train_data, dev_data=dev_data, loss=loss, metrics=metrics)\n", + "trainer.train()" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[tester] \n", + "AccuracyMetric: acc=0.857143\n" + ] + }, + { + "data": { + "text/plain": [ + "{'AccuracyMetric': {'acc': 0.857143}}" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "tester = Tester(test_data, model_lstm, metrics=AccuracyMetric())\n", + "tester.test()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 使用 Batch编写自己的训练过程" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Epoch 0 Avg Loss: 3.11 18ms\n", + "Epoch 1 Avg Loss: 2.88 30ms\n", + "Epoch 2 Avg Loss: 2.69 42ms\n", + "Epoch 3 Avg Loss: 2.47 54ms\n", + "Epoch 4 Avg Loss: 2.38 67ms\n", + "Epoch 5 Avg Loss: 2.10 78ms\n", + "Epoch 6 Avg Loss: 2.06 91ms\n", + "Epoch 7 Avg Loss: 1.92 103ms\n", + "Epoch 8 Avg Loss: 1.91 114ms\n", + "Epoch 9 Avg Loss: 1.76 126ms\n", + "[tester] \n", + "AccuracyMetric: acc=0.571429\n" + ] + }, + { + "data": { + "text/plain": [ + "{'AccuracyMetric': {'acc': 0.571429}}" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from fastNLP import BucketSampler\n", + "from fastNLP import Batch\n", + "import torch\n", + "import time\n", + "\n", + "model = CNNText((len(vocab),50), num_classes=5, padding=2, dropout=0.1)\n", + "\n", + "def train(epoch, data):\n", + " optim = torch.optim.Adam(model.parameters(), lr=0.001)\n", + " lossfunc = torch.nn.CrossEntropyLoss()\n", + " batch_size = 32\n", + "\n", + " # 定义一个Batch,传入DataSet,规定batch_size和去batch的规则。\n", 
+ " # 顺序(Sequential),随机(Random),相似长度组成一个batch(Bucket)\n", + " train_sampler = BucketSampler(batch_size=batch_size, seq_len_field_name='seq_len')\n", + " train_batch = Batch(batch_size=batch_size, dataset=data, sampler=train_sampler)\n", + " \n", + " start_time = time.time()\n", + " for i in range(epoch):\n", + " loss_list = []\n", + " for batch_x, batch_y in train_batch:\n", + " optim.zero_grad()\n", + " output = model(batch_x['words'])\n", + " loss = lossfunc(output['pred'], batch_y['target'])\n", + " loss.backward()\n", + " optim.step()\n", + " loss_list.append(loss.item())\n", + " print('Epoch {:d} Avg Loss: {:.2f}'.format(i, sum(loss_list) / len(loss_list)),end=\" \")\n", + " print('{:d}ms'.format(round((time.time()-start_time)*1000)))\n", + " loss_list.clear()\n", + " \n", + "train(10, train_data)\n", + "tester = Tester(test_data, model, metrics=AccuracyMetric())\n", + "tester.test()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 使用 Callback 实现自己想要的效果" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "input fields after batch(if batch size is 2):\n", + "\twords: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 16]) \n", + "\tseq_len: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", + "target fields after batch(if batch size is 2):\n", + "\ttarget: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", + "\n", + "training epochs started 2019-05-12-21-38-40\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=20), HTML(value='')), layout=Layout(display='…" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Evaluation at Epoch 1/10. Step:2/20. AccuracyMetric: acc=0.285714\n", + "\n", + "Sum Time: 51ms\n", + "\n", + "\n", + "Evaluation at Epoch 2/10. Step:4/20. AccuracyMetric: acc=0.285714\n", + "\n", + "Sum Time: 69ms\n", + "\n", + "\n", + "Evaluation at Epoch 3/10. Step:6/20. AccuracyMetric: acc=0.285714\n", + "\n", + "Sum Time: 91ms\n", + "\n", + "\n", + "Evaluation at Epoch 4/10. Step:8/20. AccuracyMetric: acc=0.571429\n", + "\n", + "Sum Time: 107ms\n", + "\n", + "\n", + "Evaluation at Epoch 5/10. Step:10/20. AccuracyMetric: acc=0.571429\n", + "\n", + "Sum Time: 125ms\n", + "\n", + "\n", + "Evaluation at Epoch 6/10. Step:12/20. AccuracyMetric: acc=0.571429\n", + "\n", + "Sum Time: 142ms\n", + "\n", + "\n", + "Evaluation at Epoch 7/10. Step:14/20. AccuracyMetric: acc=0.571429\n", + "\n", + "Sum Time: 158ms\n", + "\n", + "\n", + "Evaluation at Epoch 8/10. Step:16/20. AccuracyMetric: acc=0.571429\n", + "\n", + "Sum Time: 176ms\n", + "\n", + "\n", + "Evaluation at Epoch 9/10. Step:18/20. AccuracyMetric: acc=0.714286\n", + "\n", + "Sum Time: 193ms\n", + "\n", + "\n", + "Evaluation at Epoch 10/10. Step:20/20. 
AccuracyMetric: acc=0.857143\n", + "\n", + "Sum Time: 212ms\n", + "\n", + "\n", + "\n", + "In Epoch:10/Step:20, got best dev performance:AccuracyMetric: acc=0.857143\n", + "Reloaded the best model.\n" + ] + }, + { + "data": { + "text/plain": [ + "{'best_eval': {'AccuracyMetric': {'acc': 0.857143}},\n", + " 'best_epoch': 10,\n", + " 'best_step': 20,\n", + " 'seconds': 0.2}" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from fastNLP import Callback\n", + "\n", + "start_time = time.time()\n", + "\n", + "class MyCallback(Callback):\n", + " def on_epoch_end(self):\n", + " print('Sum Time: {:d}ms\\n\\n'.format(round((time.time()-start_time)*1000)))\n", + " \n", + "\n", + "model = CNNText((len(vocab),50), num_classes=5, padding=2, dropout=0.1)\n", + "trainer = Trainer(model=model, train_data=train_data, dev_data=dev_data,\n", + " loss=CrossEntropyLoss(), metrics=AccuracyMetric(), callbacks=[MyCallback()])\n", + "trainer.train()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.7" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} From fa246f5ab28049f3896f384baf9ac67b1a990fb0 Mon Sep 17 00:00:00 2001 From: ChenXin Date: Sun, 12 May 2019 22:11:46 +0800 Subject: [PATCH 04/19] =?UTF-8?q?=E4=BF=AE=E6=94=B9=E4=BA=86=E4=B8=80?= =?UTF-8?q?=E4=BA=9B=E4=BB=8B=E7=BB=8D=E6=96=87=E6=A1=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- fastNLP/__init__.py | 1 - fastNLP/core/callback.py | 9 ++++++--- fastNLP/core/const.py | 7 ++++++- fastNLP/models/__init__.py | 1 + 4 files changed, 13 insertions(+), 5 deletions(-) diff --git a/fastNLP/__init__.py b/fastNLP/__init__.py index 9873be72..bb9dfb4a 100644 --- a/fastNLP/__init__.py +++ b/fastNLP/__init__.py @@ -33,7 +33,6 @@ __all__ = [ "EngChar2DPadder", "AccuracyMetric", - "BMESF1PreRecMetric", "SpanFPreRecMetric", "SQuADMetric", diff --git a/fastNLP/core/callback.py b/fastNLP/core/callback.py index ba73f101..76a9e2e2 100644 --- a/fastNLP/core/callback.py +++ b/fastNLP/core/callback.py @@ -1,5 +1,10 @@ r""" -callback模块实现了 fastNLP 中的许多 callback 类,用于增强 :class:`~fastNLP.Trainer` 类, +callback模块实现了 fastNLP 中的许多 callback 类,用于增强 :class:`~fastNLP.Trainer` 类。 + +虽然Trainer本身已经集成了一些功能,但仍然不足以囊括训练过程中可能需要到的功能, +比如负采样,learning rate decay, Early Stop等。 +为了解决这个问题fastNLP引入了callback的机制,Callback 是一种在Trainer训练过程中特定阶段会运行的函数集合。 +关于Trainer的详细文档,请参见 :doc:`trainer 模块` 我们将 :meth:`~fastNLP.Train.train` 这个函数内部分为以下的阶段,在对应阶段会触发相应的调用:: @@ -26,8 +31,6 @@ callback模块实现了 fastNLP 中的许多 callback 类,用于增强 :class: callback.on_train_end() # 训练结束 callback.on_exception() # 这是一个特殊的步骤,在训练过程中遭遇exception会跳转到这里 -关于Trainer的详细文档,请参见 :doc:`trainer 模块` - 如下面的例子所示,我们可以使用内置的 callback 类,或者继承 :class:`~fastNLP.core.callback.Callback` 定义自己的 callback 类:: diff --git a/fastNLP/core/const.py b/fastNLP/core/const.py index f3022db2..89ff51a2 100644 --- a/fastNLP/core/const.py +++ b/fastNLP/core/const.py @@ -1,5 +1,10 @@ class Const: - """fastNLP中field命名常量。 + """ + fastNLP中field命名常量。 + + .. 
todo:: + 把下面这段改成表格 + 具体列表:: INPUT 模型的序列输入 words(复数words1, words2) diff --git a/fastNLP/models/__init__.py b/fastNLP/models/__init__.py index f0d84b1c..bad96cf9 100644 --- a/fastNLP/models/__init__.py +++ b/fastNLP/models/__init__.py @@ -3,6 +3,7 @@ TODO 详细介绍的表格,与主页相对应 """ +__all__ = ["CNNText", "SeqLabeling", "ESIM", "STSeqLabel", "AdvSeqLabel", "STNLICls", "STSeqCls"] from .base_model import BaseModel from .bert import BertForMultipleChoice, BertForQuestionAnswering, BertForSequenceClassification, \ BertForTokenClassification From e3d6560132ef95d8100c8fd97ee1b189964b142c Mon Sep 17 00:00:00 2001 From: ChenXin Date: Sun, 12 May 2019 23:54:10 +0800 Subject: [PATCH 05/19] =?UTF-8?q?=E4=BF=AE=E6=94=B9=E4=BA=86=20models=20?= =?UTF-8?q?=E5=92=8C=20modules=20=E7=9A=84=E4=B8=BB=E9=A1=B5=E4=BB=8B?= =?UTF-8?q?=E7=BB=8D?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/source/index.rst | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/docs/source/index.rst b/docs/source/index.rst index 10bab0eb..687b1c33 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -12,13 +12,14 @@ fastNLP 是一款轻量级的 NLP 处理套件。你既可以使用它快速地 - 便捷且具有扩展性的训练器; 提供多种内置callback函数,方便实验记录、异常捕获等。 -内置的模块 +内置组件 ------------ -(TODO) +大部分用于的 NLP 任务神经网络都可以看做由编码(encoder)、聚合(aggregator)、解码(decoder)三个阶段组成。 +.. image:: figures/text_classification.png -主要包含了以下的三大模块: +三大模块功能和例子如下: +-----------------------+-----------------------+-----------------------+ | module type | functionality | example | @@ -34,17 +35,17 @@ fastNLP 是一款轻量级的 NLP 处理套件。你既可以使用它快速地 | | 形式 | | +-----------------------+-----------------------+-----------------------+ +fastNLP 在 :mod:`~fastNLP.module` 模块中内置了大量的组件,可以帮助用户快速搭建自己所需的网络 -For example: - -.. image:: figures/text_classification.png -.. todo:: - 各个任务上的结果 - -内置的模型 +内置模型 ---------------- +fastNLP 在 :mod:`~fastNLP.models` 模块中内置了如 :class:`~fastNLP.models.CNNText` 、 +:class:`~fastNLP.models.SeqLabeling` 等完整的模型,以供用户直接使用。 + +.. todo:: + 这些模型的介绍如下表所示:(模型名称 + 介绍 + 任务上的结果) 用户手册 ---------------- From 269e28cc795eb22ba4414a37c916b7a8c174e5d8 Mon Sep 17 00:00:00 2001 From: ChenXin Date: Mon, 13 May 2019 00:23:55 +0800 Subject: [PATCH 06/19] =?UTF-8?q?=E5=8A=A0=E4=B8=8A=E4=BA=86=20titlesonly?= =?UTF-8?q?=20=E8=BF=99=E4=B8=AA=E8=AE=BE=E7=BD=AE?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/source/fastNLP.core.rst | 1 + docs/source/fastNLP.io.rst | 1 + docs/source/fastNLP.models.rst | 1 + docs/source/fastNLP.modules.aggregator.rst | 1 + docs/source/fastNLP.modules.decoder.rst | 1 + docs/source/fastNLP.modules.encoder.rst | 1 + docs/source/fastNLP.modules.rst | 1 + docs/source/fastNLP.rst | 1 + docs/source/index.rst | 9 ++++----- docs/source/modules.rst | 1 + 10 files changed, 13 insertions(+), 5 deletions(-) diff --git a/docs/source/fastNLP.core.rst b/docs/source/fastNLP.core.rst index 932c6b21..ba784a59 100644 --- a/docs/source/fastNLP.core.rst +++ b/docs/source/fastNLP.core.rst @@ -10,6 +10,7 @@ fastNLP.core 模块 ---------- .. toctree:: + :titlesonly: fastNLP.core.batch fastNLP.core.callback diff --git a/docs/source/fastNLP.io.rst b/docs/source/fastNLP.io.rst index ae28573d..88498d9a 100644 --- a/docs/source/fastNLP.io.rst +++ b/docs/source/fastNLP.io.rst @@ -10,6 +10,7 @@ fastNLP.io 模块 ---------- .. 
toctree:: + :titlesonly: fastNLP.io.base_loader fastNLP.io.dataset_loader diff --git a/docs/source/fastNLP.models.rst b/docs/source/fastNLP.models.rst index c1be3a4c..2c243295 100644 --- a/docs/source/fastNLP.models.rst +++ b/docs/source/fastNLP.models.rst @@ -10,6 +10,7 @@ fastNLP.models 模块 ---------- .. toctree:: + :titlesonly: fastNLP.models.base_model fastNLP.models.bert diff --git a/docs/source/fastNLP.modules.aggregator.rst b/docs/source/fastNLP.modules.aggregator.rst index 4f8baabd..44398325 100644 --- a/docs/source/fastNLP.modules.aggregator.rst +++ b/docs/source/fastNLP.modules.aggregator.rst @@ -10,6 +10,7 @@ fastNLP.modules.aggregator ---------- .. toctree:: + :titlesonly: fastNLP.modules.aggregator.attention fastNLP.modules.aggregator.pooling diff --git a/docs/source/fastNLP.modules.decoder.rst b/docs/source/fastNLP.modules.decoder.rst index fbda11d9..1c28740b 100644 --- a/docs/source/fastNLP.modules.decoder.rst +++ b/docs/source/fastNLP.modules.decoder.rst @@ -10,6 +10,7 @@ fastNLP.modules.decoder ---------- .. toctree:: + :titlesonly: fastNLP.modules.decoder.CRF fastNLP.modules.decoder.MLP diff --git a/docs/source/fastNLP.modules.encoder.rst b/docs/source/fastNLP.modules.encoder.rst index 9a11fe74..b15232fa 100644 --- a/docs/source/fastNLP.modules.encoder.rst +++ b/docs/source/fastNLP.modules.encoder.rst @@ -10,6 +10,7 @@ fastNLP.modules.encoder ---------- .. toctree:: + :titlesonly: fastNLP.modules.encoder.bert fastNLP.modules.encoder.char_encoder diff --git a/docs/source/fastNLP.modules.rst b/docs/source/fastNLP.modules.rst index 263005f0..4f05ae7b 100644 --- a/docs/source/fastNLP.modules.rst +++ b/docs/source/fastNLP.modules.rst @@ -10,6 +10,7 @@ fastNLP.modules 模块 ----------- .. toctree:: + :titlesonly: fastNLP.modules.aggregator fastNLP.modules.decoder diff --git a/docs/source/fastNLP.rst b/docs/source/fastNLP.rst index eaa06ea8..a795045a 100644 --- a/docs/source/fastNLP.rst +++ b/docs/source/fastNLP.rst @@ -10,6 +10,7 @@ ----------- .. toctree:: + :titlesonly: :maxdepth: 3 fastNLP.core diff --git a/docs/source/index.rst b/docs/source/index.rst index 687b1c33..d77ae1c8 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -15,11 +15,12 @@ fastNLP 是一款轻量级的 NLP 处理套件。你既可以使用它快速地 内置组件 ------------ -大部分用于的 NLP 任务神经网络都可以看做由编码(encoder)、聚合(aggregator)、解码(decoder)三个阶段组成。 +大部分用于的 NLP 任务神经网络都可以看做由编码(encoder)、聚合(aggregator)、解码(decoder)三中模块组成。 .. image:: figures/text_classification.png -三大模块功能和例子如下: +fastNLP 在 :mod:`~fastNLP.modules` 模块中内置了三个模块的诸多组件,可以帮助用户快速搭建自己所需的网络。 +三个模块的功能和常见组件如下: +-----------------------+-----------------------+-----------------------+ | module type | functionality | example | @@ -35,8 +36,6 @@ fastNLP 是一款轻量级的 NLP 处理套件。你既可以使用它快速地 | | 形式 | | +-----------------------+-----------------------+-----------------------+ -fastNLP 在 :mod:`~fastNLP.module` 模块中内置了大量的组件,可以帮助用户快速搭建自己所需的网络 - 内置模型 ---------------- @@ -65,7 +64,7 @@ API 文档 除了用户手册之外,你还可以通过查阅 API 文档来找到你所需要的工具。 .. toctree:: - :maxdepth: 2 + :titlesonly: fastNLP diff --git a/docs/source/modules.rst b/docs/source/modules.rst index e9a92cb7..9ca3c7f3 100644 --- a/docs/source/modules.rst +++ b/docs/source/modules.rst @@ -2,6 +2,7 @@ fastNLP ======= .. 
toctree:: + :titlesonly: :maxdepth: 4 fastNLP From 39a1fe9567beb467ec5bd24cc8817b3f98e76ff4 Mon Sep 17 00:00:00 2001 From: ChenXin Date: Mon, 13 May 2019 00:49:06 +0800 Subject: [PATCH 07/19] =?UTF-8?q?=E4=BF=AE=E6=94=B9=E4=BA=86=E6=A8=A1?= =?UTF-8?q?=E5=9D=97=E6=96=87=E6=A1=A3=E5=B1=95=E7=A4=BA=E7=9A=84=E6=A0=87?= =?UTF-8?q?=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/source/fastNLP.core.rst | 4 ++-- docs/source/fastNLP.io.rst | 4 ++-- docs/source/fastNLP.models.rst | 4 ++-- docs/source/fastNLP.modules.rst | 4 ++-- docs/source/fastNLP.rst | 2 +- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/source/fastNLP.core.rst b/docs/source/fastNLP.core.rst index ba784a59..82c13e46 100644 --- a/docs/source/fastNLP.core.rst +++ b/docs/source/fastNLP.core.rst @@ -1,5 +1,5 @@ -fastNLP.core 模块 -==================== +fastNLP.core +============ .. automodule:: fastNLP.core :members: diff --git a/docs/source/fastNLP.io.rst b/docs/source/fastNLP.io.rst index 88498d9a..fad05a21 100644 --- a/docs/source/fastNLP.io.rst +++ b/docs/source/fastNLP.io.rst @@ -1,5 +1,5 @@ -fastNLP.io 模块 -================== +fastNLP.io +========== .. automodule:: fastNLP.io :members: diff --git a/docs/source/fastNLP.models.rst b/docs/source/fastNLP.models.rst index 2c243295..57592bf4 100644 --- a/docs/source/fastNLP.models.rst +++ b/docs/source/fastNLP.models.rst @@ -1,5 +1,5 @@ -fastNLP.models 模块 -====================== +fastNLP.models +============== .. automodule:: fastNLP.models :members: diff --git a/docs/source/fastNLP.modules.rst b/docs/source/fastNLP.modules.rst index 4f05ae7b..d04ccdcf 100644 --- a/docs/source/fastNLP.modules.rst +++ b/docs/source/fastNLP.modules.rst @@ -1,5 +1,5 @@ -fastNLP.modules 模块 -======================= +fastNLP.modules +=============== .. automodule:: fastNLP.modules :members: diff --git a/docs/source/fastNLP.rst b/docs/source/fastNLP.rst index a795045a..f0c3d41c 100644 --- a/docs/source/fastNLP.rst +++ b/docs/source/fastNLP.rst @@ -1,4 +1,4 @@ -用户 API +API 文档 =============== .. 
automodule:: fastNLP From 0a014fe3aeffafa473aaa79c8f0d99b5641e74f1 Mon Sep 17 00:00:00 2001 From: ChenXin Date: Mon, 13 May 2019 00:49:54 +0800 Subject: [PATCH 08/19] =?UTF-8?q?=E4=BF=AE=E6=94=B9=E4=BA=86=20core=20?= =?UTF-8?q?=E5=92=8C=20io=20=E7=9A=84=E5=BC=80=E7=AF=87=E4=BB=8B=E7=BB=8D?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- fastNLP/__init__.py | 11 +++++------ fastNLP/core/__init__.py | 9 +++++---- fastNLP/io/__init__.py | 13 ++++++------- fastNLP/io/dataset_loader.py | 5 ++--- fastNLP/io/embed_loader.py | 7 +------ 5 files changed, 19 insertions(+), 26 deletions(-) diff --git a/fastNLP/__init__.py b/fastNLP/__init__.py index bb9dfb4a..5dd5fd54 100644 --- a/fastNLP/__init__.py +++ b/fastNLP/__init__.py @@ -1,12 +1,11 @@ """ fastNLP 由 :mod:`~fastNLP.core` 、 :mod:`~fastNLP.io` 、:mod:`~fastNLP.modules`、:mod:`~fastNLP.models` -和 :mod:`~fastNLP.component` 等子模块组成。 +等子模块组成,你可以点进去查看每个模块的文档。 -- :mod:`~fastNLP.core` fastNLP 的核心模块,包括 DataSet、 Trainer、 Tester 等组件 -- :mod:`~fastNLP.io` fastNLP 的输入输出模块,实现了数据集的读取,模型的存取等功能 -- :mod:`~fastNLP.modules` TODO 如何介绍 -- :mod:`~fastNLP.models` 使用 fastNLP 实现的一些常见模型,具体参见 :doc:`fastNLP.models` -- :mod:`~fastNLP.component` TODO +- :mod:`~fastNLP.core` 是fastNLP 的核心模块,包括 DataSet、 Trainer、 Tester 等组件。详见文档 :doc:`/fastNLP.core` +- :mod:`~fastNLP.io` 是实现输入输出的模块,包括了数据集的读取,模型的存取等功能。详见文档 :doc:`/fastNLP.io` +- :mod:`~fastNLP.modules` 包含了用于搭建神经网络模型的诸多组件,可以帮助用户快速搭建自己所需的网络。详见文档 :doc:`/fastNLP.modules` +- :mod:`~fastNLP.models` 包含了一些使用 fastNLP 实现的完整网络模型,包括CNNText、SeqLabeling等常见模型。详见文档 :doc:`/fastNLP.models` fastNLP 中最常用的组件可以直接从 fastNLP 包中 import ,他们的文档如下: """ diff --git a/fastNLP/core/__init__.py b/fastNLP/core/__init__.py index 3c5b3f42..d6ab8983 100644 --- a/fastNLP/core/__init__.py +++ b/fastNLP/core/__init__.py @@ -1,5 +1,5 @@ """ -core 模块里实现了 fastNLP 的核心框架,常用的组件都可以从 fastNLP 包中直接 import。当然你也同样可以从 core 模块的子模块中 import, +core 模块里实现了 fastNLP 的核心框架,常用的功能都可以从 fastNLP 包中直接 import。当然你也同样可以从 core 模块的子模块中 import, 例如 Batch 组件有两种 import 的方式:: # 直接从 fastNLP 中 import @@ -8,10 +8,11 @@ core 模块里实现了 fastNLP 的核心框架,常用的组件都可以从 fa # 从 core 模块的子模块 batch 中 import from fastNLP.core.batch import Batch -对于常用的功能,你只需要在 :doc:`fastNLP` 中查看即可。如果想了解各个子模块的分工,您可以阅读以下文档: - - TODO 向导 +对于常用的功能,你只需要在 :doc:`fastNLP` 中查看即可。如果想了解各个子模块的具体作用,您可以在下面找到每个子模块的具体文档。 +.. todo:: + 介绍core 的子模块的分工,好像必要性不大 + """ from .batch import Batch from .callback import Callback, GradientClipCallback, EarlyStopCallback, TensorboardCallback, LRScheduler, ControlC diff --git a/fastNLP/io/__init__.py b/fastNLP/io/__init__.py index b855a1bb..3baf878c 100644 --- a/fastNLP/io/__init__.py +++ b/fastNLP/io/__init__.py @@ -5,15 +5,10 @@ 2. 用于读入数据的 :doc:`DataSetLoader ` 类 -3. 用于保存和载入模型的类, 参考 :doc:`Model-IO ` +3. 用于保存和载入模型的类, 参考 :doc:`/fastNLP.io.model_io` -这些类的使用方法可以在对应module的文档下查看. 
+这些类的使用方法如下: """ -from .embed_loader import EmbedLoader -from .dataset_loader import DataSetLoader, CSVLoader, JsonLoader, ConllLoader, SNLILoader, SSTLoader, \ - PeopleDailyCorpusLoader, Conll2003Loader -from .model_io import ModelLoader as ModelLoader, ModelSaver as ModelSaver - __all__ = [ 'EmbedLoader', @@ -29,3 +24,7 @@ __all__ = [ 'ModelLoader', 'ModelSaver', ] +from .embed_loader import EmbedLoader +from .dataset_loader import DataSetLoader, CSVLoader, JsonLoader, ConllLoader, SNLILoader, SSTLoader, \ + PeopleDailyCorpusLoader, Conll2003Loader +from .model_io import ModelLoader as ModelLoader, ModelSaver as ModelSaver \ No newline at end of file diff --git a/fastNLP/io/dataset_loader.py b/fastNLP/io/dataset_loader.py index 5df48d71..3cd475a5 100644 --- a/fastNLP/io/dataset_loader.py +++ b/fastNLP/io/dataset_loader.py @@ -1,8 +1,7 @@ """ dataset_loader模块实现了许多 DataSetLoader, 用于读取不同格式的数据, 并返回 `DataSet` , -得到的 :class:`~fastNLP.DataSet` 对象可以直接传入 :class:`~fastNLP.Trainer`, :class:`~fastNLP.Tester`, 用于模型的训练和测试 - -Example:: +得到的 :class:`~fastNLP.DataSet` 对象可以直接传入 :class:`~fastNLP.Trainer`, :class:`~fastNLP.Tester`, 用于模型的训练和测试。 +以SNLI数据集为例:: loader = SNLILoader() train_ds = loader.load('path/to/train') diff --git a/fastNLP/io/embed_loader.py b/fastNLP/io/embed_loader.py index 4cc8f596..9f3a73dd 100644 --- a/fastNLP/io/embed_loader.py +++ b/fastNLP/io/embed_loader.py @@ -1,8 +1,3 @@ -""" -.. _embed-loader: - -用于读取预训练的embedding, 读取结果可直接载入为模型参数 -""" import os import numpy as np @@ -16,7 +11,7 @@ class EmbedLoader(BaseLoader): """ 别名::class:`fastNLP.io.EmbedLoader` :class:`fastNLP.io.embed_loader.EmbedLoader` - 这个类用于从预训练的Embedding中load数据。 + 用于读取预训练的embedding, 读取结果可直接载入为模型参数。 """ def __init__(self): From 208cf5facb8b5644516ced00df3251100c8275d0 Mon Sep 17 00:00:00 2001 From: ChenXin Date: Mon, 13 May 2019 10:08:27 +0800 Subject: [PATCH 09/19] =?UTF-8?q?=E4=BF=AE=E6=94=B9=E4=BA=86=20modules=20?= =?UTF-8?q?=E5=92=8C=20models=20=E5=BC=80=E7=AF=87=E4=BB=8B=E7=BB=8D?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/source/index.rst | 6 ++-- fastNLP/models/__init__.py | 8 ++++-- fastNLP/modules/__init__.py | 40 +++++++++++++++++++++++--- fastNLP/modules/aggregator/__init__.py | 9 +++++- fastNLP/modules/decoder/__init__.py | 8 +++++- fastNLP/modules/encoder/__init__.py | 10 ++++--- 6 files changed, 66 insertions(+), 15 deletions(-) diff --git a/docs/source/index.rst b/docs/source/index.rst index d77ae1c8..554b1afc 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -15,12 +15,12 @@ fastNLP 是一款轻量级的 NLP 处理套件。你既可以使用它快速地 内置组件 ------------ -大部分用于的 NLP 任务神经网络都可以看做由编码(encoder)、聚合(aggregator)、解码(decoder)三中模块组成。 +大部分用于的 NLP 任务神经网络都可以看做由编码(encoder)、聚合(aggregator)、解码(decoder)三种模块组成。 .. image:: figures/text_classification.png -fastNLP 在 :mod:`~fastNLP.modules` 模块中内置了三个模块的诸多组件,可以帮助用户快速搭建自己所需的网络。 -三个模块的功能和常见组件如下: +fastNLP 在 :mod:`~fastNLP.modules` 模块中内置了三种模块的诸多组件,可以帮助用户快速搭建自己所需的网络。 +三种模块的功能和常见组件如下: +-----------------------+-----------------------+-----------------------+ | module type | functionality | example | diff --git a/fastNLP/models/__init__.py b/fastNLP/models/__init__.py index bad96cf9..66af3a46 100644 --- a/fastNLP/models/__init__.py +++ b/fastNLP/models/__init__.py @@ -1,6 +1,10 @@ """ -使用 fastNLP 实现的一系列常见模型,具体有: -TODO 详细介绍的表格,与主页相对应 +fastNLP 在 :mod:`~fastNLP.models` 模块中内置了如 :class:`~fastNLP.models.CNNText` 、 +:class:`~fastNLP.models.SeqLabeling` 等完整的模型,以供用户直接使用。 + +.. 
todo:: + 这些模型的介绍(与主页一致) + """ __all__ = ["CNNText", "SeqLabeling", "ESIM", "STSeqLabel", "AdvSeqLabel", "STNLICls", "STSeqCls"] diff --git a/fastNLP/modules/__init__.py b/fastNLP/modules/__init__.py index 4022de9d..53d44f47 100644 --- a/fastNLP/modules/__init__.py +++ b/fastNLP/modules/__init__.py @@ -1,10 +1,25 @@ """ -modules 模块是 fastNLP 的重要组成部分,它实现了神经网络构建中常见的组件, -具体包括 TODO +大部分用于的 NLP 任务神经网络都可以看做由编码 :mod:`~fastNLP.modules.encoder` 、 +聚合 :mod:`~fastNLP.modules.aggregator` 、解码 :mod:`~fastNLP.modules.decoder` 三种模块组成。 -可以和 PyTorch 结合使用?TODO +.. image:: figures/text_classification.png -TODO __all__ 里面多暴露一些 +:mod:`~fastNLP.modules` 中实现了 fastNLP 提供的诸多模块组件,可以帮助用户快速搭建自己所需的网络。 +三种模块的功能和常见组件如下: + ++-----------------------+-----------------------+-----------------------+ +| module type | functionality | example | ++=======================+=======================+=======================+ +| encoder | 将输入编码为具有具 | embedding, RNN, CNN, | +| | 有表示能力的向量 | transformer | ++-----------------------+-----------------------+-----------------------+ +| aggregator | 从多个向量中聚合信息 | self-attention, | +| | | max-pooling | ++-----------------------+-----------------------+-----------------------+ +| decoder | 将具有某种表示意义的 | MLP, CRF | +| | 向量解码为需要的输出 | | +| | 形式 | | ++-----------------------+-----------------------+-----------------------+ """ from . import aggregator @@ -16,3 +31,20 @@ from .dropout import TimestepDropout from .encoder import * from .utils import get_embeddings +__all__ = [ + "LSTM", + "Embedding", + "ConvMaxpool", + "BertModel", + + "MaxPool", + "MaxPoolWithMask", + "AvgPool", + "MultiHeadAttention", + "BiAttention", + + "MLP", + "ConditionalRandomField", + "viterbi_decode", + "allowed_transitions", +] \ No newline at end of file diff --git a/fastNLP/modules/aggregator/__init__.py b/fastNLP/modules/aggregator/__init__.py index bfb7579b..4a76cf5b 100644 --- a/fastNLP/modules/aggregator/__init__.py +++ b/fastNLP/modules/aggregator/__init__.py @@ -1,7 +1,14 @@ -__all__ = ["MaxPool", "MaxPoolWithMask", "AvgPool", "MultiHeadAttention", "BiAttention"] from .pooling import MaxPool from .pooling import MaxPoolWithMask from .pooling import AvgPool from .pooling import MeanPoolWithMask from .attention import MultiHeadAttention, BiAttention +__all__ = [ + "MaxPool", + "MaxPoolWithMask", + "AvgPool", + + "MultiHeadAttention", + "BiAttention" +] diff --git a/fastNLP/modules/decoder/__init__.py b/fastNLP/modules/decoder/__init__.py index 84763e03..516b687a 100644 --- a/fastNLP/modules/decoder/__init__.py +++ b/fastNLP/modules/decoder/__init__.py @@ -1,5 +1,11 @@ -__all__ = ["MLP", "ConditionalRandomField", "viterbi_decode", "allowed_transitions"] from .CRF import ConditionalRandomField from .MLP import MLP from .utils import viterbi_decode from .CRF import allowed_transitions + +__all__ = [ + "MLP", + "ConditionalRandomField", + "viterbi_decode", + "allowed_transitions" +] diff --git a/fastNLP/modules/encoder/__init__.py b/fastNLP/modules/encoder/__init__.py index a1cd910b..67f69850 100644 --- a/fastNLP/modules/encoder/__init__.py +++ b/fastNLP/modules/encoder/__init__.py @@ -3,7 +3,9 @@ from .embedding import Embedding from .lstm import LSTM from .bert import BertModel -__all__ = ["LSTM", - "Embedding", - "ConvMaxpool", - "BertModel"] +__all__ = [ + "LSTM", + "Embedding", + "ConvMaxpool", + "BertModel" +] From fec3216a0eba641764bc65971fd2c34720f2b022 Mon Sep 17 00:00:00 2001 From: ChenXin Date: Mon, 13 May 2019 10:40:06 +0800 Subject: [PATCH 10/19] =?UTF-8?q?=E4=BD=BF=E7=94=A8=20..=20todo::=20?= 
=?UTF-8?q?=E9=9A=90=E8=97=8F=E4=BA=86=E5=8F=AF=E8=83=BD=E8=A2=AB=E6=8A=BD?= =?UTF-8?q?=E5=88=B0=E6=96=87=E6=A1=A3=E4=B8=AD=E7=9A=84=20TODO=20?= =?UTF-8?q?=E6=B3=A8=E9=87=8A?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- fastNLP/core/dataset.py | 5 ++++- fastNLP/core/losses.py | 3 +-- fastNLP/io/base_loader.py | 3 ++- fastNLP/modules/aggregator/attention.py | 3 ++- fastNLP/modules/utils.py | 5 ++++- 5 files changed, 13 insertions(+), 6 deletions(-) diff --git a/fastNLP/core/dataset.py b/fastNLP/core/dataset.py index d527bf76..7228842f 100644 --- a/fastNLP/core/dataset.py +++ b/fastNLP/core/dataset.py @@ -58,7 +58,10 @@ 2 DataSet与预处理 常见的预处理有如下几种 -2.1 从某个文本文件读取内容 # TODO 引用DataLoader +2.1 从某个文本文件读取内容 # + + .. todo:: + 引用DataLoader Example:: diff --git a/fastNLP/core/losses.py b/fastNLP/core/losses.py index ac08b46f..7a5fdf9d 100644 --- a/fastNLP/core/losses.py +++ b/fastNLP/core/losses.py @@ -221,8 +221,7 @@ class CrossEntropyLoss(LossBase): """ def __init__(self, pred=None, target=None, padding_idx=-100): - # TODO 需要做一些检查,F.cross_entropy在计算时,如果pred是(16, 10 ,4), target的形状按道理应该是(16, 10), 但实际却需要 - # TODO (16, 4) + # TODO 需要做一些检查,F.cross_entropy在计算时,如果pred是(16, 10 ,4), target的形状按道理应该是(16, 10), 但实际需要(16,4) super(CrossEntropyLoss, self).__init__() self._init_param_map(pred=pred, target=target) self.padding_idx = padding_idx diff --git a/fastNLP/io/base_loader.py b/fastNLP/io/base_loader.py index 569f7e2e..051de281 100644 --- a/fastNLP/io/base_loader.py +++ b/fastNLP/io/base_loader.py @@ -47,7 +47,6 @@ class BaseLoader(object): class DataLoaderRegister: - # TODO 这个类使用在何处? _readers = {} @classmethod @@ -64,3 +63,5 @@ class DataLoaderRegister: if read_fn_name in cls._readers: return cls._readers[read_fn_name] raise AttributeError('no read function: {}'.format(read_fn_name)) + + # TODO 这个类使用在何处? diff --git a/fastNLP/modules/aggregator/attention.py b/fastNLP/modules/aggregator/attention.py index 233dcb55..cea9c405 100644 --- a/fastNLP/modules/aggregator/attention.py +++ b/fastNLP/modules/aggregator/attention.py @@ -12,7 +12,8 @@ from ..utils import initial_parameter class DotAttention(nn.Module): """ - TODO + .. todo:: + 补上文档 """ def __init__(self, key_size, value_size, dropout=0): super(DotAttention, self).__init__() diff --git a/fastNLP/modules/utils.py b/fastNLP/modules/utils.py index 78851587..047ebb78 100644 --- a/fastNLP/modules/utils.py +++ b/fastNLP/modules/utils.py @@ -70,7 +70,10 @@ def initial_parameter(net, initial_method=None): def get_embeddings(init_embed): """ - 得到词嵌入 TODO + 得到词嵌入 + + .. todo:: + 补上文档 :param init_embed: 单词词典, 可以是 tuple, 包括(num_embedings, embedding_dim), 即 embedding的大小和每个词的维度. 
也可以传入 nn.Embedding 对象, From 902a3a6bcd0b2f5667fdab387f9f550d1b8068bc Mon Sep 17 00:00:00 2001 From: ChenXin Date: Tue, 14 May 2019 16:48:32 +0800 Subject: [PATCH 11/19] =?UTF-8?q?=E4=BF=AE=E6=94=B9=E4=BA=86=E4=B8=80?= =?UTF-8?q?=E4=BA=9B=E6=B3=A8=E9=87=8A?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- fastNLP/core/dataset.py | 46 ++++++++++++++++--------------------- fastNLP/core/utils.py | 5 ++-- test/models/model_runner.py | 1 + 3 files changed, 23 insertions(+), 29 deletions(-) diff --git a/fastNLP/core/dataset.py b/fastNLP/core/dataset.py index 7228842f..b506dfae 100644 --- a/fastNLP/core/dataset.py +++ b/fastNLP/core/dataset.py @@ -212,39 +212,33 @@ target和input,这种情况下,fastNLP默认不进行pad。另外,当某个field已经被设置为了target或者input后,之后append的 instance对应的field必须要和前面已有的内容一致,否则会报错。 - 可以查看field的dtype - - Example:: + 可以查看field的dtype:: - from fastNLP import DataSet + from fastNLP import DataSet - d = DataSet({'a': [0, 1, 3], 'b':[[1.0, 2.0], [0.1, 0.2], [3]]}) - d.set_input('a', 'b') - d.a.dtype - >> numpy.int64 - d.b.dtype - >> numpy.float64 - # 默认情况下'a'这个field将被转换为torch.LongTensor,但如果需要其为torch.FloatTensor可以手动修改dtype - d.a.dtype = float # 请确保该field的确可以全部转换为float。 + d = DataSet({'a': [0, 1, 3], 'b':[[1.0, 2.0], [0.1, 0.2], [3]]}) + d.set_input('a', 'b') + d.a.dtype + >> numpy.int64 + d.b.dtype + >> numpy.float64 + # 默认情况下'a'这个field将被转换为torch.LongTensor,但如果需要其为torch.FloatTensor可以手动修改dtype + d.a.dtype = float # 请确保该field的确可以全部转换为float。 如果某个field中出现了多种类型混合(比如一部分为str,一部分为int)的情况,fastNLP无法判断该field的类型,会报如下的 - 错误: - - Example:: + 错误:: - from fastNLP import DataSet - d = DataSet({'data': [1, 'a']}) - d.set_input('data') - >> RuntimeError: Mixed data types in Field data: [, ] - - 可以通过设置以忽略对该field进行类型检查 + from fastNLP import DataSet + d = DataSet({'data': [1, 'a']}) + d.set_input('data') + >> RuntimeError: Mixed data types in Field data: [, ] - Example:: + 可以通过设置以忽略对该field进行类型检查:: - from fastNLP import DataSet - d = DataSet({'data': [1, 'a']}) - d.set_ignore_type('data') - d.set_input('data') + from fastNLP import DataSet + d = DataSet({'data': [1, 'a']}) + d.set_ignore_type('data') + d.set_input('data') 当某个field被设置为忽略type之后,fastNLP将不对其进行pad。 diff --git a/fastNLP/core/utils.py b/fastNLP/core/utils.py index f7539fd7..a9a7ac0c 100644 --- a/fastNLP/core/utils.py +++ b/fastNLP/core/utils.py @@ -35,9 +35,7 @@ def cache_results(_cache_fp, _refresh=False, _verbose=1): """ 别名::class:`fastNLP.cache_results` :class:`fastNLP.core.uitls.cache_results` - cache_results是fastNLP中用于cache数据的装饰器。通过下面的例子看一下如何使用 - - Example:: + cache_results是fastNLP中用于cache数据的装饰器。通过下面的例子看一下如何使用:: import time import numpy as np @@ -607,6 +605,7 @@ def seq_len_to_mask(seq_len): 转变 1-d seq_len到2-d mask. Example:: + >>> seq_len = torch.arange(2, 16) >>> mask = seq_len_to_mask(seq_len) >>> print(mask.size()) diff --git a/test/models/model_runner.py b/test/models/model_runner.py index 3f4e1200..405aa7d6 100644 --- a/test/models/model_runner.py +++ b/test/models/model_runner.py @@ -6,6 +6,7 @@ 此模块的测试仅保证模型能使用fastNLP进行训练和测试,不测试模型实际性能 Example:: + # import 全大写变量... 
from model_runner import * From 8a7bf582445f35e0f839ef5cd2d63108b5b60fa0 Mon Sep 17 00:00:00 2001 From: ChenXin Date: Tue, 14 May 2019 16:59:18 +0800 Subject: [PATCH 12/19] delete an old metric in test --- test/core/test_metrics.py | 1 - 1 file changed, 1 deletion(-) diff --git a/test/core/test_metrics.py b/test/core/test_metrics.py index db508e39..a5f7c0c3 100644 --- a/test/core/test_metrics.py +++ b/test/core/test_metrics.py @@ -4,7 +4,6 @@ import numpy as np import torch from fastNLP import AccuracyMetric -from fastNLP import BMESF1PreRecMetric from fastNLP.core.metrics import _pred_topk, _accuracy_topk From 6cb75104b80c2ccb0a4d17c1c61e6a4ef7b50bd6 Mon Sep 17 00:00:00 2001 From: ChenXin Date: Tue, 14 May 2019 19:27:02 +0800 Subject: [PATCH 13/19] =?UTF-8?q?=E4=BF=AE=E6=94=B9=20tutorials=20?= =?UTF-8?q?=E7=9A=84=E6=B5=8B=E8=AF=95=E6=96=87=E4=BB=B6?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- test/test_tutorials.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/test/test_tutorials.py b/test/test_tutorials.py index 8c0e37bf..4b1889d4 100644 --- a/test/test_tutorials.py +++ b/test/test_tutorials.py @@ -10,7 +10,7 @@ from fastNLP.core.metrics import AccuracyMetric class TestTutorial(unittest.TestCase): def test_fastnlp_10min_tutorial(self): # 从csv读取数据到DataSet - sample_path = "tutorials/sample_data/tutorial_sample_dataset.csv" + sample_path = "data_for_tests/tutorial_sample_dataset.csv" dataset = DataSet.read_csv(sample_path, headers=('raw_sentence', 'label'), sep='\t') print(len(dataset)) @@ -76,9 +76,7 @@ class TestTutorial(unittest.TestCase): from copy import deepcopy # 更改DataSet中对应field的名称,要以模型的forward等参数名一致 - train_data.rename_field('words', 'word_seq') # input field 与 forward 参数一致 train_data.rename_field('label', 'label_seq') - test_data.rename_field('words', 'word_seq') test_data.rename_field('label', 'label_seq') loss = CrossEntropyLoss(pred="output", target="label_seq") From 1c9a0b5875bd48e7b6ad3fec268a8f34772e7245 Mon Sep 17 00:00:00 2001 From: ChenXin Date: Tue, 14 May 2019 20:59:52 +0800 Subject: [PATCH 14/19] =?UTF-8?q?=E6=8A=8A=E6=9A=82=E4=B8=8D=E5=8F=91?= =?UTF-8?q?=E5=B8=83=E7=9A=84=E5=8A=9F=E8=83=BD=E7=A7=BB=E5=88=B0=20legacy?= =?UTF-8?q?=20=E6=96=87=E4=BB=B6=E5=A4=B9?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- fastNLP/core/callback.py | 6 ++++-- fastNLP/models/enas_trainer.py | 4 ++-- {fastNLP => legacy}/api/README.md | 0 {fastNLP => legacy}/api/__init__.py | 0 {fastNLP => legacy}/api/api.py | 8 ++++---- {fastNLP => legacy}/api/converter.py | 0 {fastNLP => legacy}/api/examples.py | 0 {fastNLP => legacy}/api/pipeline.py | 0 {fastNLP => legacy}/api/processor.py | 15 ++++++--------- {fastNLP => legacy}/api/utils.py | 2 +- {fastNLP => legacy}/automl/__init__.py | 0 {fastNLP => legacy}/automl/enas_controller.py | 0 {fastNLP => legacy}/automl/enas_model.py | 0 {fastNLP => legacy}/automl/enas_trainer.py | 12 ++++++------ {fastNLP => legacy}/automl/enas_utils.py | 0 {fastNLP => legacy}/component/__init__.py | 0 {fastNLP => legacy}/component/bert_tokenizer.py | 0 {test => legacy/test}/api/test_pipeline.py | 0 {test => legacy/test}/api/test_processor.py | 0 {test => legacy/test}/automl/test_enas.py | 0 20 files changed, 23 insertions(+), 24 deletions(-) rename {fastNLP => legacy}/api/README.md (100%) rename {fastNLP => legacy}/api/__init__.py (100%) rename {fastNLP => legacy}/api/api.py (98%) rename {fastNLP => legacy}/api/converter.py (100%) rename {fastNLP => 
legacy}/api/examples.py (100%) rename {fastNLP => legacy}/api/pipeline.py (100%) rename {fastNLP => legacy}/api/processor.py (98%) rename {fastNLP => legacy}/api/utils.py (98%) rename {fastNLP => legacy}/automl/__init__.py (100%) rename {fastNLP => legacy}/automl/enas_controller.py (100%) rename {fastNLP => legacy}/automl/enas_model.py (100%) rename {fastNLP => legacy}/automl/enas_trainer.py (98%) rename {fastNLP => legacy}/automl/enas_utils.py (100%) rename {fastNLP => legacy}/component/__init__.py (100%) rename {fastNLP => legacy}/component/bert_tokenizer.py (100%) rename {test => legacy/test}/api/test_pipeline.py (100%) rename {test => legacy/test}/api/test_processor.py (100%) rename {test => legacy/test}/automl/test_enas.py (100%) diff --git a/fastNLP/core/callback.py b/fastNLP/core/callback.py index 76a9e2e2..c944ec96 100644 --- a/fastNLP/core/callback.py +++ b/fastNLP/core/callback.py @@ -66,8 +66,9 @@ from ..io.model_io import ModelSaver, ModelLoader try: from tensorboardX import SummaryWriter + tensorboardX_flag = True except: - pass + tensorboardX_flag = False class Callback(object): @@ -581,7 +582,8 @@ class TensorboardCallback(Callback): path = os.path.join("./", 'tensorboard_logs_{}'.format(self.trainer.start_time)) else: path = os.path.join(save_dir, 'tensorboard_logs_{}'.format(self.trainer.start_time)) - self._summary_writer = SummaryWriter(path) + if tensorboardX_flag: + self._summary_writer = SummaryWriter(path) def on_batch_begin(self, batch_x, batch_y, indices): if "model" in self.options and self.graph_added is False: diff --git a/fastNLP/models/enas_trainer.py b/fastNLP/models/enas_trainer.py index cce93556..9cd7d8d0 100644 --- a/fastNLP/models/enas_trainer.py +++ b/fastNLP/models/enas_trainer.py @@ -78,7 +78,7 @@ class ENASTrainer(Trainer): results['seconds'] = 0. 
return results try: - if torch.cuda.is_available() and self.use_cuda: + if torch.cuda.is_available() and "cuda" in self.device: self.model = self.model.cuda() self._model_device = self.model.parameters().__next__().device self._mode(self.model, is_test=False) @@ -337,7 +337,7 @@ class ENASTrainer(Trainer): # policy loss loss = -log_probs*utils.get_variable(adv, - self.use_cuda, + 'cuda' in self.device, requires_grad=False) loss = loss.sum() # or loss.mean() diff --git a/fastNLP/api/README.md b/legacy/api/README.md similarity index 100% rename from fastNLP/api/README.md rename to legacy/api/README.md diff --git a/fastNLP/api/__init__.py b/legacy/api/__init__.py similarity index 100% rename from fastNLP/api/__init__.py rename to legacy/api/__init__.py diff --git a/fastNLP/api/api.py b/legacy/api/api.py similarity index 98% rename from fastNLP/api/api.py rename to legacy/api/api.py index 2e7cbfcf..0b5d3cd3 100644 --- a/fastNLP/api/api.py +++ b/legacy/api/api.py @@ -5,13 +5,13 @@ import torch warnings.filterwarnings('ignore') import os -from ..core.dataset import DataSet +from fastNLP.core.dataset import DataSet from .utils import load_url from .processor import ModelProcessor -from ..io.dataset_loader import _cut_long_sentence, ConllLoader -from ..core.instance import Instance +from fastNLP.io.dataset_loader import _cut_long_sentence, ConllLoader +from fastNLP.core.instance import Instance from ..api.pipeline import Pipeline -from ..core.metrics import SpanFPreRecMetric +from fastNLP.core.metrics import SpanFPreRecMetric from .processor import IndexerProcessor # TODO add pretrain urls diff --git a/fastNLP/api/converter.py b/legacy/api/converter.py similarity index 100% rename from fastNLP/api/converter.py rename to legacy/api/converter.py diff --git a/fastNLP/api/examples.py b/legacy/api/examples.py similarity index 100% rename from fastNLP/api/examples.py rename to legacy/api/examples.py diff --git a/fastNLP/api/pipeline.py b/legacy/api/pipeline.py similarity index 100% rename from fastNLP/api/pipeline.py rename to legacy/api/pipeline.py diff --git a/fastNLP/api/processor.py b/legacy/api/processor.py similarity index 98% rename from fastNLP/api/processor.py rename to legacy/api/processor.py index 3c60e621..4c442ed2 100644 --- a/fastNLP/api/processor.py +++ b/legacy/api/processor.py @@ -3,10 +3,10 @@ from collections import defaultdict import torch -from ..core.batch import Batch -from ..core.dataset import DataSet -from ..core.sampler import SequentialSampler -from ..core.vocabulary import Vocabulary +from fastNLP.core.batch import Batch +from fastNLP.core.dataset import DataSet +from fastNLP.core.sampler import SequentialSampler +from fastNLP.core.vocabulary import Vocabulary class Processor(object): @@ -232,7 +232,7 @@ class SeqLenProcessor(Processor): return dataset -from ..core.utils import _build_args +from fastNLP.core.utils import _build_args class ModelProcessor(Processor): @@ -257,10 +257,7 @@ class ModelProcessor(Processor): data_iterator = Batch(dataset, batch_size=self.batch_size, sampler=SequentialSampler()) batch_output = defaultdict(list) - if hasattr(self.model, "predict"): - predict_func = self.model.predict - else: - predict_func = self.model.forward + predict_func = self.model.forward with torch.no_grad(): for batch_x, _ in data_iterator: refined_batch_x = _build_args(predict_func, **batch_x) diff --git a/fastNLP/api/utils.py b/legacy/api/utils.py similarity index 98% rename from fastNLP/api/utils.py rename to legacy/api/utils.py index e8e7c42a..184e5fe6 100644 --- 
a/fastNLP/api/utils.py +++ b/legacy/api/utils.py @@ -22,7 +22,7 @@ except ImportError: try: from tqdm.auto import tqdm except: - from ..core.utils import _pseudo_tqdm as tqdm + from fastNLP.core.utils import _pseudo_tqdm as tqdm # matches bfd8deac from resnet18-bfd8deac.pth HASH_REGEX = re.compile(r'-([a-f0-9]*)\.') diff --git a/fastNLP/automl/__init__.py b/legacy/automl/__init__.py similarity index 100% rename from fastNLP/automl/__init__.py rename to legacy/automl/__init__.py diff --git a/fastNLP/automl/enas_controller.py b/legacy/automl/enas_controller.py similarity index 100% rename from fastNLP/automl/enas_controller.py rename to legacy/automl/enas_controller.py diff --git a/fastNLP/automl/enas_model.py b/legacy/automl/enas_model.py similarity index 100% rename from fastNLP/automl/enas_model.py rename to legacy/automl/enas_model.py diff --git a/fastNLP/automl/enas_trainer.py b/legacy/automl/enas_trainer.py similarity index 98% rename from fastNLP/automl/enas_trainer.py rename to legacy/automl/enas_trainer.py index 8f51c2cd..e3524aa9 100644 --- a/fastNLP/automl/enas_trainer.py +++ b/legacy/automl/enas_trainer.py @@ -11,15 +11,15 @@ import torch try: from tqdm.auto import tqdm except: - from ..core.utils import _pseudo_tqdm as tqdm + from fastNLP.core.utils import _pseudo_tqdm as tqdm -from ..core.batch import Batch -from ..core.callback import CallbackException -from ..core.dataset import DataSet -from ..core.utils import _move_dict_value_to_device +from fastNLP.core.batch import Batch +from fastNLP.core.callback import CallbackException +from fastNLP.core.dataset import DataSet +from fastNLP.core.utils import _move_dict_value_to_device import fastNLP from . import enas_utils as utils -from ..core.utils import _build_args +from fastNLP.core.utils import _build_args from torch.optim import Adam diff --git a/fastNLP/automl/enas_utils.py b/legacy/automl/enas_utils.py similarity index 100% rename from fastNLP/automl/enas_utils.py rename to legacy/automl/enas_utils.py diff --git a/fastNLP/component/__init__.py b/legacy/component/__init__.py similarity index 100% rename from fastNLP/component/__init__.py rename to legacy/component/__init__.py diff --git a/fastNLP/component/bert_tokenizer.py b/legacy/component/bert_tokenizer.py similarity index 100% rename from fastNLP/component/bert_tokenizer.py rename to legacy/component/bert_tokenizer.py diff --git a/test/api/test_pipeline.py b/legacy/test/api/test_pipeline.py similarity index 100% rename from test/api/test_pipeline.py rename to legacy/test/api/test_pipeline.py diff --git a/test/api/test_processor.py b/legacy/test/api/test_processor.py similarity index 100% rename from test/api/test_processor.py rename to legacy/test/api/test_processor.py diff --git a/test/automl/test_enas.py b/legacy/test/automl/test_enas.py similarity index 100% rename from test/automl/test_enas.py rename to legacy/test/automl/test_enas.py From 14fe885ecfa522b003b7439f31b294c4ec7ceb7d Mon Sep 17 00:00:00 2001 From: ChenXin Date: Tue, 14 May 2019 22:05:52 +0800 Subject: [PATCH 15/19] =?UTF-8?q?=E5=88=A0=E9=99=A4=E4=BA=86=E4=B8=8D?= =?UTF-8?q?=E8=83=BD=E8=BF=90=E8=A1=8C=E7=9A=84=E6=B5=8B=E8=AF=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- legacy/test/api/test_pipeline.py | 6 -- legacy/test/api/test_processor.py | 101 --------------------------- legacy/test/automl/test_enas.py | 111 ------------------------------ 3 files changed, 218 deletions(-) delete mode 100644 legacy/test/api/test_pipeline.py delete mode 100644 
legacy/test/api/test_processor.py delete mode 100644 legacy/test/automl/test_enas.py diff --git a/legacy/test/api/test_pipeline.py b/legacy/test/api/test_pipeline.py deleted file mode 100644 index c7094790..00000000 --- a/legacy/test/api/test_pipeline.py +++ /dev/null @@ -1,6 +0,0 @@ -import unittest - - -class TestPipeline(unittest.TestCase): - def test_case(self): - pass diff --git a/legacy/test/api/test_processor.py b/legacy/test/api/test_processor.py deleted file mode 100644 index 9611e458..00000000 --- a/legacy/test/api/test_processor.py +++ /dev/null @@ -1,101 +0,0 @@ -import random -import unittest - -import numpy as np - -from fastNLP import Vocabulary, Instance -from fastNLP.api.processor import FullSpaceToHalfSpaceProcessor, PreAppendProcessor, SliceProcessor, Num2TagProcessor, \ - IndexerProcessor, VocabProcessor, SeqLenProcessor, ModelProcessor, Index2WordProcessor, SetTargetProcessor, \ - SetInputProcessor, VocabIndexerProcessor -from fastNLP.core.dataset import DataSet - - -class TestProcessor(unittest.TestCase): - def test_FullSpaceToHalfSpaceProcessor(self): - ds = DataSet({"word": ["00, u1, u), (u2, u2"]}) - proc = FullSpaceToHalfSpaceProcessor("word") - ds = proc(ds) - self.assertEqual(ds.field_arrays["word"].content, ["00, u1, u), (u2, u2"]) - - def test_PreAppendProcessor(self): - ds = DataSet({"word": [["1234", "3456"], ["8789", "3464"]]}) - proc = PreAppendProcessor(data="abc", field_name="word") - ds = proc(ds) - self.assertEqual(ds.field_arrays["word"].content, [["abc", "1234", "3456"], ["abc", "8789", "3464"]]) - - def test_SliceProcessor(self): - ds = DataSet({"xx": [[random.randint(0, 10) for _ in range(30)]] * 40}) - proc = SliceProcessor(10, 20, 2, "xx", new_added_field_name="yy") - ds = proc(ds) - self.assertEqual(len(ds.field_arrays["yy"].content[0]), 5) - - def test_Num2TagProcessor(self): - ds = DataSet({"num": [["99.9982", "2134.0"], ["0.002", "234"]]}) - proc = Num2TagProcessor("", "num") - ds = proc(ds) - for data in ds.field_arrays["num"].content: - for d in data: - self.assertEqual(d, "") - - def test_VocabProcessor_and_IndexerProcessor(self): - ds = DataSet({"xx": [[str(random.randint(0, 10)) for _ in range(30)]] * 40}) - vocab_proc = VocabProcessor("xx") - vocab_proc(ds) - vocab = vocab_proc.vocab - self.assertTrue(isinstance(vocab, Vocabulary)) - self.assertTrue(len(vocab) > 5) - - proc = IndexerProcessor(vocab, "xx", "yy") - ds = proc(ds) - for data in ds.field_arrays["yy"].content[0]: - self.assertTrue(isinstance(data, int)) - - def test_SeqLenProcessor(self): - ds = DataSet({"xx": [[str(random.randint(0, 10)) for _ in range(30)]] * 10}) - proc = SeqLenProcessor("xx", "len") - ds = proc(ds) - for data in ds.field_arrays["len"].content: - self.assertEqual(data, 30) - - def test_ModelProcessor(self): - from fastNLP.models.cnn_text_classification import CNNText - model = CNNText((100, 100), 5) - ins_list = [] - for _ in range(64): - seq_len = np.random.randint(5, 30) - ins_list.append(Instance(word_seq=[np.random.randint(0, 100) for _ in range(seq_len)], seq_lens=seq_len)) - data_set = DataSet(ins_list) - data_set.set_input("word_seq", "seq_lens") - proc = ModelProcessor(model) - data_set = proc(data_set) - self.assertTrue("pred" in data_set) - - def test_Index2WordProcessor(self): - vocab = Vocabulary() - vocab.add_word_lst(["a", "b", "c", "d", "e"]) - proc = Index2WordProcessor(vocab, "tag_id", "tag") - data_set = DataSet([Instance(tag_id=[np.random.randint(0, 7) for _ in range(32)])]) - data_set = proc(data_set) - self.assertTrue("tag" in 
data_set) - - def test_SetTargetProcessor(self): - proc = SetTargetProcessor("a", "b", "c") - data_set = DataSet({"a": [1, 2, 3], "b": [1, 2, 3], "c": [1, 2, 3]}) - data_set = proc(data_set) - self.assertTrue(data_set["a"].is_target) - self.assertTrue(data_set["b"].is_target) - self.assertTrue(data_set["c"].is_target) - - def test_SetInputProcessor(self): - proc = SetInputProcessor("a", "b", "c") - data_set = DataSet({"a": [1, 2, 3], "b": [1, 2, 3], "c": [1, 2, 3]}) - data_set = proc(data_set) - self.assertTrue(data_set["a"].is_input) - self.assertTrue(data_set["b"].is_input) - self.assertTrue(data_set["c"].is_input) - - def test_VocabIndexerProcessor(self): - proc = VocabIndexerProcessor("word_seq", "word_ids") - data_set = DataSet([Instance(word_seq=["a", "b", "c", "d", "e"])]) - data_set = proc(data_set) - self.assertTrue("word_ids" in data_set) diff --git a/legacy/test/automl/test_enas.py b/legacy/test/automl/test_enas.py deleted file mode 100644 index 4fea1063..00000000 --- a/legacy/test/automl/test_enas.py +++ /dev/null @@ -1,111 +0,0 @@ -import unittest - -from fastNLP import DataSet -from fastNLP import Instance -from fastNLP import Vocabulary -from fastNLP.core.losses import CrossEntropyLoss -from fastNLP.core.metrics import AccuracyMetric - - -class TestENAS(unittest.TestCase): - def testENAS(self): - # 从csv读取数据到DataSet - sample_path = "tutorials/sample_data/tutorial_sample_dataset.csv" - dataset = DataSet.read_csv(sample_path, headers=('raw_sentence', 'label'), - sep='\t') - print(len(dataset)) - print(dataset[0]) - print(dataset[-3]) - - dataset.append(Instance(raw_sentence='fake data', label='0')) - # 将所有数字转为小写 - dataset.apply(lambda x: x['raw_sentence'].lower(), new_field_name='raw_sentence') - # label转int - dataset.apply(lambda x: int(x['label']), new_field_name='label') - - # 使用空格分割句子 - def split_sent(ins): - return ins['raw_sentence'].split() - - dataset.apply(split_sent, new_field_name='words') - - # 增加长度信息 - dataset.apply(lambda x: len(x['words']), new_field_name='seq_len') - print(len(dataset)) - print(dataset[0]) - - # DataSet.drop(func)筛除数据 - dataset.drop(lambda x: x['seq_len'] <= 3, inplace=True) - print(len(dataset)) - - # 设置DataSet中,哪些field要转为tensor - # set target,loss或evaluate中的golden,计算loss,模型评估时使用 - dataset.set_target("label") - # set input,模型forward时使用 - dataset.set_input("words", "seq_len") - - # 分出测试集、训练集 - test_data, train_data = dataset.split(0.5) - print(len(test_data)) - print(len(train_data)) - - # 构建词表, Vocabulary.add(word) - vocab = Vocabulary(min_freq=2) - train_data.apply(lambda x: [vocab.add(word) for word in x['words']]) - vocab.build_vocab() - - # index句子, Vocabulary.to_index(word) - train_data.apply(lambda x: [vocab.to_index(word) for word in x['words']], new_field_name='words') - test_data.apply(lambda x: [vocab.to_index(word) for word in x['words']], new_field_name='words') - print(test_data[0]) - - # 如果你们需要做强化学习或者GAN之类的项目,你们也可以使用这些数据预处理的工具 - from fastNLP.core.batch import Batch - from fastNLP.core.sampler import RandomSampler - - batch_iterator = Batch(dataset=train_data, batch_size=2, sampler=RandomSampler()) - for batch_x, batch_y in batch_iterator: - print("batch_x has: ", batch_x) - print("batch_y has: ", batch_y) - break - - from fastNLP.automl.enas_model import ENASModel - from fastNLP.automl.enas_controller import Controller - model = ENASModel(embed_num=len(vocab), num_classes=5) - controller = Controller() - - from fastNLP.automl.enas_trainer import ENASTrainer - - # 更改DataSet中对应field的名称,要以模型的forward等参数名一致 - 
train_data.rename_field('words', 'word_seq') # input field 与 forward 参数一致 - train_data.rename_field('label', 'label_seq') - test_data.rename_field('words', 'word_seq') - test_data.rename_field('label', 'label_seq') - - loss = CrossEntropyLoss(pred="output", target="label_seq") - metric = AccuracyMetric(pred="predict", target="label_seq") - - trainer = ENASTrainer(model=model, controller=controller, train_data=train_data, dev_data=test_data, - loss=CrossEntropyLoss(pred="output", target="label_seq"), - metrics=AccuracyMetric(pred="predict", target="label_seq"), - check_code_level=-1, - save_path=None, - batch_size=32, - print_every=1, - n_epochs=3, - final_epochs=1) - trainer.train() - print('Train finished!') - - # 调用Tester在test_data上评价效果 - from fastNLP import Tester - - tester = Tester(data=test_data, model=model, metrics=AccuracyMetric(pred="predict", target="label_seq"), - batch_size=4) - - acc = tester.test() - print(acc) - - -if __name__ == '__main__': - unittest.main() \ No newline at end of file From 38c2ef7d74a54fbcaafa2e6a18cce8299a13d13d Mon Sep 17 00:00:00 2001 From: ChenXin Date: Tue, 14 May 2019 22:49:49 +0800 Subject: [PATCH 16/19] =?UTF-8?q?=E4=BF=AE=E6=94=B9=20callback=20=E7=9A=84?= =?UTF-8?q?=E6=B5=8B=E8=AF=95=E6=96=87=E4=BB=B6?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- fastNLP/core/callback.py | 15 +- test/core/test_dataset.py | 2 +- test/test_tutorials.py | 14 +- .../advance_tutorial.ipynb | 235 +++++++----------- 4 files changed, 100 insertions(+), 166 deletions(-) diff --git a/fastNLP/core/callback.py b/fastNLP/core/callback.py index c944ec96..f337975a 100644 --- a/fastNLP/core/callback.py +++ b/fastNLP/core/callback.py @@ -584,7 +584,9 @@ class TensorboardCallback(Callback): path = os.path.join(save_dir, 'tensorboard_logs_{}'.format(self.trainer.start_time)) if tensorboardX_flag: self._summary_writer = SummaryWriter(path) - + else: + self._summary_writer = None + def on_batch_begin(self, batch_x, batch_y, indices): if "model" in self.options and self.graph_added is False: # tesorboardX 这里有大bug,暂时没法画模型图 @@ -596,10 +598,10 @@ class TensorboardCallback(Callback): self.graph_added = True def on_backward_begin(self, loss): - if "loss" in self.options: + if "loss" in self.options and self._summary_writer: self._summary_writer.add_scalar("loss", loss.item(), global_step=self.trainer.step) - if "model" in self.options: + if "model" in self.options and self._summary_writer: for name, param in self.trainer.model.named_parameters(): if param.requires_grad: self._summary_writer.add_scalar(name + "_mean", param.mean(), global_step=self.trainer.step) @@ -608,15 +610,16 @@ class TensorboardCallback(Callback): global_step=self.trainer.step) def on_valid_end(self, eval_result, metric_key, optimizer, is_better_eval): - if "metric" in self.options: + if "metric" in self.options and self._summary_writer: for name, metric in eval_result.items(): for metric_key, metric_val in metric.items(): self._summary_writer.add_scalar("valid_{}_{}".format(name, metric_key), metric_val, global_step=self.trainer.step) def on_train_end(self): - self._summary_writer.close() - del self._summary_writer + if self._summary_writer: + self._summary_writer.close() + del self._summary_writer def on_exception(self, exception): if hasattr(self, "_summary_writer"): diff --git a/test/core/test_dataset.py b/test/core/test_dataset.py index 69548e73..0228f207 100644 --- a/test/core/test_dataset.py +++ b/test/core/test_dataset.py @@ -172,7 +172,7 @@ class 
TestDataSetMethods(unittest.TestCase): def split_sent(ins): return ins['raw_sentence'].split() csv_loader = CSVLoader(headers=['raw_sentence', 'label'],sep='\t') - dataset = csv_loader.load('../data_for_tests/tutorial_sample_dataset.csv') + dataset = csv_loader.load('test/data_for_tests/tutorial_sample_dataset.csv') dataset.drop(lambda x: len(x['raw_sentence'].split()) == 0, inplace=True) dataset.apply(split_sent, new_field_name='words', is_input=True) # print(dataset) diff --git a/test/test_tutorials.py b/test/test_tutorials.py index 4b1889d4..255b391e 100644 --- a/test/test_tutorials.py +++ b/test/test_tutorials.py @@ -10,7 +10,7 @@ from fastNLP.core.metrics import AccuracyMetric class TestTutorial(unittest.TestCase): def test_fastnlp_10min_tutorial(self): # 从csv读取数据到DataSet - sample_path = "data_for_tests/tutorial_sample_dataset.csv" + sample_path = "test/data_for_tests/tutorial_sample_dataset.csv" dataset = DataSet.read_csv(sample_path, headers=('raw_sentence', 'label'), sep='\t') print(len(dataset)) @@ -113,14 +113,14 @@ class TestTutorial(unittest.TestCase): def test_fastnlp_1min_tutorial(self): # tutorials/fastnlp_1min_tutorial.ipynb - data_path = "tutorials/sample_data/tutorial_sample_dataset.csv" + data_path = "test/data_for_tests/tutorial_sample_dataset.csv" ds = DataSet.read_csv(data_path, headers=('raw_sentence', 'label'), sep='\t') print(ds[1]) # 将所有数字转为小写 ds.apply(lambda x: x['raw_sentence'].lower(), new_field_name='raw_sentence') # label转int - ds.apply(lambda x: int(x['label']), new_field_name='label_seq', is_target=True) + ds.apply(lambda x: int(x['label']), new_field_name='target', is_target=True) def split_sent(ins): return ins['raw_sentence'].split() @@ -137,9 +137,9 @@ class TestTutorial(unittest.TestCase): train_data.apply(lambda x: [vocab.add(word) for word in x['words']]) # index句子, Vocabulary.to_index(word) - train_data.apply(lambda x: [vocab.to_index(word) for word in x['words']], new_field_name='word_seq', + train_data.apply(lambda x: [vocab.to_index(word) for word in x['words']], new_field_name='words', is_input=True) - dev_data.apply(lambda x: [vocab.to_index(word) for word in x['words']], new_field_name='word_seq', + dev_data.apply(lambda x: [vocab.to_index(word) for word in x['words']], new_field_name='words', is_input=True) from fastNLP.models import CNNText @@ -152,14 +152,14 @@ class TestTutorial(unittest.TestCase): dev_data=dev_data, loss=CrossEntropyLoss(), optimizer= Adam(), - metrics=AccuracyMetric(target='label_seq') + metrics=AccuracyMetric(target='target') ) trainer.train() print('Train finished!') def test_fastnlp_advanced_tutorial(self): import os - os.chdir("tutorials/fastnlp_advanced_tutorial") + os.chdir("test/tutorials/fastnlp_advanced_tutorial") from fastNLP import DataSet from fastNLP import Instance diff --git a/tutorials/fastnlp_advanced_tutorial/advance_tutorial.ipynb b/tutorials/fastnlp_advanced_tutorial/advance_tutorial.ipynb index 64eb3462..7e487933 100644 --- a/tutorials/fastnlp_advanced_tutorial/advance_tutorial.ipynb +++ b/tutorials/fastnlp_advanced_tutorial/advance_tutorial.ipynb @@ -170,11 +170,11 @@ { "data": { "text/plain": [ - "DataSet({'image': tensor([[ 4.7106e-01, -1.2246e+00, 3.1234e-01, -1.6781e+00, -8.7967e-01],\n", - " [ 1.1454e+00, 1.2236e-01, 3.0258e-01, -1.5454e+00, 8.9201e-01],\n", - " [-5.7143e-03, 3.9488e-01, 2.0287e-01, -1.5726e+00, 9.3171e-01],\n", - " [ 6.8914e-01, -2.6302e-01, -8.2694e-01, 9.5942e-01, -5.2589e-01],\n", - " [-5.7798e-03, -9.1621e-03, 1.0077e-03, 9.1716e-02, 1.0565e+00]]) type=torch.Tensor,\n", + 
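[Editor's note] The test_tutorials.py hunks above rename the tutorial fields from 'word_seq'/'label_seq' to 'words'/'target', so that the input field names match the parameters of CNNText.forward and the target field matches the defaults of CrossEntropyLoss and AccuracyMetric. A self-contained sketch of that naming convention on a toy dataset (the data itself is invented for illustration):

```python
from fastNLP import DataSet, Instance, Vocabulary, Trainer
from fastNLP import CrossEntropyLoss, Adam, AccuracyMetric
from fastNLP.models import CNNText

# 'words'/'seq_len' match CNNText.forward's parameter names; 'target' matches
# the default key used by CrossEntropyLoss and AccuracyMetric.
ds = DataSet([Instance(words=['a', 'b', 'c'], target=0),
              Instance(words=['a', 'c'], target=1)] * 8)

vocab = Vocabulary()
ds.apply(lambda x: [vocab.add(w) for w in x['words']])
vocab.build_vocab()
ds.apply(lambda x: [vocab.to_index(w) for w in x['words']],
         new_field_name='words', is_input=True)
ds.apply(lambda x: len(x['words']), new_field_name='seq_len', is_input=True)
ds.set_target('target')
train_data, dev_data = ds.split(0.5)

model = CNNText((len(vocab), 50), num_classes=2, padding=2, dropout=0.1)
trainer = Trainer(train_data=train_data, dev_data=dev_data, model=model,
                  loss=CrossEntropyLoss(),                # pred='pred', target='target'
                  optimizer=Adam(),
                  metrics=AccuracyMetric(target='target'),
                  n_epochs=1, batch_size=4, use_tqdm=False)
trainer.train()
```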
"DataSet({'image': tensor([[ 0.3582, -1.0358, 1.4785, -1.5288, -0.9982],\n", + " [-0.3973, -0.4294, 0.9215, -1.9631, -1.6556],\n", + " [ 0.3313, -1.7714, 0.8729, 0.6976, -1.3172],\n", + " [-0.6403, 0.5023, -0.9919, 1.1178, -0.3710],\n", + " [-0.3692, 1.8631, -1.3646, -0.7290, -1.0774]]) type=torch.Tensor,\n", "'label': 0 type=int})" ] }, @@ -524,7 +524,11 @@ "outputs": [], "source": [ "# 设定特征域、标签域\n", - "data_set.set_input(\"premise\", \"premise_len\", \"hypothesis\", \"hypothesis_len\")\n", + "data_set.rename_field(\"premise\",\"words1\")\n", + "data_set.rename_field(\"premise_len\",\"seq_len1\")\n", + "data_set.rename_field(\"hypothesis\",\"words2\")\n", + "data_set.rename_field(\"hypothesis_len\",\"seq_len2\")\n", + "data_set.set_input(\"words1\", \"seq_len1\", \"words2\", \"seq_len2\")\n", "data_set.set_target(\"truth\")" ] }, @@ -536,10 +540,10 @@ { "data": { "text/plain": [ - "{'premise': ['a', 'woman', 'is', 'walking', 'across', 'the', 'street', 'eating', 'a', 'banana', ',', 'while', 'a', 'man', 'is', 'following', 'with', 'his', 'briefcase', '.'] type=list,\n", - "'hypothesis': ['a', 'woman', 'eating', 'a', 'banana', 'crosses', 'a', 'street'] type=list,\n", - "'premise_len': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", - "'hypothesis_len': [1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", + "{'words1': ['a', 'woman', 'is', 'walking', 'across', 'the', 'street', 'eating', 'a', 'banana', ',', 'while', 'a', 'man', 'is', 'following', 'with', 'his', 'briefcase', '.'] type=list,\n", + "'seq_len1': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", + "'words2': ['a', 'woman', 'eating', 'a', 'banana', 'crosses', 'a', 'street'] type=list,\n", + "'seq_len2': [1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", "'label': 0 type=int}" ] }, @@ -613,7 +617,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 23, "metadata": {}, "outputs": [], "source": [ @@ -622,49 +626,49 @@ "vocab = Vocabulary(max_size=10000, min_freq=2, unknown='', padding='')\n", "\n", "# 构建词表\n", - "train_data.apply(lambda x: [vocab.add(word) for word in x['premise']])\n", - "train_data.apply(lambda x: [vocab.add(word) for word in x['hypothesis']])\n", + "train_data.apply(lambda x: [vocab.add(word) for word in x['words1']])\n", + "train_data.apply(lambda x: [vocab.add(word) for word in x['words2']])\n", "vocab.build_vocab()" ] }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 24, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "({'premise': [2, 10, 9, 2, 15, 115, 6, 11, 5, 132, 17, 2, 76, 9, 77, 55, 3] type=list,\n", - " 'hypothesis': [1, 2, 56, 17, 1, 4, 13, 49, 123, 12, 6, 11, 3] type=list,\n", - " 'premise_len': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", - " 'hypothesis_len': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", - " 'label': 0 type=int},\n", - " {'premise': [50, 124, 10, 7, 68, 91, 92, 38, 2, 55, 3] type=list,\n", - " 'hypothesis': [21, 10, 5, 2, 55, 7, 99, 64, 48, 1, 22, 1, 3] type=list,\n", - " 'premise_len': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", - " 'hypothesis_len': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", + "({'words1': [2, 9, 4, 2, 75, 85, 7, 86, 76, 77, 87, 88, 89, 2, 90, 3] type=list,\n", + " 'seq_len1': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", + " 'words2': [18, 9, 10, 1, 3] type=list,\n", + " 'seq_len2': [1, 1, 1, 1, 1] type=list,\n", " 'label': 1 type=int},\n", - " {'premise': [13, 24, 4, 14, 29, 5, 25, 4, 8, 39, 9, 14, 34, 4, 40, 41, 
4, 16, 12, 2, 11, 4, 30, 28, 2, 42, 8, 2, 43, 44, 17, 2, 45, 35, 26, 31, 27, 5, 6, 32, 3] type=list,\n", - " 'hypothesis': [37, 49, 123, 30, 28, 2, 55, 12, 2, 11, 3] type=list,\n", - " 'premise_len': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", - " 'hypothesis_len': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", - " 'label': 0 type=int})" + " {'words1': [22, 32, 5, 110, 81, 111, 112, 5, 82, 3] type=list,\n", + " 'seq_len1': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", + " 'words2': [64, 32, 82, 133, 84, 3] type=list,\n", + " 'seq_len2': [1, 1, 1, 1, 1, 1] type=list,\n", + " 'label': 0 type=int},\n", + " {'words1': [2, 9, 97, 1, 20, 7, 54, 5, 1, 1, 70, 2, 11, 110, 2, 62, 3] type=list,\n", + " 'seq_len1': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", + " 'words2': [23, 1, 58, 10, 12, 1, 70, 133, 84, 3] type=list,\n", + " 'seq_len2': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", + " 'label': 1 type=int})" ] }, - "execution_count": 23, + "execution_count": 24, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# 根据词表index句子\n", - "train_data.apply(lambda x: [vocab.to_index(word) for word in x['premise']], new_field_name='premise')\n", - "train_data.apply(lambda x: [vocab.to_index(word) for word in x['hypothesis']], new_field_name='hypothesis')\n", - "dev_data.apply(lambda x: [vocab.to_index(word) for word in x['premise']], new_field_name='premise')\n", - "dev_data.apply(lambda x: [vocab.to_index(word) for word in x['hypothesis']], new_field_name='hypothesis')\n", - "test_data.apply(lambda x: [vocab.to_index(word) for word in x['premise']], new_field_name='premise')\n", - "test_data.apply(lambda x: [vocab.to_index(word) for word in x['hypothesis']], new_field_name='hypothesis')\n", + "train_data.apply(lambda x: [vocab.to_index(word) for word in x['words1']], new_field_name='words1')\n", + "train_data.apply(lambda x: [vocab.to_index(word) for word in x['words2']], new_field_name='words2')\n", + "dev_data.apply(lambda x: [vocab.to_index(word) for word in x['words1']], new_field_name='words1')\n", + "dev_data.apply(lambda x: [vocab.to_index(word) for word in x['words2']], new_field_name='words2')\n", + "test_data.apply(lambda x: [vocab.to_index(word) for word in x['words1']], new_field_name='words1')\n", + "test_data.apply(lambda x: [vocab.to_index(word) for word in x['words2']], new_field_name='words2')\n", "train_data[-1], dev_data[-1], test_data[-1]" ] }, @@ -679,7 +683,7 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 25, "metadata": {}, "outputs": [], "source": [ @@ -703,35 +707,35 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 26, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "({'premise': [1037, 2158, 1998, 1037, 2450, 2892, 1996, 2395, 1999, 2392, 1997, 1037, 10733, 1998, 100, 4825, 1012] type=list,\n", - " 'hypothesis': [100, 1037, 3232, 1997, 7884, 1010, 2048, 2111, 3328, 2408, 1996, 2395, 1012] type=list,\n", - " 'premise_len': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", - " 'hypothesis_len': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", - " 'label': 0 type=int},\n", - " {'premise': [2019, 3080, 2158, 2003, 5948, 4589, 10869, 2012, 1037, 4825, 1012] type=list,\n", - " 'hypothesis': [100, 2158, 1999, 1037, 4825, 2003, 3403, 2005, 2010, 7954, 2000, 7180, 1012] type=list,\n", - " 'premise_len': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", - " 
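[Editor's note] In the re-indexed outputs above, several dev/test tokens map to index 1: the vocabulary is built from the training split only, with min_freq=2, so rare or unseen words fall back to the unknown token. A small sketch of that behaviour, assuming fastNLP's usual ordering with the padding token at index 0 and the unknown token at index 1:

```python
from fastNLP import Vocabulary

vocab = Vocabulary(min_freq=2, unknown='<unk>', padding='<pad>')
vocab.add_word_lst(['banana'] * 3 + ['briefcase'] * 2 + ['rare'])  # 'rare' occurs once
vocab.build_vocab()

print(vocab.to_index('<pad>'))    # 0 under the assumed ordering
print(vocab.to_index('<unk>'))    # 1
print(vocab.to_index('banana'))   # an ordinary in-vocabulary index
print(vocab.to_index('rare'))     # filtered out by min_freq, falls back to <unk>
print(vocab.to_index('missing'))  # never seen, also falls back to <unk>
```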
'hypothesis_len': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", - " 'label': 1 type=int})" + "({'words1': [1037, 2450, 1999, 1037, 2665, 6598, 1998, 7415, 2058, 2014, 2132, 2559, 2875, 1037, 3028, 1012] type=list,\n", + " 'seq_len1': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", + " 'words2': [100, 2450, 2003, 3147, 1012] type=list,\n", + " 'seq_len2': [1, 1, 1, 1, 1] type=list,\n", + " 'label': 1 type=int},\n", + " {'words1': [2048, 2308, 1010, 3173, 2833, 100, 16143, 1010, 8549, 1012] type=list,\n", + " 'seq_len1': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", + " 'words2': [100, 2308, 8549, 2169, 2060, 1012] type=list,\n", + " 'seq_len2': [1, 1, 1, 1, 1, 1] type=list,\n", + " 'label': 0 type=int})" ] }, - "execution_count": 25, + "execution_count": 26, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# 根据词表index句子\n", - "train_data_2.apply(lambda x: [vocab_bert.to_index(word) for word in x['premise']], new_field_name='premise')\n", - "train_data_2.apply(lambda x: [vocab_bert.to_index(word) for word in x['hypothesis']], new_field_name='hypothesis')\n", - "dev_data_2.apply(lambda x: [vocab_bert.to_index(word) for word in x['premise']], new_field_name='premise')\n", - "dev_data_2.apply(lambda x: [vocab_bert.to_index(word) for word in x['hypothesis']], new_field_name='hypothesis')\n", + "train_data_2.apply(lambda x: [vocab_bert.to_index(word) for word in x['words1']], new_field_name='words1')\n", + "train_data_2.apply(lambda x: [vocab_bert.to_index(word) for word in x['words2']], new_field_name='words2')\n", + "dev_data_2.apply(lambda x: [vocab_bert.to_index(word) for word in x['words1']], new_field_name='words1')\n", + "dev_data_2.apply(lambda x: [vocab_bert.to_index(word) for word in x['words2']], new_field_name='words2')\n", "train_data_2[-1], dev_data_2[-1]" ] }, @@ -747,7 +751,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 27, "metadata": {}, "outputs": [ { @@ -760,10 +764,10 @@ " 'num_classes': 3,\n", " 'gpu': True,\n", " 'batch_size': 32,\n", - " 'vocab_size': 156}" + " 'vocab_size': 143}" ] }, - "execution_count": 26, + "execution_count": 27, "metadata": {}, "output_type": "execute_result" } @@ -779,7 +783,7 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 28, "metadata": {}, "outputs": [ { @@ -788,21 +792,17 @@ "ESIM(\n", " (drop): Dropout(p=0.3)\n", " (embedding): Embedding(\n", - " (embed): Embedding(156, 300, padding_idx=0)\n", + " 143, 300\n", " (dropout): Dropout(p=0.3)\n", " )\n", - " (embedding_layer): Linear(\n", - " (linear): Linear(in_features=300, out_features=300, bias=True)\n", - " )\n", + " (embedding_layer): Linear(in_features=300, out_features=300, bias=True)\n", " (encoder): LSTM(\n", " (lstm): LSTM(300, 300, batch_first=True, bidirectional=True)\n", " )\n", - " (bi_attention): Bi_Attention()\n", + " (bi_attention): BiAttention()\n", " (mean_pooling): MeanPoolWithMask()\n", " (max_pooling): MaxPoolWithMask()\n", - " (inference_layer): Linear(\n", - " (linear): Linear(in_features=1200, out_features=300, bias=True)\n", - " )\n", + " (inference_layer): Linear(in_features=1200, out_features=300, bias=True)\n", " (decoder): LSTM(\n", " (lstm): LSTM(300, 300, batch_first=True, bidirectional=True)\n", " )\n", @@ -816,7 +816,7 @@ ")" ] }, - "execution_count": 27, + "execution_count": 28, "metadata": {}, "output_type": "execute_result" } @@ -824,49 +824,10 @@ "source": [ "# step 2:加载ESIM模型\n", "from fastNLP.models import ESIM\n", - "model = ESIM(**args.data)\n", + "model = 
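[Editor's note] vocab_bert in these cells is not built from the corpus at all; it is loaded from a BERT-style vocab.txt (one token per line) so that indices stay aligned with the pretrained model, which is why unknown words show up as 100 ([UNK] in that file). A sketch of the loading pattern used by the tutorial, assuming vocab.txt is available locally:

```python
from fastNLP import Vocabulary

# One token per line; the order in the file must be preserved.
with open('vocab.txt', encoding='utf-8') as f:
    tokens = [line.strip() for line in f]

# No automatic unknown/padding entries: the file already contains [UNK] and [PAD].
vocab_bert = Vocabulary(unknown=None, padding=None)
vocab_bert.add_word_lst(tokens)
vocab_bert.build_vocab()
vocab_bert.unknown = '[UNK]'
vocab_bert.padding = '[PAD]'

print(vocab_bert.to_index('[UNK]'))   # 100 with the standard BERT vocabulary file
```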
ESIM(args[\"vocab_size\"], args[\"embed_dim\"], args[\"hidden_size\"], args[\"dropout\"], args[\"num_classes\"])\n", "model" ] }, - { - "cell_type": "code", - "execution_count": 28, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "CNNText(\n", - " (embed): Embedding(\n", - " (embed): Embedding(156, 50, padding_idx=0)\n", - " (dropout): Dropout(p=0.0)\n", - " )\n", - " (conv_pool): ConvMaxpool(\n", - " (convs): ModuleList(\n", - " (0): Conv1d(50, 3, kernel_size=(3,), stride=(1,), padding=(2,))\n", - " (1): Conv1d(50, 4, kernel_size=(4,), stride=(1,), padding=(2,))\n", - " (2): Conv1d(50, 5, kernel_size=(5,), stride=(1,), padding=(2,))\n", - " )\n", - " )\n", - " (dropout): Dropout(p=0.1)\n", - " (fc): Linear(\n", - " (linear): Linear(in_features=12, out_features=5, bias=True)\n", - " )\n", - ")" - ] - }, - "execution_count": 28, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 另一个例子:加载CNN文本分类模型\n", - "from fastNLP.models import CNNText\n", - "cnn_text_model = CNNText(embed_num=len(vocab), embed_dim=50, num_classes=5, padding=2, dropout=0.1)\n", - "cnn_text_model" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -1009,54 +970,25 @@ "name": "stdout", "output_type": "stream", "text": [ - "training epochs started 2019-04-14-23-22-28\n", - "[epoch: 1 step: 1] train loss: 1.51372 time: 0:00:00\n", - "[epoch: 1 step: 2] train loss: 1.26874 time: 0:00:00\n", - "[epoch: 1 step: 3] train loss: 1.49786 time: 0:00:00\n", - "[epoch: 1 step: 4] train loss: 1.37505 time: 0:00:00\n", - "Evaluation at Epoch 1/5. Step:4/20. AccuracyMetric: acc=0.344828\n", - "\n", - "[epoch: 2 step: 5] train loss: 1.21877 time: 0:00:00\n", - "[epoch: 2 step: 6] train loss: 1.14183 time: 0:00:00\n", - "[epoch: 2 step: 7] train loss: 1.15934 time: 0:00:00\n", - "[epoch: 2 step: 8] train loss: 1.55148 time: 0:00:00\n", - "Evaluation at Epoch 2/5. Step:8/20. AccuracyMetric: acc=0.344828\n", - "\n", - "[epoch: 3 step: 9] train loss: 1.1457 time: 0:00:00\n", - "[epoch: 3 step: 10] train loss: 1.0547 time: 0:00:00\n", - "[epoch: 3 step: 11] train loss: 1.40139 time: 0:00:00\n", - "[epoch: 3 step: 12] train loss: 0.551445 time: 0:00:00\n", - "Evaluation at Epoch 3/5. Step:12/20. AccuracyMetric: acc=0.275862\n", - "\n", - "[epoch: 4 step: 13] train loss: 1.07965 time: 0:00:00\n", - "[epoch: 4 step: 14] train loss: 1.04118 time: 0:00:00\n", - "[epoch: 4 step: 15] train loss: 1.11719 time: 0:00:00\n", - "[epoch: 4 step: 16] train loss: 1.09861 time: 0:00:00\n", - "Evaluation at Epoch 4/5. Step:16/20. AccuracyMetric: acc=0.275862\n", - "\n", - "[epoch: 5 step: 17] train loss: 1.10795 time: 0:00:00\n", - "[epoch: 5 step: 18] train loss: 1.26715 time: 0:00:00\n", - "[epoch: 5 step: 19] train loss: 1.19875 time: 0:00:00\n", - "[epoch: 5 step: 20] train loss: 1.09862 time: 0:00:00\n", - "Evaluation at Epoch 5/5. Step:20/20. 
AccuracyMetric: acc=0.37931\n", - "\n", - "\n", - "In Epoch:5/Step:20, got best dev performance:AccuracyMetric: acc=0.37931\n", - "Reloaded the best model.\n" + "training epochs started 2019-05-14-19-49-25\n" ] }, { - "data": { - "text/plain": [ - "{'best_eval': {'AccuracyMetric': {'acc': 0.37931}},\n", - " 'best_epoch': 5,\n", - " 'best_step': 20,\n", - " 'seconds': 0.5}" - ] - }, - "execution_count": 29, - "metadata": {}, - "output_type": "execute_result" + "ename": "AssertionError", + "evalue": "seq_len can only have one dimension, got False.", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mAssertionError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 17\u001b[0m \u001b[0muse_tqdm\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 18\u001b[0m )\n\u001b[0;32m---> 19\u001b[0;31m \u001b[0mtrainer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/core/trainer.py\u001b[0m in \u001b[0;36mtrain\u001b[0;34m(self, load_best_model)\u001b[0m\n\u001b[1;32m 522\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 523\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcallback_manager\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mon_train_begin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 524\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_train\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 525\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcallback_manager\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mon_train_end\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 526\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mCallbackException\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mKeyboardInterrupt\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/core/trainer.py\u001b[0m in \u001b[0;36m_train\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 573\u001b[0m \u001b[0;31m# negative sampling; replace unknown; re-weight batch_y\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 574\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcallback_manager\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mon_batch_begin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch_x\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch_y\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mindices\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 575\u001b[0;31m \u001b[0mprediction\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_data_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mbatch_x\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 576\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 577\u001b[0m \u001b[0;31m# edit prediction\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/core/trainer.py\u001b[0m in \u001b[0;36m_data_forward\u001b[0;34m(self, network, x)\u001b[0m\n\u001b[1;32m 661\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_data_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnetwork\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 662\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_build_args\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnetwork\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 663\u001b[0;31m \u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnetwork\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 664\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0my\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 665\u001b[0m raise TypeError(\n", + "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m 489\u001b[0m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_slow_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 490\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 491\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 492\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mhook\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_forward_hooks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 493\u001b[0m \u001b[0mhook_result\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mhook\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/models/snli.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, words1, words2, seq_len1, 
seq_len2, target)\u001b[0m\n\u001b[1;32m 76\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 77\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mseq_len1\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 78\u001b[0;31m \u001b[0mseq_len1\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mseq_len_to_mask\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mseq_len1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 79\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 80\u001b[0m \u001b[0mseq_len1\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mones\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpremise0\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msize\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpremise0\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msize\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/core/utils.py\u001b[0m in \u001b[0;36mseq_len_to_mask\u001b[0;34m(seq_len)\u001b[0m\n\u001b[1;32m 626\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 627\u001b[0m \u001b[0;32melif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mseq_len\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTensor\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 628\u001b[0;31m \u001b[0;32massert\u001b[0m \u001b[0mseq_len\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdim\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34mf\"seq_len can only have one dimension, got {seq_len.dim() == 1}.\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 629\u001b[0m \u001b[0mbatch_size\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mseq_len\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msize\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 630\u001b[0m \u001b[0mmax_len\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mseq_len\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmax\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlong\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mAssertionError\u001b[0m: seq_len can only have one dimension, got False." 
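[Editor's note] The captured traceback fails inside seq_len_to_mask because the tutorial's seq_len1/seq_len2 fields hold per-token [1, 1, ...] lists, which pad into a 2-D batch tensor, while ESIM.forward expects one integer length per example. A minimal sketch of the difference, using the fastNLP.core.utils.seq_len_to_mask shown in the traceback:

```python
import torch
from fastNLP.core.utils import seq_len_to_mask

# Correct: one length per example, a 1-D tensor of shape [batch_size].
seq_len = torch.LongTensor([20, 8])
mask = seq_len_to_mask(seq_len)
print(mask.shape)           # torch.Size([2, 20]); 1 for real tokens, 0 for padding

# What the notebook effectively passed: per-token [1, 1, ...] lists padded to 2-D.
bad_seq_len = torch.LongTensor([[1] * 20, [1] * 8 + [0] * 12])
try:
    seq_len_to_mask(bad_seq_len)
except AssertionError as err:
    print(err)              # "seq_len can only have one dimension, ..."
```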
+ ] } ], "source": [ @@ -1073,7 +1005,6 @@ " print_every=-1,\n", " validate_every=-1,\n", " dev_data=dev_data,\n", - " use_cuda=True,\n", " optimizer=Adam(lr=1e-3, weight_decay=0),\n", " check_code_level=-1,\n", " metric_key='acc',\n", @@ -1178,7 +1109,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.0" + "version": "3.6.7" } }, "nbformat": 4, From 63e023b2fad41b833ddf56ceb1443367043e3310 Mon Sep 17 00:00:00 2001 From: ChenXin Date: Tue, 14 May 2019 23:08:00 +0800 Subject: [PATCH 17/19] =?UTF-8?q?=E5=88=A0=E9=99=A4=E4=BA=86=E8=BF=87?= =?UTF-8?q?=E6=97=B6=E7=9A=84=E6=95=99=E7=A8=8B=E5=92=8C=E6=B5=8B=E8=AF=95?= =?UTF-8?q?=E6=96=87=E4=BB=B6?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- test/test_tutorials.py | 278 - .../advance_tutorial.ipynb | 1117 - .../fastnlp_advanced_tutorial/data/config | 8 - .../fastnlp_advanced_tutorial/hypothesis | 100 - tutorials/fastnlp_advanced_tutorial/label | 100 - tutorials/fastnlp_advanced_tutorial/premise | 100 - .../tutorial_sample_dataset.csv | 77 - tutorials/fastnlp_advanced_tutorial/vocab.txt | 30522 ---------------- 8 files changed, 32302 deletions(-) delete mode 100644 tutorials/fastnlp_advanced_tutorial/advance_tutorial.ipynb delete mode 100644 tutorials/fastnlp_advanced_tutorial/data/config delete mode 100644 tutorials/fastnlp_advanced_tutorial/hypothesis delete mode 100644 tutorials/fastnlp_advanced_tutorial/label delete mode 100644 tutorials/fastnlp_advanced_tutorial/premise delete mode 100644 tutorials/fastnlp_advanced_tutorial/tutorial_sample_dataset.csv delete mode 100644 tutorials/fastnlp_advanced_tutorial/vocab.txt diff --git a/test/test_tutorials.py b/test/test_tutorials.py index 255b391e..128e4235 100644 --- a/test/test_tutorials.py +++ b/test/test_tutorials.py @@ -157,284 +157,6 @@ class TestTutorial(unittest.TestCase): trainer.train() print('Train finished!') - def test_fastnlp_advanced_tutorial(self): - import os - os.chdir("test/tutorials/fastnlp_advanced_tutorial") - - from fastNLP import DataSet - from fastNLP import Instance - from fastNLP import Vocabulary - from fastNLP import Trainer - from fastNLP import Tester - - # ### Instance - # Instance表示一个样本,由一个或者多个field(域、属性、特征)组成,每个field具有自己的名字以及值 - # 在初始化Instance的时候可以定义它包含的field,使用"field_name=field_value"的写法 - - # In[2]: - - # 组织一个Instance,这个Instance由premise、hypothesis、label三个field组成 - instance = Instance(premise='an premise example .', hypothesis='an hypothesis example.', label=1) - instance - - # In[3]: - - data_set = DataSet([instance] * 5) - data_set.append(instance) - data_set[-2:] - - # In[4]: - - # 如果某一个field的类型与dataset对应的field类型不一样仍可被加入dataset中 - instance2 = Instance(premise='the second premise example .', hypothesis='the second hypothesis example.', - label='1') - try: - data_set.append(instance2) - except: - pass - data_set[-2:] - - # In[5]: - - # 如果某一个field的名字不对,则该instance不能被append到dataset中 - instance3 = Instance(premises='the third premise example .', hypothesis='the third hypothesis example.', - label=1) - try: - data_set.append(instance3) - except: - print('cannot append instance') - pass - data_set[-2:] - - # In[6]: - - # 除了文本以外,还可以将tensor作为其中一个field的value - import torch - tensor_ins = Instance(image=torch.randn(5, 5), label=0) - ds = DataSet() - ds.append(tensor_ins) - ds - - from fastNLP import DataSet - from fastNLP import Instance - - # 从csv读取数据到DataSet - # 类csv文件,即每一行为一个example的文件,都可以使用这种方法进行数据读取 - dataset = DataSet.read_csv('tutorial_sample_dataset.csv', 
headers=('raw_sentence', 'label'), sep='\t') - # 查看DataSet的大小 - len(dataset) - - # In[8]: - - # 使用数字索引[k],获取第k个样本 - dataset[0] - - # In[9]: - - # 获取的样本是一个Instance - type(dataset[0]) - - # In[10]: - - # 使用数字索引[a: b],获取第a到第b个样本 - dataset[0: 3] - - # In[11]: - - # 索引也可以是负数 - dataset[-1] - - data_path = ['premise', 'hypothesis', 'label'] - - # 读入文件 - with open(data_path[0]) as f: - premise = f.readlines() - - with open(data_path[1]) as f: - hypothesis = f.readlines() - - with open(data_path[2]) as f: - label = f.readlines() - - assert len(premise) == len(hypothesis) and len(hypothesis) == len(label) - - # 组织DataSet - data_set = DataSet() - for p, h, l in zip(premise, hypothesis, label): - p = p.strip() # 将行末空格去除 - h = h.strip() # 将行末空格去除 - data_set.append(Instance(premise=p, hypothesis=h, truth=l)) - - data_set[0] - - # ### DataSet的其他操作 - # 在构建完毕DataSet后,仍然可以对DataSet的内容进行操作,函数接口为DataSet.apply() - - # In[13]: - - # 将premise域的所有文本转成小写 - data_set.apply(lambda x: x['premise'].lower(), new_field_name='premise') - data_set[-2:] - - # In[14]: - - # label转int - data_set.apply(lambda x: int(x['truth']), new_field_name='truth') - data_set[-2:] - - # In[15]: - - # 使用空格分割句子 - def split_sent(ins): - return ins['premise'].split() - - data_set.apply(split_sent, new_field_name='premise') - data_set.apply(lambda x: x['hypothesis'].split(), new_field_name='hypothesis') - data_set[-2:] - - # In[16]: - - # 筛选数据 - origin_data_set_len = len(data_set) - data_set.drop(lambda x: len(x['premise']) <= 6, inplace=True) - origin_data_set_len, len(data_set) - - # In[17]: - - # 增加长度信息 - data_set.apply(lambda x: [1] * len(x['premise']), new_field_name='premise_len') - data_set.apply(lambda x: [1] * len(x['hypothesis']), new_field_name='hypothesis_len') - data_set[-1] - - # In[18]: - - # 设定特征域、标签域 - data_set.set_input("premise", "premise_len", "hypothesis", "hypothesis_len") - data_set.set_target("truth") - - # In[19]: - - # 重命名field - data_set.rename_field('truth', 'label') - data_set[-1] - - # In[20]: - - # 切分训练、验证集、测试集 - train_data, vad_data = data_set.split(0.5) - dev_data, test_data = vad_data.split(0.4) - len(train_data), len(dev_data), len(test_data) - - # In[21]: - - # 深拷贝一个数据集 - import copy - train_data_2, dev_data_2 = copy.deepcopy(train_data), copy.deepcopy(dev_data) - del copy - - # 初始化词表,该词表最大的vocab_size为10000,词表中每个词出现的最低频率为2,''表示未知词语,''表示padding词语 - # Vocabulary默认初始化参数为max_size=None, min_freq=None, unknown='', padding='' - vocab = Vocabulary(max_size=10000, min_freq=2, unknown='', padding='') - - # 构建词表 - train_data.apply(lambda x: [vocab.add(word) for word in x['premise']]) - train_data.apply(lambda x: [vocab.add(word) for word in x['hypothesis']]) - vocab.build_vocab() - - # In[23]: - - # 根据词表index句子 - train_data.apply(lambda x: [vocab.to_index(word) for word in x['premise']], new_field_name='premise') - train_data.apply(lambda x: [vocab.to_index(word) for word in x['hypothesis']], new_field_name='hypothesis') - dev_data.apply(lambda x: [vocab.to_index(word) for word in x['premise']], new_field_name='premise') - dev_data.apply(lambda x: [vocab.to_index(word) for word in x['hypothesis']], new_field_name='hypothesis') - test_data.apply(lambda x: [vocab.to_index(word) for word in x['premise']], new_field_name='premise') - test_data.apply(lambda x: [vocab.to_index(word) for word in x['hypothesis']], new_field_name='hypothesis') - train_data[-1], dev_data[-1], test_data[-1] - - # 读入vocab文件 - with open('vocab.txt', encoding='utf-8') as f: - lines = f.readlines() - vocabs = [] - for line in lines: - 
vocabs.append(line.strip()) - - # 实例化Vocabulary - vocab_bert = Vocabulary(unknown=None, padding=None) - # 将vocabs列表加入Vocabulary - vocab_bert.add_word_lst(vocabs) - # 构建词表 - vocab_bert.build_vocab() - # 更新unknown与padding的token文本 - vocab_bert.unknown = '[UNK]' - vocab_bert.padding = '[PAD]' - - # In[25]: - - # 根据词表index句子 - train_data_2.apply(lambda x: [vocab_bert.to_index(word) for word in x['premise']], new_field_name='premise') - train_data_2.apply(lambda x: [vocab_bert.to_index(word) for word in x['hypothesis']], - new_field_name='hypothesis') - dev_data_2.apply(lambda x: [vocab_bert.to_index(word) for word in x['premise']], new_field_name='premise') - dev_data_2.apply(lambda x: [vocab_bert.to_index(word) for word in x['hypothesis']], new_field_name='hypothesis') - train_data_2[-1], dev_data_2[-1] - - for data in [train_data, dev_data, test_data]: - data.rename_field('premise', 'words1') - data.rename_field('hypothesis', 'words2') - data.rename_field('premise_len', 'seq_len1') - data.rename_field('hypothesis_len', 'seq_len2') - data.set_input('words1', 'words2', 'seq_len1', 'seq_len2') - - - # step 1:加载模型参数(非必选) - from fastNLP.io.config_io import ConfigSection, ConfigLoader - args = ConfigSection() - ConfigLoader().load_config("./data/config", {"esim_model": args}) - args["vocab_size"] = len(vocab) - args.data - - # In[27]: - - # step 2:加载ESIM模型 - from fastNLP.models import ESIM - model = ESIM(**args.data) - model - - # In[28]: - - # 另一个例子:加载CNN文本分类模型 - from fastNLP.models import CNNText - cnn_text_model = CNNText((len(vocab), 50), num_classes=5, padding=2, dropout=0.1) - - from fastNLP import CrossEntropyLoss - from fastNLP import Adam - from fastNLP import AccuracyMetric - trainer = Trainer( - train_data=train_data, - model=model, - loss=CrossEntropyLoss(pred='pred', target='label'), - metrics=AccuracyMetric(target='label'), - n_epochs=3, - batch_size=16, - print_every=-1, - validate_every=-1, - dev_data=dev_data, - optimizer=Adam(lr=1e-3, weight_decay=0), - check_code_level=-1, - metric_key='acc', - use_tqdm=False, - ) - trainer.train() - - tester = Tester( - data=test_data, - model=model, - metrics=AccuracyMetric(target='label'), - batch_size=args["batch_size"], - ) - tester.test() - def setUp(self): import os self._init_wd = os.path.abspath(os.curdir) diff --git a/tutorials/fastnlp_advanced_tutorial/advance_tutorial.ipynb b/tutorials/fastnlp_advanced_tutorial/advance_tutorial.ipynb deleted file mode 100644 index 7e487933..00000000 --- a/tutorials/fastnlp_advanced_tutorial/advance_tutorial.ipynb +++ /dev/null @@ -1,1117 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# fastNLP开发进阶教程\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 组织数据部分\n", - "## DataSet & Instance\n", - "fastNLP用DataSet和Instance保存和处理数据。每个DataSet表示一个数据集,每个Instance表示一个数据样本。一个DataSet存有多个Instance,每个Instance可以自定义存哪些内容。" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "# 声明部件\n", - "import torch\n", - "import fastNLP\n", - "from fastNLP import DataSet\n", - "from fastNLP import Instance\n", - "from fastNLP import Vocabulary\n", - "from fastNLP import Trainer\n", - "from fastNLP import Tester" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Instance\n", - "Instance表示一个样本,由一个或者多个field(域、属性、特征)组成,每个field具有自己的名字以及值\n", - "在初始化Instance的时候可以定义它包含的field,使用\"field_name=field_value\"的写法" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, 
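[Editor's note] The deleted test ends by scoring the trained model with a Tester. A short sketch of that last step; `model` and `test_data` stand in for the objects built earlier in the deleted test, and the target field is assumed to still be named 'label':

```python
from fastNLP import Tester, AccuracyMetric

# Tester calls model.predict() when it exists, otherwise model.forward(),
# and matches the returned 'pred' against the 'label' target field.
tester = Tester(data=test_data,
                model=model,
                metrics=AccuracyMetric(target='label'),
                batch_size=4)
result = tester.test()
print(result)   # e.g. {'AccuracyMetric': {'acc': ...}}
```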
- "outputs": [ - { - "data": { - "text/plain": [ - "{'premise': an premise example . type=str,\n", - "'hypothesis': an hypothesis example. type=str,\n", - "'label': 1 type=int}" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 组织一个Instance,这个Instance由premise、hypothesis、label三个field组成\n", - "instance = Instance(premise='an premise example .', hypothesis='an hypothesis example.', label=1)\n", - "instance" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "DataSet({'premise': an premise example . type=str,\n", - "'hypothesis': an hypothesis example. type=str,\n", - "'label': 1 type=int},\n", - "{'premise': an premise example . type=str,\n", - "'hypothesis': an hypothesis example. type=str,\n", - "'label': 1 type=int})" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "data_set = DataSet([instance] * 5)\n", - "data_set.append(instance)\n", - "data_set[-2: ]" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "DataSet({'premise': an premise example . type=str,\n", - "'hypothesis': an hypothesis example. type=str,\n", - "'label': 1 type=int},\n", - "{'premise': the second premise example . type=str,\n", - "'hypothesis': the second hypothesis example. type=str,\n", - "'label': 1 type=str})" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 如果某一个field的类型与dataset对应的field类型不一样仍可被加入dataset中\n", - "instance2 = Instance(premise='the second premise example .', hypothesis='the second hypothesis example.', label='1')\n", - "try:\n", - " data_set.append(instance2)\n", - "except:\n", - " pass\n", - "data_set[-2: ]" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "cannot append instance\n" - ] - }, - { - "data": { - "text/plain": [ - "DataSet({'premise': an premise example . type=str,\n", - "'hypothesis': an hypothesis example. type=str,\n", - "'label': 1 type=int},\n", - "{'premise': the second premise example . type=str,\n", - "'hypothesis': the second hypothesis example. 
type=str,\n", - "'label': 1 type=str})" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 如果某一个field的名字不对,则该instance不能被append到dataset中\n", - "instance3 = Instance(premises='the third premise example .', hypothesis='the third hypothesis example.', label=1)\n", - "try:\n", - " data_set.append(instance3)\n", - "except:\n", - " print('cannot append instance')\n", - " pass\n", - "data_set[-2: ]" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "DataSet({'image': tensor([[ 0.3582, -1.0358, 1.4785, -1.5288, -0.9982],\n", - " [-0.3973, -0.4294, 0.9215, -1.9631, -1.6556],\n", - " [ 0.3313, -1.7714, 0.8729, 0.6976, -1.3172],\n", - " [-0.6403, 0.5023, -0.9919, 1.1178, -0.3710],\n", - " [-0.3692, 1.8631, -1.3646, -0.7290, -1.0774]]) type=torch.Tensor,\n", - "'label': 0 type=int})" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 除了文本以外,还可以将tensor作为其中一个field的value\n", - "import torch\n", - "tensor_ins = Instance(image=torch.randn(5, 5), label=0)\n", - "ds = DataSet()\n", - "ds.append(tensor_ins)\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### DataSet\n", - "### 使用现有代码读取并组织DataSet\n", - "在DataSet类当中有一些read_* 方法,可以从文件中读取数据并组织DataSet" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "77" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import fastNLP\n", - "from fastNLP import DataSet\n", - "from fastNLP import Instance\n", - "\n", - "# 从csv读取数据到DataSet\n", - "# 类csv文件,即每一行为一个example的文件,都可以使用这种方法进行数据读取\n", - "dataset = DataSet.read_csv('tutorial_sample_dataset.csv', headers=('raw_sentence', 'label'), sep='\\t')\n", - "# 查看DataSet的大小\n", - "len(dataset)" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'raw_sentence': A series of escapades demonstrating the adage that what is good for the goose is also good for the gander , some of which occasionally amuses but none of which amounts to much of a story . type=str,\n", - "'label': 1 type=str}" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 使用数字索引[k],获取第k个样本\n", - "dataset[0]" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "fastNLP.core.instance.Instance" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 获取的样本是一个Instance\n", - "type(dataset[0])" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "DataSet({'raw_sentence': A series of escapades demonstrating the adage that what is good for the goose is also good for the gander , some of which occasionally amuses but none of which amounts to much of a story . type=str,\n", - "'label': 1 type=str},\n", - "{'raw_sentence': This quiet , introspective and entertaining independent is worth seeking . type=str,\n", - "'label': 4 type=str},\n", - "{'raw_sentence': Even fans of Ismail Merchant 's work , I suspect , would have a hard time sitting through this one . 
type=str,\n", - "'label': 1 type=str})" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 使用数字索引[a: b],获取第a到第b个样本\n", - "dataset[0: 3]" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'raw_sentence': A film that clearly means to preach exclusively to the converted . type=str,\n", - "'label': 2 type=str}" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 索引也可以是负数\n", - "dataset[-1]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 自行读取并组织DataSet\n", - "以SNLI数据集为例,\n", - "SNLI数据集的训练、验证、测试集分别三个文件组成:第一个文件每一行是一句话,代表一个example当中的premise;第二个文件每一行也是一句话,代表一个example当中的hypothesis;第三个文件每一行是一个label" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'premise': A person on a horse jumps over a broken down airplane . type=str,\n", - "'hypothesis': A person is training his horse for a competition . type=str,\n", - "'truth': 1\n", - " type=str}" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "data_path = ['premise', 'hypothesis', 'label']\n", - "\n", - "# 读入文件\n", - "with open(data_path[0]) as f:\n", - " premise = f.readlines()\n", - "\n", - "with open(data_path[1]) as f:\n", - " hypothesis = f.readlines()\n", - "\n", - "with open(data_path[2]) as f:\n", - " label = f.readlines()\n", - "\n", - "assert len(premise) == len(hypothesis) and len(hypothesis) == len(label)\n", - "\n", - "# 组织DataSet\n", - "data_set = DataSet()\n", - "for p, h, l in zip(premise, hypothesis, label):\n", - " p = p.strip() # 将行末空格去除\n", - " h = h.strip() # 将行末空格去除\n", - " data_set.append(Instance(premise=p, hypothesis=h, truth=l))\n", - "\n", - "data_set[0]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### DataSet的其他操作\n", - "在构建完毕DataSet后,仍然可以对DataSet的内容进行操作,函数接口为DataSet.apply()" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "DataSet({'premise': a woman is walking across the street eating a banana , while a man is following with his briefcase . type=str,\n", - "'hypothesis': An actress and her favorite assistant talk a walk in the city . type=str,\n", - "'truth': 1\n", - " type=str},\n", - "{'premise': a woman is walking across the street eating a banana , while a man is following with his briefcase . type=str,\n", - "'hypothesis': a woman eating a banana crosses a street type=str,\n", - "'truth': 0\n", - " type=str})" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 将premise域的所有文本转成小写\n", - "data_set.apply(lambda x: x['premise'].lower(), new_field_name='premise')\n", - "data_set[-2: ]" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "DataSet({'premise': a woman is walking across the street eating a banana , while a man is following with his briefcase . type=str,\n", - "'hypothesis': An actress and her favorite assistant talk a walk in the city . type=str,\n", - "'truth': 1 type=int},\n", - "{'premise': a woman is walking across the street eating a banana , while a man is following with his briefcase . 
type=str,\n", - "'hypothesis': a woman eating a banana crosses a street type=str,\n", - "'truth': 0 type=int})" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# label转int\n", - "data_set.apply(lambda x: int(x['truth']), new_field_name='truth')\n", - "data_set[-2: ]" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "DataSet({'premise': ['a', 'woman', 'is', 'walking', 'across', 'the', 'street', 'eating', 'a', 'banana', ',', 'while', 'a', 'man', 'is', 'following', 'with', 'his', 'briefcase', '.'] type=list,\n", - "'hypothesis': ['An', 'actress', 'and', 'her', 'favorite', 'assistant', 'talk', 'a', 'walk', 'in', 'the', 'city', '.'] type=list,\n", - "'truth': 1 type=int},\n", - "{'premise': ['a', 'woman', 'is', 'walking', 'across', 'the', 'street', 'eating', 'a', 'banana', ',', 'while', 'a', 'man', 'is', 'following', 'with', 'his', 'briefcase', '.'] type=list,\n", - "'hypothesis': ['a', 'woman', 'eating', 'a', 'banana', 'crosses', 'a', 'street'] type=list,\n", - "'truth': 0 type=int})" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 使用空格分割句子\n", - "def split_sent(ins):\n", - " return ins['premise'].split()\n", - "data_set.apply(split_sent, new_field_name='premise')\n", - "data_set.apply(lambda x: x['hypothesis'].split(), new_field_name='hypothesis')\n", - "data_set[-2:]" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(100, 97)" - ] - }, - "execution_count": 16, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 筛选数据\n", - "origin_data_set_len = len(data_set)\n", - "data_set.drop(lambda x: len(x['premise']) <= 6)\n", - "origin_data_set_len, len(data_set)" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'premise': ['a', 'woman', 'is', 'walking', 'across', 'the', 'street', 'eating', 'a', 'banana', ',', 'while', 'a', 'man', 'is', 'following', 'with', 'his', 'briefcase', '.'] type=list,\n", - "'hypothesis': ['a', 'woman', 'eating', 'a', 'banana', 'crosses', 'a', 'street'] type=list,\n", - "'truth': 0 type=int,\n", - "'premise_len': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", - "'hypothesis_len': [1, 1, 1, 1, 1, 1, 1, 1] type=list}" - ] - }, - "execution_count": 17, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 增加长度信息\n", - "data_set.apply(lambda x: [1] * len(x['premise']), new_field_name='premise_len')\n", - "data_set.apply(lambda x: [1] * len(x['hypothesis']), new_field_name='hypothesis_len')\n", - "data_set[-1]" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [], - "source": [ - "# 设定特征域、标签域\n", - "data_set.rename_field(\"premise\",\"words1\")\n", - "data_set.rename_field(\"premise_len\",\"seq_len1\")\n", - "data_set.rename_field(\"hypothesis\",\"words2\")\n", - "data_set.rename_field(\"hypothesis_len\",\"seq_len2\")\n", - "data_set.set_input(\"words1\", \"seq_len1\", \"words2\", \"seq_len2\")\n", - "data_set.set_target(\"truth\")" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'words1': ['a', 'woman', 'is', 'walking', 'across', 'the', 'street', 'eating', 'a', 'banana', ',', 'while', 'a', 'man', 'is', 
'following', 'with', 'his', 'briefcase', '.'] type=list,\n", - "'seq_len1': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", - "'words2': ['a', 'woman', 'eating', 'a', 'banana', 'crosses', 'a', 'street'] type=list,\n", - "'seq_len2': [1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", - "'label': 0 type=int}" - ] - }, - "execution_count": 19, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 重命名field\n", - "data_set.rename_field('truth', 'label')\n", - "data_set[-1]" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(49, 29, 19)" - ] - }, - "execution_count": 20, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 切分训练、验证集、测试集\n", - "train_data, vad_data = data_set.split(0.5)\n", - "dev_data, test_data = vad_data.split(0.4)\n", - "len(train_data), len(dev_data), len(test_data)" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [], - "source": [ - "# 深拷贝一个数据集\n", - "import copy\n", - "train_data_2, dev_data_2 = copy.deepcopy(train_data), copy.deepcopy(dev_data)\n", - "del copy" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### DataSet的总结:\n", - "将DataSet的ield设置为input和target后,这些field在接下来的代码中将被使用。其中被设置为input的field会被传递给Model.forward,这个过程是通过键匹配的方式进行的。举例如下: \n", - "假设DataSet中有'x1', 'x2', 'x3'被设置为了input,而 \n", - "   (1)函数是Model.forward(self, x1, x3), 那么DataSet中'x1', 'x3'会被传递给forward函数。多余的'x2'会被忽略 \n", - "   (2)函数是Model.forward(self, x1, x4), 这里多需要了一个'x4', 但是DataSet的input field中没有这个field,会报错。 \n", - "   (3)函数是Model.forward(self, x1, kwargs), 会把'x1', 'x2', 'x3'都传入。但如果是Model.forward(self, x4, kwargs)就会发生报错,因为没有'x4'。 \n", - "   (4)对于设置为target的field的名称,我们建议取名为'target'(如果只有一个需要predict的值),但是不强制。如果这个名称不是target,那么在加载loss函数的时候需要手动添加名称转换map" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Vocabulary\n", - "fastNLP中的Vocabulary轻松构建词表,并将词转成数字。构建词表有两种方式:根据数据集构建词表;载入现有词表\n", - "### 根据数据集构建词表" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": {}, - "outputs": [], - "source": [ - "# 初始化词表,该词表最大的vocab_size为10000,词表中每个词出现的最低频率为2,''表示未知词语,''表示padding词语\n", - "# Vocabulary默认初始化参数为max_size=None, min_freq=None, unknown='', padding=''\n", - "vocab = Vocabulary(max_size=10000, min_freq=2, unknown='', padding='')\n", - "\n", - "# 构建词表\n", - "train_data.apply(lambda x: [vocab.add(word) for word in x['words1']])\n", - "train_data.apply(lambda x: [vocab.add(word) for word in x['words2']])\n", - "vocab.build_vocab()" - ] - }, - { - "cell_type": "code", - "execution_count": 24, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "({'words1': [2, 9, 4, 2, 75, 85, 7, 86, 76, 77, 87, 88, 89, 2, 90, 3] type=list,\n", - " 'seq_len1': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", - " 'words2': [18, 9, 10, 1, 3] type=list,\n", - " 'seq_len2': [1, 1, 1, 1, 1] type=list,\n", - " 'label': 1 type=int},\n", - " {'words1': [22, 32, 5, 110, 81, 111, 112, 5, 82, 3] type=list,\n", - " 'seq_len1': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", - " 'words2': [64, 32, 82, 133, 84, 3] type=list,\n", - " 'seq_len2': [1, 1, 1, 1, 1, 1] type=list,\n", - " 'label': 0 type=int},\n", - " {'words1': [2, 9, 97, 1, 20, 7, 54, 5, 1, 1, 70, 2, 11, 110, 2, 62, 3] type=list,\n", - " 'seq_len1': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", - " 'words2': [23, 1, 58, 10, 12, 1, 70, 133, 84, 3] type=list,\n", - " 'seq_len2': [1, 1, 
1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", - " 'label': 1 type=int})" - ] - }, - "execution_count": 24, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 根据词表index句子\n", - "train_data.apply(lambda x: [vocab.to_index(word) for word in x['words1']], new_field_name='words1')\n", - "train_data.apply(lambda x: [vocab.to_index(word) for word in x['words2']], new_field_name='words2')\n", - "dev_data.apply(lambda x: [vocab.to_index(word) for word in x['words1']], new_field_name='words1')\n", - "dev_data.apply(lambda x: [vocab.to_index(word) for word in x['words2']], new_field_name='words2')\n", - "test_data.apply(lambda x: [vocab.to_index(word) for word in x['words1']], new_field_name='words1')\n", - "test_data.apply(lambda x: [vocab.to_index(word) for word in x['words2']], new_field_name='words2')\n", - "train_data[-1], dev_data[-1], test_data[-1]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 载入现有词表\n", - "以BERT pretrained model为例,词表由一个vocab.txt文件来保存\n", - "用以下方法可以载入现有词表,并保证词表顺序不变" - ] - }, - { - "cell_type": "code", - "execution_count": 25, - "metadata": {}, - "outputs": [], - "source": [ - "# 读入vocab文件\n", - "with open('vocab.txt') as f:\n", - " lines = f.readlines()\n", - "vocabs = []\n", - "for line in lines:\n", - " vocabs.append(line.strip())\n", - "\n", - "# 实例化Vocabulary\n", - "vocab_bert = Vocabulary(unknown=None, padding=None)\n", - "# 将vocabs列表加入Vocabulary\n", - "vocab_bert.add_word_lst(vocabs)\n", - "# 构建词表\n", - "vocab_bert.build_vocab()\n", - "# 更新unknown与padding的token文本\n", - "vocab_bert.unknown = '[UNK]'\n", - "vocab_bert.padding = '[PAD]'" - ] - }, - { - "cell_type": "code", - "execution_count": 26, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "({'words1': [1037, 2450, 1999, 1037, 2665, 6598, 1998, 7415, 2058, 2014, 2132, 2559, 2875, 1037, 3028, 1012] type=list,\n", - " 'seq_len1': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", - " 'words2': [100, 2450, 2003, 3147, 1012] type=list,\n", - " 'seq_len2': [1, 1, 1, 1, 1] type=list,\n", - " 'label': 1 type=int},\n", - " {'words1': [2048, 2308, 1010, 3173, 2833, 100, 16143, 1010, 8549, 1012] type=list,\n", - " 'seq_len1': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1] type=list,\n", - " 'words2': [100, 2308, 8549, 2169, 2060, 1012] type=list,\n", - " 'seq_len2': [1, 1, 1, 1, 1, 1] type=list,\n", - " 'label': 0 type=int})" - ] - }, - "execution_count": 26, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# 根据词表index句子\n", - "train_data_2.apply(lambda x: [vocab_bert.to_index(word) for word in x['words1']], new_field_name='words1')\n", - "train_data_2.apply(lambda x: [vocab_bert.to_index(word) for word in x['words2']], new_field_name='words2')\n", - "dev_data_2.apply(lambda x: [vocab_bert.to_index(word) for word in x['words1']], new_field_name='words1')\n", - "dev_data_2.apply(lambda x: [vocab_bert.to_index(word) for word in x['words2']], new_field_name='words2')\n", - "train_data_2[-1], dev_data_2[-1]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 模型部分\n", - "## Model\n", - "模型部分fastNLP提供两种使用方式:调用fastNLP现有模型;开发者自行搭建模型\n", - "### 调用fastNLP现有模型" - ] - }, - { - "cell_type": "code", - "execution_count": 27, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'embed_dim': 300,\n", - " 'hidden_size': 300,\n", - " 'batch_first': True,\n", - " 'dropout': 0.3,\n", - " 'num_classes': 3,\n", - " 'gpu': True,\n", - " 'batch_size': 32,\n", - " 'vocab_size': 143}" - ] - }, - 
"execution_count": 27, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# step 1:加载模型参数(非必选)\n", - "from fastNLP.io.config_io import ConfigSection, ConfigLoader\n", - "args = ConfigSection()\n", - "ConfigLoader().load_config(\"./data/config\", {\"esim_model\": args})\n", - "args[\"vocab_size\"] = len(vocab)\n", - "args.data" - ] - }, - { - "cell_type": "code", - "execution_count": 28, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "ESIM(\n", - " (drop): Dropout(p=0.3)\n", - " (embedding): Embedding(\n", - " 143, 300\n", - " (dropout): Dropout(p=0.3)\n", - " )\n", - " (embedding_layer): Linear(in_features=300, out_features=300, bias=True)\n", - " (encoder): LSTM(\n", - " (lstm): LSTM(300, 300, batch_first=True, bidirectional=True)\n", - " )\n", - " (bi_attention): BiAttention()\n", - " (mean_pooling): MeanPoolWithMask()\n", - " (max_pooling): MaxPoolWithMask()\n", - " (inference_layer): Linear(in_features=1200, out_features=300, bias=True)\n", - " (decoder): LSTM(\n", - " (lstm): LSTM(300, 300, batch_first=True, bidirectional=True)\n", - " )\n", - " (output): MLP(\n", - " (hiddens): ModuleList(\n", - " (0): Linear(in_features=1200, out_features=300, bias=True)\n", - " )\n", - " (output): Linear(in_features=300, out_features=3, bias=True)\n", - " (dropout): Dropout(p=0.3)\n", - " )\n", - ")" - ] - }, - "execution_count": 28, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# step 2:加载ESIM模型\n", - "from fastNLP.models import ESIM\n", - "model = ESIM(args[\"vocab_size\"], args[\"embed_dim\"], args[\"hidden_size\"], args[\"dropout\"], args[\"num_classes\"])\n", - "model" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "这是上述模型的forward方法。如果你不知道什么是forward方法,请参考我们的PyTorch教程。\n", - "\n", - "注意两点:\n", - "1. forward参数名字叫**word_seq**,请记住。\n", - "2. 
forward的返回值是一个**dict**,其中有个key的名字叫**pred**。\n", - "\n", - "```Python\n", - " def forward(self, word_seq):\n", - " \"\"\"\n", - "\n", - " :param word_seq: torch.LongTensor, [batch_size, seq_len]\n", - " :return output: dict of torch.LongTensor, [batch_size, num_classes]\n", - " \"\"\"\n", - " x = self.embed(word_seq) # [N,L] -> [N,L,C]\n", - " x = self.conv_pool(x) # [N,L,C] -> [N,C]\n", - " x = self.dropout(x)\n", - " x = self.fc(x) # [N,C] -> [N, N_class]\n", - " return {'pred': x}\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 自行搭建模型\n", - "自行搭建的模型必须是nn.Module的子类, \n", - "(1)必须实现forward方法,并且forward方法不能出现**\*arg**这种参数,例如\n", - "```Python\n", - " def forward(self, word_seq, *args): # 这是不允许的\n", - " xxx\n", - "```\n", - "forward函数的返回值必须是一个**dict**。 \n", - "dict当中模型预测的值所对应的key建议用**'pred'**,这里不做强制限制,但是如果不是pred的话,在加载loss函数的时候需要手动添加名称转换map \n", - "(2)如果实现了predict方法,在做evaluation的时候将调用predict方法而不是forward。如果没有predict方法,则在evaluation时调用forward方法。predict方法也不能使用\*args这种参数形式,同时结果也必须返回一个dict,同样推荐key为'pred'。 \n", - "(3)forward函数可以计算loss并返回结果,在dict中的key为'loss',如: \n", - "```Python\n", - " def forward(self, word_seq): \n", - " xxx\n", - " return {'pred': pred, 'loss': loss}\n", - "```\n", - "当loss函数没有在trainer里被定义的时候,trainer将会根据forward函数返回的dict中key为'loss'的值来进行反向传播,具体见loss部分" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 训练测试部分\n", - "## Loss\n", - "### 键映射\n", - "根据上文DataSet与Model部分可以知道,fastNLP并不限制Model.forward()的返回值,也不限制DataSet中target field的key。因此在计算loss的时候,需要通过键映射的方式来完成取值。 \n", - "这里以CrossEntropyLoss为例,我们的交叉熵函数部分如下:\n", - "```Python\n", - " def get_loss(self, pred, target):\n", - " return F.cross_entropy(input=pred, target=target,\n", - " ignore_index=self.padding_idx)\n", - "```\n", - "这里接收的两个参数名字分别为pred和target,其中pred是从model的forward函数返回值中取得,target是从DataSet的is_target的field当中取得。在没有设置键映射的基础上,pred从model的forward函数返回的dict中取'pred'键得到;target从DataSet的'target'field中得到。" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 修改键映射\n", - "在初始化CrossEntropyLoss的时候,可以传入两个参数(pred=None, target=None), 这两个参数接受的类型是str,假设(pred='output', target='label'),那么CrossEntropyLoss会使用'output'这个key在forward的output与batch_y中寻找值;'label'也是在forward的output与dataset的is_target field中寻找值。注意这里pred或target的来源并不一定非要来自于model.forward与dataset的is_target field,也可以只来自于forward的结果" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 创建一个自己的loss\n", - "    (1)采用fastNLP.LossInForward,在model.forward()的结果中包含一个'loss'的key,具体见**自行搭建模型**部分 \n", - "    (2)自己定义一个继承于fastNLP.core.losses.LossBase的class。重写get_loss方法。 \n", - "      (2.1)在初始化自己的loss class的时候,需要初始化需要映射的值 \n", - "      (2.2)在get_loss函数中,参数的名字需要与初始化时的映射是一致的 \n", - "以L1Loss为例子:\n", - "```Python\n", - "class L1Loss(LossBase):\n", - " def __init__(self, pred=None, target=None):\n", - " super(L1Loss, self).__init__()\n", - " \"\"\"\n", - " 这里传入_init_param_map以使得pred和target被正确注册,但这一步不是必须的, 建议调用。传入_init_param_map的是用于\n", - " \"键映射\"的键值对。假设初始化__init__(pred=None, target=None, threshold=0.1)中threshold是用于控制loss计算的,\n", - " 则不要将threshold传入_init_param_map.\n", - " \"\"\"\n", - " self._init_param_map(pred=pred, target=target)\n", - "\n", - " def get_loss(self, pred, target):\n", - " # 这里'pred', 'target'必须和初始化的映射是一致的。\n", - " return F.l1_loss(input=pred, target=target)\n", - "```\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Trainer\n", - "trainer的作用是训练模型,是一个对训练过程的封装。trainer当中比较重要的函数是trainer.train(),train函数的主要步骤包括: \n", - "(1)创建batch \n", - 
"```Python\n", - "batch = Batch(dataset, batch_size, sampler=sampler)\n", - "```\n", - "(2)for batch_x, batch_y in batch: (batch_x, batch_y的内容分别为dataset中is input和is target的部分,这两个dict的key就是DataSet中的key,value会根据情况做好padding及tensor) \n", - "  (2.1)将batch_x, batch_y中的tensor移动到model所在的device \n", - "  (2.2)根据model.forward的参数列表,从batch_x中取出需要传递给forward的数据 \n", - "  (2.3)获取model.forward返回的dict,并与batch_y一起传递给loss函数,求得loss \n", - "  (2.4)对loss进行反向梯度传播并更新参数 \n", - "(3)如果有验证集,则进行验证 \n", - "(4)如果验证集的结果是当前最佳结果,则保存模型" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "除了以上的内容, Trainer中还提供了\"预跑\"的功能。该功能通过check_code_level管理,如果check_code_level为-1,则不进行\"预跑\"。 check_code_level=0,1,2代表不同的提醒级别。目前不同提醒级别对应的是对DataSet中设置为input或target但又没有使用的field的提醒级别。 0是忽略(默认);1是会warning发生了未使用field的情况;2是出现了unused会直接报错并退出运行 \"预跑\"的主要目的有两个: \n", - "(1)防止train完了之后进行evaluation的时候出现错误。之前的train就白费了 \n", - "(2)由于存在\"键映射\",直接运行导致的报错可能不太容易debug,通过\"预跑\"过程的报错会有一些debug提示 \"预跑\"会进行以下的操作: \n", - "  (i) 使用很小的batch_size, 检查batch_x中是否包含Model.forward所需要的参数。只会运行两个循环。 \n", - "  (ii)将Model.foward的输出pred_dict与batch_y输入到loss中,并尝试backward。不会更新参数,而且grad会被清零。如果传入了dev_data,还将进行metric的测试 \n", - "  (iii)创建Tester,并传入少量数据,检测是否可以正常运行 \n", - "\"预跑\"操作是在Trainer初始化的时候执行的。正常情况下,应该不需要改动\"预跑\"的代码。但如果遇到bug或者有什么好的建议,欢迎在开发群或者github提交issue。" - ] - }, - { - "cell_type": "code", - "execution_count": 29, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "training epochs started 2019-05-14-19-49-25\n" - ] - }, - { - "ename": "AssertionError", - "evalue": "seq_len can only have one dimension, got False.", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mAssertionError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 17\u001b[0m \u001b[0muse_tqdm\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 18\u001b[0m )\n\u001b[0;32m---> 19\u001b[0;31m \u001b[0mtrainer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/core/trainer.py\u001b[0m in \u001b[0;36mtrain\u001b[0;34m(self, load_best_model)\u001b[0m\n\u001b[1;32m 522\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 523\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcallback_manager\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mon_train_begin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 524\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_train\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 525\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcallback_manager\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mon_train_end\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 526\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mCallbackException\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mKeyboardInterrupt\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mas\u001b[0m 
\u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/core/trainer.py\u001b[0m in \u001b[0;36m_train\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 573\u001b[0m \u001b[0;31m# negative sampling; replace unknown; re-weight batch_y\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 574\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcallback_manager\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mon_batch_begin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch_x\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch_y\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mindices\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 575\u001b[0;31m \u001b[0mprediction\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_data_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch_x\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 576\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 577\u001b[0m \u001b[0;31m# edit prediction\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/core/trainer.py\u001b[0m in \u001b[0;36m_data_forward\u001b[0;34m(self, network, x)\u001b[0m\n\u001b[1;32m 661\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_data_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnetwork\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 662\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_build_args\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnetwork\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 663\u001b[0;31m \u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnetwork\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 664\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0my\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 665\u001b[0m raise TypeError(\n", - "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m 489\u001b[0m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_slow_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 490\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 491\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 492\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mhook\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_forward_hooks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 493\u001b[0m \u001b[0mhook_result\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mhook\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/models/snli.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, words1, words2, seq_len1, seq_len2, target)\u001b[0m\n\u001b[1;32m 76\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 77\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mseq_len1\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 78\u001b[0;31m \u001b[0mseq_len1\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mseq_len_to_mask\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mseq_len1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 79\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 80\u001b[0m \u001b[0mseq_len1\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mones\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpremise0\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msize\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpremise0\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msize\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/core/utils.py\u001b[0m in \u001b[0;36mseq_len_to_mask\u001b[0;34m(seq_len)\u001b[0m\n\u001b[1;32m 626\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 627\u001b[0m \u001b[0;32melif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mseq_len\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTensor\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 628\u001b[0;31m \u001b[0;32massert\u001b[0m \u001b[0mseq_len\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdim\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34mf\"seq_len can only have one dimension, got {seq_len.dim() == 1}.\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 629\u001b[0m \u001b[0mbatch_size\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mseq_len\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msize\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 630\u001b[0m 
\u001b[0mmax_len\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mseq_len\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmax\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlong\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mAssertionError\u001b[0m: seq_len can only have one dimension, got False." - ] - } - ], - "source": [ - "from fastNLP import CrossEntropyLoss\n", - "from fastNLP import Adam\n", - "from fastNLP import AccuracyMetric\n", - "trainer = Trainer(\n", - " train_data=train_data,\n", - " model=model,\n", - " loss=CrossEntropyLoss(pred='pred', target='label'), # 模型预测值通过'pred'来取得,目标值(ground truth)由'label'取得\n", - " metrics=AccuracyMetric(target='label'), # 目标值(ground truth)由'label'取得\n", - " n_epochs=5,\n", - " batch_size=16,\n", - " print_every=-1,\n", - " validate_every=-1,\n", - " dev_data=dev_data,\n", - " optimizer=Adam(lr=1e-3, weight_decay=0),\n", - " check_code_level=-1,\n", - " metric_key='acc',\n", - " use_tqdm=False,\n", - ")\n", - "trainer.train()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Tester\n", - "tester的作用是评估(测试)模型,是一个对评估过程的封装。tester当中比较重要的函数是tester.test(),test函数的主要步骤包括:\n", - "(1)创建batch\n", - "```Python\n", - "batch = Batch(dataset, batch_size, sampler=sampler)\n", - "```\n", - "(2)for batch_x, batch_y in batch: (batch_x, batch_y的内容分别为dataset中is input和is target的部分,这两个dict的key就是DataSet中的key,value会根据情况做好padding及tensor) \n", - "  (2.1)同步数据与model:将batch_x, batch_y中的tensor移动到model所在的device \n", - "  (2.2)根据predict_func的参数列表,从batch_x中取出需要传递给predict_func的数据,得到结果pred_dict \n", - "  (2.3)调用metric(pred_dict, batch_y) \n", - "  (2.4)当所有batch都运行完毕,会调用metric的get_metric方法,并且以返回的值作为evaluation的结果 " - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[tester] \n", - "AccuracyMetric: acc=0.368421\n" - ] - }, - { - "data": { - "text/plain": [ - "{'AccuracyMetric': {'acc': 0.368421}}" - ] - }, - "execution_count": 30, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "tester = Tester(\n", - " data=test_data,\n", - " model=model,\n", - " metrics=AccuracyMetric(target='label'),\n", - " batch_size=args[\"batch_size\"],\n", - ")\n", - "tester.test()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.7" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/tutorials/fastnlp_advanced_tutorial/data/config b/tutorials/fastnlp_advanced_tutorial/data/config deleted file mode 100644 index 87348b72..00000000 --- a/tutorials/fastnlp_advanced_tutorial/data/config +++ /dev/null @@ -1,8 +0,0 @@ -[esim_model] -embed_dim = 300 -hidden_size = 300 -batch_first = true -dropout = 0.3 
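Putting the Loss, Trainer and Tester pieces described above together, a minimal end-to-end sketch might look as follows. It reuses the train_data/dev_data/test_data and model objects built in the cells above and assumes the same fastNLP API shown there (LossBase with _init_param_map, the Trainer/Tester constructor arguments); MyLoss is an illustrative name, not a class provided by the library.

```Python
import torch.nn.functional as F
from fastNLP import Trainer, Tester, AccuracyMetric, Adam
from fastNLP.core.losses import LossBase

class MyLoss(LossBase):
    def __init__(self, pred=None, target=None):
        super(MyLoss, self).__init__()
        # register the key mapping, exactly as in the L1Loss example above
        self._init_param_map(pred=pred, target=target)

    def get_loss(self, pred, target):
        # argument names must match the keys registered in __init__
        return F.cross_entropy(input=pred, target=target)

trainer = Trainer(
    train_data=train_data,
    model=model,
    loss=MyLoss(pred='pred', target='label'),   # 'pred' from forward's dict, 'label' from the target field
    metrics=AccuracyMetric(target='label'),
    dev_data=dev_data,
    n_epochs=5,
    batch_size=16,
    optimizer=Adam(lr=1e-3, weight_decay=0),
)
trainer.train()

# evaluate the best model on the held-out test set
tester = Tester(data=test_data, model=model, metrics=AccuracyMetric(target='label'), batch_size=16)
tester.test()
```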
-num_classes = 3 -gpu = true -batch_size = 32 diff --git a/tutorials/fastnlp_advanced_tutorial/hypothesis b/tutorials/fastnlp_advanced_tutorial/hypothesis deleted file mode 100644 index 07aec3c9..00000000 --- a/tutorials/fastnlp_advanced_tutorial/hypothesis +++ /dev/null @@ -1,100 +0,0 @@ -A person is training his horse for a competition . -A person is at a diner , ordering an omelette . -A person is outdoors , on a horse . -They are smiling at their parents -There are children present -The kids are frowning -The boy skates down the sidewalk . -The boy does a skateboarding trick . -The boy is wearing safety equipment . -An older man drinks his juice as he waits for his daughter to get off work . -A boy flips a burger . -An elderly man sits in a small shop . -Some women are hugging on vacation . -The women are sleeping . -There are women showing affection . -The people are eating omelettes . -The people are sitting at desks in school . -The diners are at a restaurant . -A man is drinking juice . -Two women are at a restaurant drinking wine . -A man in a restaurant is waiting for his meal to arrive . -A blond man getting a drink of water from a fountain in the park . -A blond man wearing a brown shirt is reading a book on a bench in the park -A blond man drinking water from a fountain . -The friends scowl at each other over a full dinner table . -There are two woman in this picture . -The friends have just met for the first time in 20 years , and have had a great time catching up . -The two sisters saw each other across the crowded diner and shared a hug , both clutching their doggie bags . -Two groups of rival gang members flipped each other off . -Two women hug each other . -A team is trying to score the games winning out . -A team is trying to tag a runner out . -A team is playing baseball on Saturn . -A school hosts a basketball game . -A high school is hosting an event . -A school is hosting an event . -The women do not care what clothes they wear . -Women are waiting by a tram . -The women enjoy having a good fashion sense . -A child with mom and dad , on summer vacation at the beach . -A family of three is at the beach . -A family of three is at the mall shopping . -The people waiting on the train are sitting . -There are people just getting on a train -There are people waiting on a train . -A couple are playing with a young child outside . -A couple are playing frisbee with a young child at the beach . -A couple watch a little girl play by herself on the beach . -The family is sitting down for dinner . -The family is outside . -The family is on vacation . -The people are standing still on the curb . -Near a couple of restaurants , two people walk across the street . -The couple are walking across the street together . -The woman is nake . -The woman is cold . -The woman is wearing green . -The man with the sign is caucasian . -They are protesting outside the capital . -A woman in white . -A man is advertising for a restaurant . -The woman is wearing black . -A man and a woman walk down a crowded city street . -The woman is wearing white . -They are working for John 's Pizza . -Olympic swimming . -A man and a soman are eating together at John 's Pizza and Gyro . -They are walking with a sign . -The woman is waiting for a friend . -The man is sitting down while he has a sign for John 's Pizza and Gyro in his arms . -The woman and man are outdoors . -A woman ordering pizza . -The people are related . -Two adults run across the street to get away from a red shirted person chasing them . 
-The adults are both male and female . -Two people walk home after a tasty steak dinner . -Two adults swimming in water -Two adults walk across a street . -Two people ride bicycles into a tunnel . -Two people walk away from a restaurant across a street . -Two adults walking across a road near the convicted prisoner dressed in red -Two friends cross a street . -Some people board a train . -Two adults walk across the street . -Two adults walking across a road -There are no women in the picture . -Two adults walk across the street to get away from a red shirted person who is chasing them . -A married couple is sleeping . -A female is next to a man . -A married couple is walking next to each other . -Nobody has food . -A woman eats a banana and walks across a street , and there is a man trailing behind her . -The woman and man are playing baseball together . -two coworkers cross pathes on a street -A woman eats ice cream walking down the sidewalk , and there is another woman in front of her with a purse . -The mans briefcase is for work . -A person eating . -A person that is hungry . -An actress and her favorite assistant talk a walk in the city . -a woman eating a banana crosses a street diff --git a/tutorials/fastnlp_advanced_tutorial/label b/tutorials/fastnlp_advanced_tutorial/label deleted file mode 100644 index e28836df..00000000 --- a/tutorials/fastnlp_advanced_tutorial/label +++ /dev/null @@ -1,100 +0,0 @@ -1 -2 -0 -1 -0 -2 -2 -0 -1 -1 -2 -1 -1 -2 -0 -1 -2 -0 -0 -2 -1 -1 -2 -0 -2 -0 -1 -1 -2 -0 -1 -0 -2 -2 -1 -0 -2 -0 -1 -1 -0 -2 -1 -0 -0 -0 -1 -2 -2 -0 -1 -2 -0 -1 -2 -1 -0 -1 -2 -0 -0 -2 -1 -0 -1 -2 -2 -0 -1 -2 -0 -1 -1 -2 -0 -1 -2 -0 -2 -0 -1 -1 -2 -0 -0 -2 -1 -2 -0 -1 -2 -0 -2 -1 -2 -1 -0 -1 -1 -0 diff --git a/tutorials/fastnlp_advanced_tutorial/premise b/tutorials/fastnlp_advanced_tutorial/premise deleted file mode 100644 index 0c9af30e..00000000 --- a/tutorials/fastnlp_advanced_tutorial/premise +++ /dev/null @@ -1,100 +0,0 @@ -A person on a horse jumps over a broken down airplane . -A person on a horse jumps over a broken down airplane . -A person on a horse jumps over a broken down airplane . -Children smiling and waving at camera -Children smiling and waving at camera -Children smiling and waving at camera -A boy is jumping on skateboard in the middle of a red bridge . -A boy is jumping on skateboard in the middle of a red bridge . -A boy is jumping on skateboard in the middle of a red bridge . -An older man sits with his orange juice at a small table in a coffee shop while employees in bright colored shirts smile in the background . -An older man sits with his orange juice at a small table in a coffee shop while employees in bright colored shirts smile in the background . -An older man sits with his orange juice at a small table in a coffee shop while employees in bright colored shirts smile in the background . -Two blond women are hugging one another . -Two blond women are hugging one another . -Two blond women are hugging one another . -A few people in a restaurant setting , one of them is drinking orange juice . -A few people in a restaurant setting , one of them is drinking orange juice . -A few people in a restaurant setting , one of them is drinking orange juice . -An older man is drinking orange juice at a restaurant . -An older man is drinking orange juice at a restaurant . -An older man is drinking orange juice at a restaurant . -A man with blond-hair , and a brown shirt drinking out of a public water fountain . 
-A man with blond-hair , and a brown shirt drinking out of a public water fountain . -A man with blond-hair , and a brown shirt drinking out of a public water fountain . -Two women who just had lunch hugging and saying goodbye . -Two women who just had lunch hugging and saying goodbye . -Two women who just had lunch hugging and saying goodbye . -Two women , holding food carryout containers , hug . -Two women , holding food carryout containers , hug . -Two women , holding food carryout containers , hug . -A Little League team tries to catch a runner sliding into a base in an afternoon game . -A Little League team tries to catch a runner sliding into a base in an afternoon game . -A Little League team tries to catch a runner sliding into a base in an afternoon game . -The school is having a special event in order to show the american culture on how other cultures are dealt with in parties . -The school is having a special event in order to show the american culture on how other cultures are dealt with in parties . -The school is having a special event in order to show the american culture on how other cultures are dealt with in parties . -High fashion ladies wait outside a tram beside a crowd of people in the city . -High fashion ladies wait outside a tram beside a crowd of people in the city . -High fashion ladies wait outside a tram beside a crowd of people in the city . -A man , woman , and child enjoying themselves on a beach . -A man , woman , and child enjoying themselves on a beach . -A man , woman , and child enjoying themselves on a beach . -People waiting to get on a train or just getting off . -People waiting to get on a train or just getting off . -People waiting to get on a train or just getting off . -A couple playing with a little boy on the beach . -A couple playing with a little boy on the beach . -A couple playing with a little boy on the beach . -A couple play in the tide with their young son . -A couple play in the tide with their young son . -A couple play in the tide with their young son . -A man and a woman cross the street in front of a pizza and gyro restaurant . -A man and a woman cross the street in front of a pizza and gyro restaurant . -A man and a woman cross the street in front of a pizza and gyro restaurant . -A woman in a green jacket and hood over her head looking towards a valley . -A woman in a green jacket and hood over her head looking towards a valley . -A woman in a green jacket and hood over her head looking towards a valley . -Woman in white in foreground and a man slightly behind walking with a sign for John 's Pizza and Gyro in the background . -Woman in white in foreground and a man slightly behind walking with a sign for John 's Pizza and Gyro in the background . -Woman in white in foreground and a man slightly behind walking with a sign for John 's Pizza and Gyro in the background . -Woman in white in foreground and a man slightly behind walking with a sign for John 's Pizza and Gyro in the background . -Woman in white in foreground and a man slightly behind walking with a sign for John 's Pizza and Gyro in the background . -Woman in white in foreground and a man slightly behind walking with a sign for John 's Pizza and Gyro in the background . -Woman in white in foreground and a man slightly behind walking with a sign for John 's Pizza and Gyro in the background . -Woman in white in foreground and a man slightly behind walking with a sign for John 's Pizza and Gyro in the background . 
-Woman in white in foreground and a man slightly behind walking with a sign for John 's Pizza and Gyro in the background . -Woman in white in foreground and a man slightly behind walking with a sign for John 's Pizza and Gyro in the background . -Woman in white in foreground and a man slightly behind walking with a sign for John 's Pizza and Gyro in the background . -Woman in white in foreground and a man slightly behind walking with a sign for John 's Pizza and Gyro in the background . -Woman in white in foreground and a man slightly behind walking with a sign for John 's Pizza and Gyro in the background . -Woman in white in foreground and a man slightly behind walking with a sign for John 's Pizza and Gyro in the background . -Woman in white in foreground and a man slightly behind walking with a sign for John 's Pizza and Gyro in the background . -Two adults , one female in white , with shades and one male , gray clothes , walking across a street , away from a eatery with a blurred image of a dark colored red shirted person in the foreground . -Two adults , one female in white , with shades and one male , gray clothes , walking across a street , away from a eatery with a blurred image of a dark colored red shirted person in the foreground . -Two adults , one female in white , with shades and one male , gray clothes , walking across a street , away from a eatery with a blurred image of a dark colored red shirted person in the foreground . -Two adults , one female in white , with shades and one male , gray clothes , walking across a street , away from a eatery with a blurred image of a dark colored red shirted person in the foreground . -Two adults , one female in white , with shades and one male , gray clothes , walking across a street , away from a eatery with a blurred image of a dark colored red shirted person in the foreground . -Two adults , one female in white , with shades and one male , gray clothes , walking across a street , away from a eatery with a blurred image of a dark colored red shirted person in the foreground . -Two adults , one female in white , with shades and one male , gray clothes , walking across a street , away from a eatery with a blurred image of a dark colored red shirted person in the foreground . -Two adults , one female in white , with shades and one male , gray clothes , walking across a street , away from a eatery with a blurred image of a dark colored red shirted person in the foreground . -Two adults , one female in white , with shades and one male , gray clothes , walking across a street , away from a eatery with a blurred image of a dark colored red shirted person in the foreground . -Two adults , one female in white , with shades and one male , gray clothes , walking across a street , away from a eatery with a blurred image of a dark colored red shirted person in the foreground . -Two adults , one female in white , with shades and one male , gray clothes , walking across a street , away from a eatery with a blurred image of a dark colored red shirted person in the foreground . -Two adults , one female in white , with shades and one male , gray clothes , walking across a street , away from a eatery with a blurred image of a dark colored red shirted person in the foreground . -Two adults , one female in white , with shades and one male , gray clothes , walking across a street , away from a eatery with a blurred image of a dark colored red shirted person in the foreground . 
-Two adults , one female in white , with shades and one male , gray clothes , walking across a street , away from a eatery with a blurred image of a dark colored red shirted person in the foreground . -Two adults , one female in white , with shades and one male , gray clothes , walking across a street , away from a eatery with a blurred image of a dark colored red shirted person in the foreground . -A woman wearing all white and eating , walks next to a man holding a briefcase . -A woman wearing all white and eating , walks next to a man holding a briefcase . -A woman wearing all white and eating , walks next to a man holding a briefcase . -A woman is walking across the street eating a banana , while a man is following with his briefcase . -A woman is walking across the street eating a banana , while a man is following with his briefcase . -A woman is walking across the street eating a banana , while a man is following with his briefcase . -A woman is walking across the street eating a banana , while a man is following with his briefcase . -A woman is walking across the street eating a banana , while a man is following with his briefcase . -A woman is walking across the street eating a banana , while a man is following with his briefcase . -A woman is walking across the street eating a banana , while a man is following with his briefcase . -A woman is walking across the street eating a banana , while a man is following with his briefcase . -A woman is walking across the street eating a banana , while a man is following with his briefcase . -A woman is walking across the street eating a banana , while a man is following with his briefcase . diff --git a/tutorials/fastnlp_advanced_tutorial/tutorial_sample_dataset.csv b/tutorials/fastnlp_advanced_tutorial/tutorial_sample_dataset.csv deleted file mode 100644 index e5c0a74f..00000000 --- a/tutorials/fastnlp_advanced_tutorial/tutorial_sample_dataset.csv +++ /dev/null @@ -1,77 +0,0 @@ -A series of escapades demonstrating the adage that what is good for the goose is also good for the gander , some of which occasionally amuses but none of which amounts to much of a story . 1 -This quiet , introspective and entertaining independent is worth seeking . 4 -Even fans of Ismail Merchant 's work , I suspect , would have a hard time sitting through this one . 1 -A positively thrilling combination of ethnography and all the intrigue , betrayal , deceit and murder of a Shakespearean tragedy or a juicy soap opera . 3 -Aggressive self-glorification and a manipulative whitewash . 1 -A comedy-drama of nearly epic proportions rooted in a sincere performance by the title character undergoing midlife crisis . 4 -Narratively , Trouble Every Day is a plodding mess . 1 -The Importance of Being Earnest , so thick with wit it plays like a reading from Bartlett 's Familiar Quotations 3 -But it does n't leave you with much . 1 -You could hate it for the same reason . 1 -There 's little to recommend Snow Dogs , unless one considers cliched dialogue and perverse escapism a source of high hilarity . 1 -Kung Pow is Oedekerk 's realization of his childhood dream to be in a martial-arts flick , and proves that sometimes the dreams of youth should remain just that . 1 -The performances are an absolute joy . 4 -Fresnadillo has something serious to say about the ways in which extravagant chance can distort our perspective and throw us off the path of good sense . 3 -I still like Moonlight Mile , better judgment be damned . 
3 -A welcome relief from baseball movies that try too hard to be mythic , this one is a sweet and modest and ultimately winning story . 3 -a bilingual charmer , just like the woman who inspired it 3 -Like a less dizzily gorgeous companion to Mr. Wong 's In the Mood for Love -- very much a Hong Kong movie despite its mainland setting . 2 -As inept as big-screen remakes of The Avengers and The Wild Wild West . 1 -It 's everything you 'd expect -- but nothing more . 2 -Best indie of the year , so far . 4 -Hatfield and Hicks make the oddest of couples , and in this sense the movie becomes a study of the gambles of the publishing world , offering a case study that exists apart from all the movie 's political ramifications . 3 -It 's like going to a house party and watching the host defend himself against a frothing ex-girlfriend . 1 -That the Chuck Norris `` grenade gag '' occurs about 7 times during Windtalkers is a good indication of how serious-minded the film is . 2 -The plot is romantic comedy boilerplate from start to finish . 2 -It arrives with an impeccable pedigree , mongrel pep , and almost indecipherable plot complications . 2 -A film that clearly means to preach exclusively to the converted . 2 -While The Importance of Being Earnest offers opportunities for occasional smiles and chuckles , it does n't give us a reason to be in the theater beyond Wilde 's wit and the actors ' performances . 1 -The latest vapid actor 's exercise to appropriate the structure of Arthur Schnitzler 's Reigen . 1 -More vaudeville show than well-constructed narrative , but on those terms it 's inoffensive and actually rather sweet . 2 -Nothing more than a run-of-the-mill action flick . 2 -Hampered -- no , paralyzed -- by a self-indulgent script ... that aims for poetry and ends up sounding like satire . 0 -Ice Age is the first computer-generated feature cartoon to feel like other movies , and that makes for some glacial pacing early on . 2 -There 's very little sense to what 's going on here , but the makers serve up the cliches with considerable dash . 2 -Cattaneo should have followed the runaway success of his first film , The Full Monty , with something different . 2 -They 're the unnamed , easily substitutable forces that serve as whatever terror the heroes of horror movies try to avoid . 1 -It almost feels as if the movie is more interested in entertaining itself than in amusing us . 1 -The movie 's progression into rambling incoherence gives new meaning to the phrase ` fatal script error . ' 0 -I still like Moonlight Mile , better judgment be damned . 3 -A welcome relief from baseball movies that try too hard to be mythic , this one is a sweet and modest and ultimately winning story . 3 -a bilingual charmer , just like the woman who inspired it 3 -Like a less dizzily gorgeous companion to Mr. Wong 's In the Mood for Love -- very much a Hong Kong movie despite its mainland setting . 2 -As inept as big-screen remakes of The Avengers and The Wild Wild West . 1 -It 's everything you 'd expect -- but nothing more . 2 -Best indie of the year , so far . 4 -Hatfield and Hicks make the oddest of couples , and in this sense the movie becomes a study of the gambles of the publishing world , offering a case study that exists apart from all the movie 's political ramifications . 3 -It 's like going to a house party and watching the host defend himself against a frothing ex-girlfriend . 
1 -That the Chuck Norris `` grenade gag '' occurs about 7 times during Windtalkers is a good indication of how serious-minded the film is . 2 -The plot is romantic comedy boilerplate from start to finish . 2 -It arrives with an impeccable pedigree , mongrel pep , and almost indecipherable plot complications . 2 -A film that clearly means to preach exclusively to the converted . 2 -I still like Moonlight Mile , better judgment be damned . 3 -A welcome relief from baseball movies that try too hard to be mythic , this one is a sweet and modest and ultimately winning story . 3 -a bilingual charmer , just like the woman who inspired it 3 -Like a less dizzily gorgeous companion to Mr. Wong 's In the Mood for Love -- very much a Hong Kong movie despite its mainland setting . 2 -As inept as big-screen remakes of The Avengers and The Wild Wild West . 1 -It 's everything you 'd expect -- but nothing more . 2 -Best indie of the year , so far . 4 -Hatfield and Hicks make the oddest of couples , and in this sense the movie becomes a study of the gambles of the publishing world , offering a case study that exists apart from all the movie 's political ramifications . 3 -It 's like going to a house party and watching the host defend himself against a frothing ex-girlfriend . 1 -That the Chuck Norris `` grenade gag '' occurs about 7 times during Windtalkers is a good indication of how serious-minded the film is . 2 -The plot is romantic comedy boilerplate from start to finish . 2 -It arrives with an impeccable pedigree , mongrel pep , and almost indecipherable plot complications . 2 -A film that clearly means to preach exclusively to the converted . 2 -I still like Moonlight Mile , better judgment be damned . 3 -A welcome relief from baseball movies that try too hard to be mythic , this one is a sweet and modest and ultimately winning story . 3 -a bilingual charmer , just like the woman who inspired it 3 -Like a less dizzily gorgeous companion to Mr. Wong 's In the Mood for Love -- very much a Hong Kong movie despite its mainland setting . 2 -As inept as big-screen remakes of The Avengers and The Wild Wild West . 1 -It 's everything you 'd expect -- but nothing more . 2 -Best indie of the year , so far . 4 -Hatfield and Hicks make the oddest of couples , and in this sense the movie becomes a study of the gambles of the publishing world , offering a case study that exists apart from all the movie 's political ramifications . 3 -It 's like going to a house party and watching the host defend himself against a frothing ex-girlfriend . 1 -That the Chuck Norris `` grenade gag '' occurs about 7 times during Windtalkers is a good indication of how serious-minded the film is . 2 -The plot is romantic comedy boilerplate from start to finish . 2 -It arrives with an impeccable pedigree , mongrel pep , and almost indecipherable plot complications . 2 -A film that clearly means to preach exclusively to the converted . 
2 \ No newline at end of file diff --git a/tutorials/fastnlp_advanced_tutorial/vocab.txt b/tutorials/fastnlp_advanced_tutorial/vocab.txt deleted file mode 100644 index fb140275..00000000 --- a/tutorials/fastnlp_advanced_tutorial/vocab.txt +++ /dev/null @@ -1,30522 +0,0 @@ -[PAD] -[unused0] -[unused1] -[unused2] -[unused3] -[unused4] -[unused5] -[unused6] -[unused7] -[unused8] -[unused9] -[unused10] -[unused11] -[unused12] -[unused13] -[unused14] -[unused15] -[unused16] -[unused17] -[unused18] -[unused19] -[unused20] -[unused21] -[unused22] -[unused23] -[unused24] -[unused25] -[unused26] -[unused27] -[unused28] -[unused29] -[unused30] -[unused31] -[unused32] -[unused33] -[unused34] -[unused35] -[unused36] -[unused37] -[unused38] -[unused39] -[unused40] -[unused41] -[unused42] -[unused43] -[unused44] -[unused45] -[unused46] -[unused47] -[unused48] -[unused49] -[unused50] -[unused51] -[unused52] -[unused53] -[unused54] -[unused55] -[unused56] -[unused57] -[unused58] -[unused59] -[unused60] -[unused61] -[unused62] -[unused63] -[unused64] -[unused65] -[unused66] -[unused67] -[unused68] -[unused69] -[unused70] -[unused71] -[unused72] -[unused73] -[unused74] -[unused75] -[unused76] -[unused77] -[unused78] -[unused79] -[unused80] -[unused81] -[unused82] -[unused83] -[unused84] -[unused85] -[unused86] -[unused87] -[unused88] -[unused89] -[unused90] -[unused91] -[unused92] -[unused93] -[unused94] -[unused95] -[unused96] -[unused97] -[unused98] -[UNK] -[CLS] -[SEP] -[MASK] -[unused99] -[unused100] -[unused101] -[unused102] -[unused103] -[unused104] -[unused105] -[unused106] -[unused107] -[unused108] -[unused109] -[unused110] -[unused111] -[unused112] -[unused113] -[unused114] -[unused115] -[unused116] -[unused117] -[unused118] -[unused119] -[unused120] -[unused121] -[unused122] -[unused123] -[unused124] -[unused125] -[unused126] -[unused127] -[unused128] -[unused129] -[unused130] -[unused131] -[unused132] -[unused133] -[unused134] -[unused135] -[unused136] -[unused137] -[unused138] -[unused139] -[unused140] -[unused141] -[unused142] -[unused143] -[unused144] -[unused145] -[unused146] -[unused147] -[unused148] -[unused149] -[unused150] -[unused151] -[unused152] -[unused153] -[unused154] -[unused155] -[unused156] -[unused157] -[unused158] -[unused159] -[unused160] -[unused161] -[unused162] -[unused163] -[unused164] -[unused165] -[unused166] -[unused167] -[unused168] -[unused169] -[unused170] -[unused171] -[unused172] -[unused173] -[unused174] -[unused175] -[unused176] -[unused177] -[unused178] -[unused179] -[unused180] -[unused181] -[unused182] -[unused183] -[unused184] -[unused185] -[unused186] -[unused187] -[unused188] -[unused189] -[unused190] -[unused191] -[unused192] -[unused193] -[unused194] -[unused195] -[unused196] -[unused197] -[unused198] -[unused199] -[unused200] -[unused201] -[unused202] -[unused203] -[unused204] -[unused205] -[unused206] -[unused207] -[unused208] -[unused209] -[unused210] -[unused211] -[unused212] -[unused213] -[unused214] -[unused215] -[unused216] -[unused217] -[unused218] -[unused219] -[unused220] -[unused221] -[unused222] -[unused223] -[unused224] -[unused225] -[unused226] -[unused227] -[unused228] -[unused229] -[unused230] -[unused231] -[unused232] -[unused233] -[unused234] -[unused235] -[unused236] -[unused237] -[unused238] -[unused239] -[unused240] -[unused241] -[unused242] -[unused243] -[unused244] -[unused245] -[unused246] -[unused247] -[unused248] -[unused249] -[unused250] -[unused251] -[unused252] -[unused253] -[unused254] -[unused255] -[unused256] -[unused257] 
-[unused258] -[unused259] -[unused260] -[unused261] -[unused262] -[unused263] -[unused264] -[unused265] -[unused266] -[unused267] -[unused268] -[unused269] -[unused270] -[unused271] -[unused272] -[unused273] -[unused274] -[unused275] -[unused276] -[unused277] -[unused278] -[unused279] -[unused280] -[unused281] -[unused282] -[unused283] -[unused284] -[unused285] -[unused286] -[unused287] -[unused288] -[unused289] -[unused290] -[unused291] -[unused292] -[unused293] -[unused294] -[unused295] -[unused296] -[unused297] -[unused298] -[unused299] -[unused300] -[unused301] -[unused302] -[unused303] -[unused304] -[unused305] -[unused306] -[unused307] -[unused308] -[unused309] -[unused310] -[unused311] -[unused312] -[unused313] -[unused314] -[unused315] -[unused316] -[unused317] -[unused318] -[unused319] -[unused320] -[unused321] -[unused322] -[unused323] -[unused324] -[unused325] -[unused326] -[unused327] -[unused328] -[unused329] -[unused330] -[unused331] -[unused332] -[unused333] -[unused334] -[unused335] -[unused336] -[unused337] -[unused338] -[unused339] -[unused340] -[unused341] -[unused342] -[unused343] -[unused344] -[unused345] -[unused346] -[unused347] -[unused348] -[unused349] -[unused350] -[unused351] -[unused352] -[unused353] -[unused354] -[unused355] -[unused356] -[unused357] -[unused358] -[unused359] -[unused360] -[unused361] -[unused362] -[unused363] -[unused364] -[unused365] -[unused366] -[unused367] -[unused368] -[unused369] -[unused370] -[unused371] -[unused372] -[unused373] -[unused374] -[unused375] -[unused376] -[unused377] -[unused378] -[unused379] -[unused380] -[unused381] -[unused382] -[unused383] -[unused384] -[unused385] -[unused386] -[unused387] -[unused388] -[unused389] -[unused390] -[unused391] -[unused392] -[unused393] -[unused394] -[unused395] -[unused396] -[unused397] -[unused398] -[unused399] -[unused400] -[unused401] -[unused402] -[unused403] -[unused404] -[unused405] -[unused406] -[unused407] -[unused408] -[unused409] -[unused410] -[unused411] -[unused412] -[unused413] -[unused414] -[unused415] -[unused416] -[unused417] -[unused418] -[unused419] -[unused420] -[unused421] -[unused422] -[unused423] -[unused424] -[unused425] -[unused426] -[unused427] -[unused428] -[unused429] -[unused430] -[unused431] -[unused432] -[unused433] -[unused434] -[unused435] -[unused436] -[unused437] -[unused438] -[unused439] -[unused440] -[unused441] -[unused442] -[unused443] -[unused444] -[unused445] -[unused446] -[unused447] -[unused448] -[unused449] -[unused450] -[unused451] -[unused452] -[unused453] -[unused454] -[unused455] -[unused456] -[unused457] -[unused458] -[unused459] -[unused460] -[unused461] -[unused462] -[unused463] -[unused464] -[unused465] -[unused466] -[unused467] -[unused468] -[unused469] -[unused470] -[unused471] -[unused472] -[unused473] -[unused474] -[unused475] -[unused476] -[unused477] -[unused478] -[unused479] -[unused480] -[unused481] -[unused482] -[unused483] -[unused484] -[unused485] -[unused486] -[unused487] -[unused488] -[unused489] -[unused490] -[unused491] -[unused492] -[unused493] -[unused494] -[unused495] -[unused496] -[unused497] -[unused498] -[unused499] -[unused500] -[unused501] -[unused502] -[unused503] -[unused504] -[unused505] -[unused506] -[unused507] -[unused508] -[unused509] -[unused510] -[unused511] -[unused512] -[unused513] -[unused514] -[unused515] -[unused516] -[unused517] -[unused518] -[unused519] -[unused520] -[unused521] -[unused522] -[unused523] -[unused524] -[unused525] -[unused526] -[unused527] -[unused528] -[unused529] -[unused530] 
-[unused531] -[unused532] -[unused533] -[unused534] -[unused535] -[unused536] -[unused537] -[unused538] -[unused539] -[unused540] -[unused541] -[unused542] -[unused543] -[unused544] -[unused545] -[unused546] -[unused547] -[unused548] -[unused549] -[unused550] -[unused551] -[unused552] -[unused553] -[unused554] -[unused555] -[unused556] -[unused557] -[unused558] -[unused559] -[unused560] -[unused561] -[unused562] -[unused563] -[unused564] -[unused565] -[unused566] -[unused567] -[unused568] -[unused569] -[unused570] -[unused571] -[unused572] -[unused573] -[unused574] -[unused575] -[unused576] -[unused577] -[unused578] -[unused579] -[unused580] -[unused581] -[unused582] -[unused583] -[unused584] -[unused585] -[unused586] -[unused587] -[unused588] -[unused589] -[unused590] -[unused591] -[unused592] -[unused593] -[unused594] -[unused595] -[unused596] -[unused597] -[unused598] -[unused599] -[unused600] -[unused601] -[unused602] -[unused603] -[unused604] -[unused605] -[unused606] -[unused607] -[unused608] -[unused609] -[unused610] -[unused611] -[unused612] -[unused613] -[unused614] -[unused615] -[unused616] -[unused617] -[unused618] -[unused619] -[unused620] -[unused621] -[unused622] -[unused623] -[unused624] -[unused625] -[unused626] -[unused627] -[unused628] -[unused629] -[unused630] -[unused631] -[unused632] -[unused633] -[unused634] -[unused635] -[unused636] -[unused637] -[unused638] -[unused639] -[unused640] -[unused641] -[unused642] -[unused643] -[unused644] -[unused645] -[unused646] -[unused647] -[unused648] -[unused649] -[unused650] -[unused651] -[unused652] -[unused653] -[unused654] -[unused655] -[unused656] -[unused657] -[unused658] -[unused659] -[unused660] -[unused661] -[unused662] -[unused663] -[unused664] -[unused665] -[unused666] -[unused667] -[unused668] -[unused669] -[unused670] -[unused671] -[unused672] -[unused673] -[unused674] -[unused675] -[unused676] -[unused677] -[unused678] -[unused679] -[unused680] -[unused681] -[unused682] -[unused683] -[unused684] -[unused685] -[unused686] -[unused687] -[unused688] -[unused689] -[unused690] -[unused691] -[unused692] -[unused693] -[unused694] -[unused695] -[unused696] -[unused697] -[unused698] -[unused699] -[unused700] -[unused701] -[unused702] -[unused703] -[unused704] -[unused705] -[unused706] -[unused707] -[unused708] -[unused709] -[unused710] -[unused711] -[unused712] -[unused713] -[unused714] -[unused715] -[unused716] -[unused717] -[unused718] -[unused719] -[unused720] -[unused721] -[unused722] -[unused723] -[unused724] -[unused725] -[unused726] -[unused727] -[unused728] -[unused729] -[unused730] -[unused731] -[unused732] -[unused733] -[unused734] -[unused735] -[unused736] -[unused737] -[unused738] -[unused739] -[unused740] -[unused741] -[unused742] -[unused743] -[unused744] -[unused745] -[unused746] -[unused747] -[unused748] -[unused749] -[unused750] -[unused751] -[unused752] -[unused753] -[unused754] -[unused755] -[unused756] -[unused757] -[unused758] -[unused759] -[unused760] -[unused761] -[unused762] -[unused763] -[unused764] -[unused765] -[unused766] -[unused767] -[unused768] -[unused769] -[unused770] -[unused771] -[unused772] -[unused773] -[unused774] -[unused775] -[unused776] -[unused777] -[unused778] -[unused779] -[unused780] -[unused781] -[unused782] -[unused783] -[unused784] -[unused785] -[unused786] -[unused787] -[unused788] -[unused789] -[unused790] -[unused791] -[unused792] -[unused793] -[unused794] -[unused795] -[unused796] -[unused797] -[unused798] -[unused799] -[unused800] -[unused801] -[unused802] -[unused803] 
-[removed: contents of a BERT WordPiece vocabulary file (one token per line): reserved tokens [unused804] through [unused993]; ASCII punctuation, digits, and lowercase letters; single characters covering Latin-1 symbols, IPA, Greek, Cyrillic, Armenian, Hebrew, Arabic, Devanagari, Bengali, Tamil, Thai, Georgian, Hangul jamo, kana, CJK ideographs, and fullwidth punctuation; then frequency-ordered English whole-word and "##"-prefixed subword tokens beginning with "the", "of", "and" and ending at "openly"]
-presbyterian -activists -carpet -##iers -rubbing -uprising -##yi -cute -conceived -legally -##cht -millennium -cello -velocity -ji -rescued -cardiff -1835 -rex -concentrate -senators -beard -rendered -glowing -battalions -scouts -competitors -sculptor -catalogue -arctic -ion -raja -bicycle -wow -glancing -lawn -##woman -gentleman -lighthouse -publish -predicted -calculated -##val -variants -##gne -strain -##ui -winston -deceased -##nus -touchdowns -brady -caleb -sinking -echoed -crush -hon -blessed -protagonist -hayes -endangered -magnitude -editors -##tine -estimate -responsibilities -##mel -backup -laying -consumed -sealed -zurich -lovers -frustrated -##eau -ahmed -kicking -mit -treasurer -1832 -biblical -refuse -terrified -pump -agrees -genuine -imprisonment -refuses -plymouth -##hen -lou -##nen -tara -trembling -antarctic -ton -learns -##tas -crap -crucial -faction -atop -##borough -wrap -lancaster -odds -hopkins -erik -lyon -##eon -bros -##ode -snap -locality -tips -empress -crowned -cal -acclaimed -chuckled -##ory -clara -sends -mild -towel -##fl -##day -##а -wishing -assuming -interviewed -##bal -##die -interactions -eden -cups -helena -##lf -indie -beck -##fire -batteries -filipino -wizard -parted -##lam -traces -##born -rows -idol -albany -delegates -##ees -##sar -discussions -##ex -notre -instructed -belgrade -highways -suggestion -lauren -possess -orientation -alexandria -abdul -beats -salary -reunion -ludwig -alright -wagner -intimate -pockets -slovenia -hugged -brighton -merchants -cruel -stole -trek -slopes -repairs -enrollment -politically -underlying -promotional -counting -boeing -##bb -isabella -naming -##и -keen -bacteria -listing -separately -belfast -ussr -450 -lithuanian -anybody -ribs -sphere -martinez -cock -embarrassed -proposals -fragments -nationals -##fs -##wski -premises -fin -1500 -alpine -matched -freely -bounded -jace -sleeve -##af -gaming -pier -populated -evident -##like -frances -flooded -##dle -frightened -pour -trainer -framed -visitor -challenging -pig -wickets -##fold -infected -email -##pes -arose -##aw -reward -ecuador -oblast -vale -ch -shuttle -##usa -bach -rankings -forbidden -cornwall -accordance -salem -consumers -bruno -fantastic -toes -machinery -resolved -julius -remembering -propaganda -iceland -bombardment -tide -contacts -wives -##rah -concerto -macdonald -albania -implement -daisy -tapped -sudan -helmet -angela -mistress -##lic -crop -sunk -finest -##craft -hostile -##ute -##tsu -boxer -fr -paths -adjusted -habit -ballot -supervision -soprano -##zen -bullets -wicked -sunset -regiments -disappear -lamp -performs -app -##gia -##oa -rabbit -digging -incidents -entries -##cion -dishes -##oi -introducing -##ati -##fied -freshman -slot -jill -tackles -baroque -backs -##iest -lone -sponsor -destiny -altogether -convert -##aro -consensus -shapes -demonstration -basically -feminist -auction -artifacts -##bing -strongest -twitter -halifax -2019 -allmusic -mighty -smallest -precise -alexandra -viola -##los -##ille -manuscripts -##illo -dancers -ari -managers -monuments -blades -barracks -springfield -maiden -consolidated -electron -##end -berry -airing -wheat -nobel -inclusion -blair -payments -geography -bee -cc -eleanor -react -##hurst -afc -manitoba -##yu -su -lineup -fitness -recreational -investments -airborne -disappointment -##dis -edmonton -viewing -##row -renovation -##cast -infant -bankruptcy -roses -aftermath -pavilion -##yer -carpenter -withdrawal -ladder -##hy -discussing -popped -reliable -agreements -rochester -##abad -curves 
-bombers -220 -rao -reverend -decreased -choosing -107 -stiff -consulting -naples -crawford -tracy -ka -ribbon -cops -##lee -crushed -deciding -unified -teenager -accepting -flagship -explorer -poles -sanchez -inspection -revived -skilled -induced -exchanged -flee -locals -tragedy -swallow -loading -hanna -demonstrate -##ela -salvador -flown -contestants -civilization -##ines -wanna -rhodes -fletcher -hector -knocking -considers -##ough -nash -mechanisms -sensed -mentally -walt -unclear -##eus -renovated -madame -##cks -crews -governmental -##hin -undertaken -monkey -##ben -##ato -fatal -armored -copa -caves -governance -grasp -perception -certification -froze -damp -tugged -wyoming -##rg -##ero -newman -##lor -nerves -curiosity -graph -115 -##ami -withdraw -tunnels -dull -meredith -moss -exhibits -neighbors -communicate -accuracy -explored -raiders -republicans -secular -kat -superman -penny -criticised -##tch -freed -update -conviction -wade -ham -likewise -delegation -gotta -doll -promises -technological -myth -nationality -resolve -convent -##mark -sharon -dig -sip -coordinator -entrepreneur -fold -##dine -capability -councillor -synonym -blown -swan -cursed -1815 -jonas -haired -sofa -canvas -keeper -rivalry -##hart -rapper -speedway -swords -postal -maxwell -estonia -potter -recurring -##nn -##ave -errors -##oni -cognitive -1834 -##² -claws -nadu -roberto -bce -wrestler -ellie -##ations -infinite -ink -##tia -presumably -finite -staircase -108 -noel -patricia -nacional -##cation -chill -eternal -tu -preventing -prussia -fossil -limbs -##logist -ernst -frog -perez -rene -##ace -pizza -prussian -##ios -##vy -molecules -regulatory -answering -opinions -sworn -lengths -supposedly -hypothesis -upward -habitats -seating -ancestors -drank -yield -hd -synthesis -researcher -modest -##var -mothers -peered -voluntary -homeland -##the -acclaim -##igan -static -valve -luxembourg -alto -carroll -fe -receptor -norton -ambulance -##tian -johnston -catholics -depicting -jointly -elephant -gloria -mentor -badge -ahmad -distinguish -remarked -councils -precisely -allison -advancing -detection -crowded -##10 -cooperative -ankle -mercedes -dagger -surrendered -pollution -commit -subway -jeffrey -lesson -sculptures -provider -##fication -membrane -timothy -rectangular -fiscal -heating -teammate -basket -particle -anonymous -deployment -##ple -missiles -courthouse -proportion -shoe -sec -##ller -complaints -forbes -blacks -abandon -remind -sizes -overwhelming -autobiography -natalie -##awa -risks -contestant -countryside -babies -scorer -invaded -enclosed -proceed -hurling -disorders -##cu -reflecting -continuously -cruiser -graduates -freeway -investigated -ore -deserved -maid -blocking -phillip -jorge -shakes -dove -mann -variables -lacked -burden -accompanying -que -consistently -organizing -provisional -complained -endless -##rm -tubes -juice -georges -krishna -mick -labels -thriller -##uch -laps -arcade -sage -snail -##table -shannon -fi -laurence -seoul -vacation -presenting -hire -churchill -surprisingly -prohibited -savannah -technically -##oli -170 -##lessly -testimony -suited -speeds -toys -romans -mlb -flowering -measurement -talented -kay -settings -charleston -expectations -shattered -achieving -triumph -ceremonies -portsmouth -lanes -mandatory -loser -stretching -cologne -realizes -seventy -cornell -careers -webb -##ulating -americas -budapest -ava -suspicion -##ison -yo -conrad -##hai -sterling -jessie -rector -##az -1831 -transform -organize -loans -christine -volcanic -warrant -slender 
-summers -subfamily -newer -danced -dynamics -rhine -proceeds -heinrich -gastropod -commands -sings -facilitate -easter -ra -positioned -responses -expense -fruits -yanked -imported -25th -velvet -vic -primitive -tribune -baldwin -neighbourhood -donna -rip -hay -pr -##uro -1814 -espn -welcomed -##aria -qualifier -glare -highland -timing -##cted -shells -eased -geometry -louder -exciting -slovakia -##sion -##iz -##lot -savings -prairie -##ques -marching -rafael -tonnes -##lled -curtain -preceding -shy -heal -greene -worthy -##pot -detachment -bury -sherman -##eck -reinforced -seeks -bottles -contracted -duchess -outfit -walsh -##sc -mickey -##ase -geoffrey -archer -squeeze -dawson -eliminate -invention -##enberg -neal -##eth -stance -dealer -coral -maple -retire -polo -simplified -##ht -1833 -hid -watts -backwards -jules -##oke -genesis -mt -frames -rebounds -burma -woodland -moist -santos -whispers -drained -subspecies -##aa -streaming -ulster -burnt -correspondence -maternal -gerard -denis -stealing -##load -genius -duchy -##oria -inaugurated -momentum -suits -placement -sovereign -clause -thames -##hara -confederation -reservation -sketch -yankees -lets -rotten -charm -hal -verses -ultra -commercially -dot -salon -citation -adopt -winnipeg -mist -allocated -cairo -##boy -jenkins -interference -objectives -##wind -1820 -portfolio -armoured -sectors -##eh -initiatives -##world -integrity -exercises -robe -tap -ab -gazed -##tones -distracted -rulers -111 -favorable -jerome -tended -cart -factories -##eri -diplomat -valued -gravel -charitable -##try -calvin -exploring -chang -shepherd -terrace -pdf -pupil -##ural -reflects -ups -##rch -governors -shelf -depths -##nberg -trailed -crest -tackle -##nian -##ats -hatred -##kai -clare -makers -ethiopia -longtime -detected -embedded -lacking -slapped -rely -thomson -anticipation -iso -morton -successive -agnes -screenwriter -straightened -philippe -playwright -haunted -licence -iris -intentions -sutton -112 -logical -correctly -##weight -branded -licked -tipped -silva -ricky -narrator -requests -##ents -greeted -supernatural -cow -##wald -lung -refusing -employer -strait -gaelic -liner -##piece -zoe -sabha -##mba -driveway -harvest -prints -bates -reluctantly -threshold -algebra -ira -wherever -coupled -240 -assumption -picks -##air -designers -raids -gentlemen -##ean -roller -blowing -leipzig -locks -screw -dressing -strand -##lings -scar -dwarf -depicts -##nu -nods -##mine -differ -boris -##eur -yuan -flip -##gie -mob -invested -questioning -applying -##ture -shout -##sel -gameplay -blamed -illustrations -bothered -weakness -rehabilitation -##of -##zes -envelope -rumors -miners -leicester -subtle -kerry -##ico -ferguson -##fu -premiership -ne -##cat -bengali -prof -catches -remnants -dana -##rily -shouting -presidents -baltic -ought -ghosts -dances -sailors -shirley -fancy -dominic -##bie -madonna -##rick -bark -buttons -gymnasium -ashes -liver -toby -oath -providence -doyle -evangelical -nixon -cement -carnegie -embarked -hatch -surroundings -guarantee -needing -pirate -essence -##bee -filter -crane -hammond -projected -immune -percy -twelfth -##ult -regent -doctoral -damon -mikhail -##ichi -lu -critically -elect -realised -abortion -acute -screening -mythology -steadily -##fc -frown -nottingham -kirk -wa -minneapolis -##rra -module -algeria -mc -nautical -encounters -surprising -statues -availability -shirts -pie -alma -brows -munster -mack -soup -crater -tornado -sanskrit -cedar -explosive -bordered -dixon -planets -stamp -exam -happily 
-##bble -carriers -kidnapped -##vis -accommodation -emigrated -##met -knockout -correspondent -violation -profits -peaks -lang -specimen -agenda -ancestry -pottery -spelling -equations -obtaining -ki -linking -1825 -debris -asylum -##20 -buddhism -teddy -##ants -gazette -##nger -##sse -dental -eligibility -utc -fathers -averaged -zimbabwe -francesco -coloured -hissed -translator -lynch -mandate -humanities -mackenzie -uniforms -lin -##iana -##gio -asset -mhz -fitting -samantha -genera -wei -rim -beloved -shark -riot -entities -expressions -indo -carmen -slipping -owing -abbot -neighbor -sidney -##av -rats -recommendations -encouraging -squadrons -anticipated -commanders -conquered -##oto -donations -diagnosed -##mond -divide -##iva -guessed -decoration -vernon -auditorium -revelation -conversations -##kers -##power -herzegovina -dash -alike -protested -lateral -herman -accredited -mg -##gent -freeman -mel -fiji -crow -crimson -##rine -livestock -##pped -humanitarian -bored -oz -whip -##lene -##ali -legitimate -alter -grinning -spelled -anxious -oriental -wesley -##nin -##hole -carnival -controller -detect -##ssa -bowed -educator -kosovo -macedonia -##sin -occupy -mastering -stephanie -janeiro -para -unaware -nurses -noon -135 -cam -hopefully -ranger -combine -sociology -polar -rica -##eer -neill -##sman -holocaust -##ip -doubled -lust -1828 -109 -decent -cooling -unveiled -##card -1829 -nsw -homer -chapman -meyer -##gin -dive -mae -reagan -expertise -##gled -darwin -brooke -sided -prosecution -investigating -comprised -petroleum -genres -reluctant -differently -trilogy -johns -vegetables -corpse -highlighted -lounge -pension -unsuccessfully -elegant -aided -ivory -beatles -amelia -cain -dubai -sunny -immigrant -babe -click -##nder -underwater -pepper -combining -mumbled -atlas -horns -accessed -ballad -physicians -homeless -gestured -rpm -freak -louisville -corporations -patriots -prizes -rational -warn -modes -decorative -overnight -din -troubled -phantom -##ort -monarch -sheer -##dorf -generals -guidelines -organs -addresses -##zon -enhance -curling -parishes -cord -##kie -linux -caesar -deutsche -bavaria -##bia -coleman -cyclone -##eria -bacon -petty -##yama -##old -hampton -diagnosis -1824 -throws -complexity -rita -disputed -##₃ -pablo -##sch -marketed -trafficking -##ulus -examine -plague -formats -##oh -vault -faithful -##bourne -webster -##ox -highlights -##ient -##ann -phones -vacuum -sandwich -modeling -##gated -bolivia -clergy -qualities -isabel -##nas -##ars -wears -screams -reunited -annoyed -bra -##ancy -##rate -differential -transmitter -tattoo -container -poker -##och -excessive -resides -cowboys -##tum -augustus -trash -providers -statute -retreated -balcony -reversed -void -storey -preceded -masses -leap -laughs -neighborhoods -wards -schemes -falcon -santo -battlefield -pad -ronnie -thread -lesbian -venus -##dian -beg -sandstone -daylight -punched -gwen -analog -stroked -wwe -acceptable -measurements -dec -toxic -##kel -adequate -surgical -economist -parameters -varsity -##sberg -quantity -ella -##chy -##rton -countess -generating -precision -diamonds -expressway -ga -##ı -1821 -uruguay -talents -galleries -expenses -scanned -colleague -outlets -ryder -lucien -##ila -paramount -##bon -syracuse -dim -fangs -gown -sweep -##sie -toyota -missionaries -websites -##nsis -sentences -adviser -val -trademark -spells -##plane -patience -starter -slim -##borg -toe -incredibly -shoots -elliot -nobility -##wyn -cowboy -endorsed -gardner -tendency -persuaded -organisms -emissions 
-kazakhstan -amused -boring -chips -themed -##hand -llc -constantinople -chasing -systematic -guatemala -borrowed -erin -carey -##hard -highlands -struggles -1810 -##ifying -##ced -wong -exceptions -develops -enlarged -kindergarten -castro -##ern -##rina -leigh -zombie -juvenile -##most -consul -##nar -sailor -hyde -clarence -intensive -pinned -nasty -useless -jung -clayton -stuffed -exceptional -ix -apostolic -230 -transactions -##dge -exempt -swinging -cove -religions -##ash -shields -dairy -bypass -190 -pursuing -bug -joyce -bombay -chassis -southampton -chat -interact -redesignated -##pen -nascar -pray -salmon -rigid -regained -malaysian -grim -publicity -constituted -capturing -toilet -delegate -purely -tray -drift -loosely -striker -weakened -trinidad -mitch -itv -defines -transmitted -ming -scarlet -nodding -fitzgerald -fu -narrowly -sp -tooth -standings -virtue -##₁ -##wara -##cting -chateau -gloves -lid -##nel -hurting -conservatory -##pel -sinclair -reopened -sympathy -nigerian -strode -advocated -optional -chronic -discharge -##rc -suck -compatible -laurel -stella -shi -fails -wage -dodge -128 -informal -sorts -levi -buddha -villagers -##aka -chronicles -heavier -summoned -gateway -3000 -eleventh -jewelry -translations -accordingly -seas -##ency -fiber -pyramid -cubic -dragging -##ista -caring -##ops -android -contacted -lunar -##dt -kai -lisbon -patted -1826 -sacramento -theft -madagascar -subtropical -disputes -ta -holidays -piper -willow -mare -cane -itunes -newfoundland -benny -companions -dong -raj -observe -roar -charming -plaque -tibetan -fossils -enacted -manning -bubble -tina -tanzania -##eda -##hir -funk -swamp -deputies -cloak -ufc -scenario -par -scratch -metals -anthem -guru -engaging -specially -##boat -dialects -nineteen -cecil -duet -disability -messenger -unofficial -##lies -defunct -eds -moonlight -drainage -surname -puzzle -honda -switching -conservatives -mammals -knox -broadcaster -sidewalk -cope -##ried -benson -princes -peterson -##sal -bedford -sharks -eli -wreck -alberto -gasp -archaeology -lgbt -teaches -securities -madness -compromise -waving -coordination -davidson -visions -leased -possibilities -eighty -jun -fernandez -enthusiasm -assassin -sponsorship -reviewer -kingdoms -estonian -laboratories -##fy -##nal -applies -verb -celebrations -##zzo -rowing -lightweight -sadness -submit -mvp -balanced -dude -##vas -explicitly -metric -magnificent -mound -brett -mohammad -mistakes -irregular -##hing -##ass -sanders -betrayed -shipped -surge -##enburg -reporters -termed -georg -pity -verbal -bulls -abbreviated -enabling -appealed -##are -##atic -sicily -sting -heel -sweetheart -bart -spacecraft -brutal -monarchy -##tter -aberdeen -cameo -diane -##ub -survivor -clyde -##aries -complaint -##makers -clarinet -delicious -chilean -karnataka -coordinates -1818 -panties -##rst -pretending -ar -dramatically -kiev -bella -tends -distances -113 -catalog -launching -instances -telecommunications -portable -lindsay -vatican -##eim -angles -aliens -marker -stint -screens -bolton -##rne -judy -wool -benedict -plasma -europa -spark -imaging -filmmaker -swiftly -##een -contributor -##nor -opted -stamps -apologize -financing -butter -gideon -sophisticated -alignment -avery -chemicals -yearly -speculation -prominence -professionally -##ils -immortal -institutional -inception -wrists -identifying -tribunal -derives -gains -##wo -papal -preference -linguistic -vince -operative -brewery -##ont -unemployment -boyd -##ured -##outs -albeit -prophet -1813 -bi -##rr -##face -##rad 
-quarterly -asteroid -cleaned -radius -temper -##llen -telugu -jerk -viscount -menu -##ote -glimpse -##aya -yacht -hawaiian -baden -##rl -laptop -readily -##gu -monetary -offshore -scots -watches -##yang -##arian -upgrade -needle -xbox -lea -encyclopedia -flank -fingertips -##pus -delight -teachings -confirm -roth -beaches -midway -winters -##iah -teasing -daytime -beverly -gambling -bonnie -##backs -regulated -clement -hermann -tricks -knot -##shing -##uring -##vre -detached -ecological -owed -specialty -byron -inventor -bats -stays -screened -unesco -midland -trim -affection -##ander -##rry -jess -thoroughly -feedback -##uma -chennai -strained -heartbeat -wrapping -overtime -pleaded -##sworth -mon -leisure -oclc -##tate -##ele -feathers -angelo -thirds -nuts -surveys -clever -gill -commentator -##dos -darren -rides -gibraltar -##nc -##mu -dissolution -dedication -shin -meals -saddle -elvis -reds -chaired -taller -appreciation -functioning -niece -favored -advocacy -robbie -criminals -suffolk -yugoslav -passport -constable -congressman -hastings -vera -##rov -consecrated -sparks -ecclesiastical -confined -##ovich -muller -floyd -nora -1822 -paved -1827 -cumberland -ned -saga -spiral -##flow -appreciated -yi -collaborative -treating -similarities -feminine -finishes -##ib -jade -import -##nse -##hot -champagne -mice -securing -celebrities -helsinki -attributes -##gos -cousins -phases -ache -lucia -gandhi -submission -vicar -spear -shine -tasmania -biting -detention -constitute -tighter -seasonal -##gus -terrestrial -matthews -##oka -effectiveness -parody -philharmonic -##onic -1816 -strangers -encoded -consortium -guaranteed -regards -shifts -tortured -collision -supervisor -inform -broader -insight -theaters -armour -emeritus -blink -incorporates -mapping -##50 -##ein -handball -flexible -##nta -substantially -generous -thief -##own -carr -loses -1793 -prose -ucla -romeo -generic -metallic -realization -damages -mk -commissioners -zach -default -##ther -helicopters -lengthy -stems -spa -partnered -spectators -rogue -indication -penalties -teresa -1801 -sen -##tric -dalton -##wich -irving -photographic -##vey -dell -deaf -peters -excluded -unsure -##vable -patterson -crawled -##zio -resided -whipped -latvia -slower -ecole -pipes -employers -maharashtra -comparable -va -textile -pageant -##gel -alphabet -binary -irrigation -chartered -choked -antoine -offs -waking -supplement -##wen -quantities -demolition -regain -locate -urdu -folks -alt -114 -##mc -scary -andreas -whites -##ava -classrooms -mw -aesthetic -publishes -valleys -guides -cubs -johannes -bryant -conventions -affecting -##itt -drain -awesome -isolation -prosecutor -ambitious -apology -captive -downs -atmospheric -lorenzo -aisle -beef -foul -##onia -kidding -composite -disturbed -illusion -natives -##ffer -emi -rockets -riverside -wartime -painters -adolf -melted -##ail -uncertainty -simulation -hawks -progressed -meantime -builder -spray -breach -unhappy -regina -russians -##urg -determining -##tation -tram -1806 -##quin -aging -##12 -1823 -garion -rented -mister -diaz -terminated -clip -1817 -depend -nervously -disco -owe -defenders -shiva -notorious -disbelief -shiny -worcester -##gation -##yr -trailing -undertook -islander -belarus -limitations -watershed -fuller -overlooking -utilized -raphael -1819 -synthetic -breakdown -klein -##nate -moaned -memoir -lamb -practicing -##erly -cellular -arrows -exotic -##graphy -witches -117 -charted -rey -hut -hierarchy -subdivision -freshwater -giuseppe -aloud -reyes -qatar -marty 
-sideways -utterly -sexually -jude -prayers -mccarthy -softball -blend -damien -##gging -##metric -wholly -erupted -lebanese -negro -revenues -tasted -comparative -teamed -transaction -labeled -maori -sovereignty -parkway -trauma -gran -malay -121 -advancement -descendant -2020 -buzz -salvation -inventory -symbolic -##making -antarctica -mps -##gas -##bro -mohammed -myanmar -holt -submarines -tones -##lman -locker -patriarch -bangkok -emerson -remarks -predators -kin -afghan -confession -norwich -rental -emerge -advantages -##zel -rca -##hold -shortened -storms -aidan -##matic -autonomy -compliance -##quet -dudley -atp -##osis -1803 -motto -documentation -summary -professors -spectacular -christina -archdiocese -flashing -innocence -remake -##dell -psychic -reef -scare -employ -rs -sticks -meg -gus -leans -##ude -accompany -bergen -tomas -##iko -doom -wages -pools -##nch -##bes -breasts -scholarly -alison -outline -brittany -breakthrough -willis -realistic -##cut -##boro -competitor -##stan -pike -picnic -icon -designing -commercials -washing -villain -skiing -micro -costumes -auburn -halted -executives -##hat -logistics -cycles -vowel -applicable -barrett -exclaimed -eurovision -eternity -ramon -##umi -##lls -modifications -sweeping -disgust -##uck -torch -aviv -ensuring -rude -dusty -sonic -donovan -outskirts -cu -pathway -##band -##gun -##lines -disciplines -acids -cadet -paired -##40 -sketches -##sive -marriages -##⁺ -folding -peers -slovak -implies -admired -##beck -1880s -leopold -instinct -attained -weston -megan -horace -##ination -dorsal -ingredients -evolutionary -##its -complications -deity -lethal -brushing -levy -deserted -institutes -posthumously -delivering -telescope -coronation -motivated -rapids -luc -flicked -pays -volcano -tanner -weighed -##nica -crowds -frankie -gifted -addressing -granddaughter -winding -##rna -constantine -gomez -##front -landscapes -rudolf -anthropology -slate -werewolf -##lio -astronomy -circa -rouge -dreaming -sack -knelt -drowned -naomi -prolific -tracked -freezing -herb -##dium -agony -randall -twisting -wendy -deposit -touches -vein -wheeler -##bbled -##bor -batted -retaining -tire -presently -compare -specification -daemon -nigel -##grave -merry -recommendation -czechoslovakia -sandra -ng -roma -##sts -lambert -inheritance -sheikh -winchester -cries -examining -##yle -comeback -cuisine -nave -##iv -ko -retrieve -tomatoes -barker -polished -defining -irene -lantern -personalities -begging -tract -swore -1809 -175 -##gic -omaha -brotherhood -##rley -haiti -##ots -exeter -##ete -##zia -steele -dumb -pearson -210 -surveyed -elisabeth -trends -##ef -fritz -##rf -premium -bugs -fraction -calmly -viking -##birds -tug -inserted -unusually -##ield -confronted -distress -crashing -brent -turks -resign -##olo -cambodia -gabe -sauce -##kal -evelyn -116 -extant -clusters -quarry -teenagers -luna -##lers -##ister -affiliation -drill -##ashi -panthers -scenic -libya -anita -strengthen -inscriptions -##cated -lace -sued -judith -riots -##uted -mint -##eta -preparations -midst -dub -challenger -##vich -mock -cf -displaced -wicket -breaths -enables -schmidt -analyst -##lum -ag -highlight -automotive -axe -josef -newark -sufficiently -resembles -50th -##pal -flushed -mum -traits -##ante -commodore -incomplete -warming -titular -ceremonial -ethical -118 -celebrating -eighteenth -cao -lima -medalist -mobility -strips -snakes -##city -miniature -zagreb -barton -escapes -umbrella -automated -doubted -differs -cooled -georgetown -dresden -cooked -fade -wyatt -rna 
-jacobs -carlton -abundant -stereo -boost -madras -inning -##hia -spur -ip -malayalam -begged -osaka -groan -escaping -charging -dose -vista -##aj -bud -papa -communists -advocates -edged -tri -##cent -resemble -peaking -necklace -fried -montenegro -saxony -goose -glances -stuttgart -curator -recruit -grocery -sympathetic -##tting -##fort -127 -lotus -randolph -ancestor -##rand -succeeding -jupiter -1798 -macedonian -##heads -hiking -1808 -handing -fischer -##itive -garbage -node -##pies -prone -singular -papua -inclined -attractions -italia -pouring -motioned -grandma -garnered -jacksonville -corp -ego -ringing -aluminum -##hausen -ordering -##foot -drawer -traders -synagogue -##play -##kawa -resistant -wandering -fragile -fiona -teased -var -hardcore -soaked -jubilee -decisive -exposition -mercer -poster -valencia -hale -kuwait -1811 -##ises -##wr -##eed -tavern -gamma -122 -johan -##uer -airways -amino -gil -##ury -vocational -domains -torres -##sp -generator -folklore -outcomes -##keeper -canberra -shooter -fl -beams -confrontation -##lling -##gram -feb -aligned -forestry -pipeline -jax -motorway -conception -decay -##tos -coffin -##cott -stalin -1805 -escorted -minded -##nam -sitcom -purchasing -twilight -veronica -additions -passive -tensions -straw -123 -frequencies -1804 -refugee -cultivation -##iate -christie -clary -bulletin -crept -disposal -##rich -##zong -processor -crescent -##rol -bmw -emphasized -whale -nazis -aurora -##eng -dwelling -hauled -sponsors -toledo -mega -ideology -theatres -tessa -cerambycidae -saves -turtle -cone -suspects -kara -rusty -yelling -greeks -mozart -shades -cocked -participant -##tro -shire -spit -freeze -necessity -##cos -inmates -nielsen -councillors -loaned -uncommon -omar -peasants -botanical -offspring -daniels -formations -jokes -1794 -pioneers -sigma -licensing -##sus -wheelchair -polite -1807 -liquor -pratt -trustee -##uta -forewings -balloon -##zz -kilometre -camping -explicit -casually -shawn -foolish -teammates -nm -hassan -carrie -judged -satisfy -vanessa -knives -selective -cnn -flowed -##lice -eclipse -stressed -eliza -mathematician -cease -cultivated -##roy -commissions -browns -##ania -destroyers -sheridan -meadow -##rius -minerals -##cial -downstream -clash -gram -memoirs -ventures -baha -seymour -archie -midlands -edith -fare -flynn -invite -canceled -tiles -stabbed -boulder -incorporate -amended -camden -facial -mollusk -unreleased -descriptions -yoga -grabs -550 -raises -ramp -shiver -##rose -coined -pioneering -tunes -qing -warwick -tops -119 -melanie -giles -##rous -wandered -##inal -annexed -nov -30th -unnamed -##ished -organizational -airplane -normandy -stoke -whistle -blessing -violations -chased -holders -shotgun -##ctic -outlet -reactor -##vik -tires -tearing -shores -fortified -mascot -constituencies -nc -columnist -productive -tibet -##rta -lineage -hooked -oct -tapes -judging -cody -##gger -hansen -kashmir -triggered -##eva -solved -cliffs -##tree -resisted -anatomy -protesters -transparent -implied -##iga -injection -mattress -excluding -##mbo -defenses -helpless -devotion -##elli -growl -liberals -weber -phenomena -atoms -plug -##iff -mortality -apprentice -howe -convincing -aaa -swimmer -barber -leone -promptly -sodium -def -nowadays -arise -##oning -gloucester -corrected -dignity -norm -erie -##ders -elders -evacuated -sylvia -compression -##yar -hartford -pose -backpack -reasoning -accepts -24th -wipe -millimetres -marcel -##oda -dodgers -albion -1790 -overwhelmed -aerospace -oaks -1795 -showcase -acknowledge 
-recovering -nolan -ashe -hurts -geology -fashioned -disappearance -farewell -swollen -shrug -marquis -wimbledon -124 -rue -1792 -commemorate -reduces -experiencing -inevitable -calcutta -intel -##court -murderer -sticking -fisheries -imagery -bloom -280 -brake -##inus -gustav -hesitation -memorable -po -viral -beans -accidents -tunisia -antenna -spilled -consort -treatments -aye -perimeter -##gard -donation -hostage -migrated -banker -addiction -apex -lil -trout -##ously -conscience -##nova -rams -sands -genome -passionate -troubles -##lets -##set -amid -##ibility -##ret -higgins -exceed -vikings -##vie -payne -##zan -muscular -##ste -defendant -sucking -##wal -ibrahim -fuselage -claudia -vfl -europeans -snails -interval -##garh -preparatory -statewide -tasked -lacrosse -viktor -##lation -angola -##hra -flint -implications -employs -teens -patrons -stall -weekends -barriers -scrambled -nucleus -tehran -jenna -parsons -lifelong -robots -displacement -5000 -##bles -precipitation -##gt -knuckles -clutched -1802 -marrying -ecology -marx -accusations -declare -scars -kolkata -mat -meadows -bermuda -skeleton -finalists -vintage -crawl -coordinate -affects -subjected -orchestral -mistaken -##tc -mirrors -dipped -relied -260 -arches -candle -##nick -incorporating -wildly -fond -basilica -owl -fringe -rituals -whispering -stirred -feud -tertiary -slick -goat -honorable -whereby -skip -ricardo -stripes -parachute -adjoining -submerged -synthesizer -##gren -intend -positively -ninety -phi -beaver -partition -fellows -alexis -prohibition -carlisle -bizarre -fraternity -##bre -doubts -icy -cbc -aquatic -sneak -sonny -combines -airports -crude -supervised -spatial -merge -alfonso -##bic -corrupt -scan -undergo -##ams -disabilities -colombian -comparing -dolphins -perkins -##lish -reprinted -unanimous -bounced -hairs -underworld -midwest -semester -bucket -paperback -miniseries -coventry -demise -##leigh -demonstrations -sensor -rotating -yan -##hler -arrange -soils -##idge -hyderabad -labs -##dr -brakes -grandchildren -##nde -negotiated -rover -ferrari -continuation -directorate -augusta -stevenson -counterpart -gore -##rda -nursery -rican -ave -collectively -broadly -pastoral -repertoire -asserted -discovering -nordic -styled -fiba -cunningham -harley -middlesex -survives -tumor -tempo -zack -aiming -lok -urgent -##rade -##nto -devils -##ement -contractor -turin -##wl -##ool -bliss -repaired -simmons -moan -astronomical -cr -negotiate -lyric -1890s -lara -bred -clad -angus -pbs -##ience -engineered -posed -##lk -hernandez -possessions -elbows -psychiatric -strokes -confluence -electorate -lifts -campuses -lava -alps -##ep -##ution -##date -physicist -woody -##page -##ographic -##itis -juliet -reformation -sparhawk -320 -complement -suppressed -jewel -##½ -floated -##kas -continuity -sadly -##ische -inability -melting -scanning -paula -flour -judaism -safer -vague -##lm -solving -curb -##stown -financially -gable -bees -expired -miserable -cassidy -dominion -1789 -cupped -145 -robbery -facto -amos -warden -resume -tallest -marvin -ing -pounded -usd -declaring -gasoline -##aux -darkened -270 -650 -sophomore -##mere -erection -gossip -televised -risen -dial -##eu -pillars -##link -passages -profound -##tina -arabian -ashton -silicon -nail -##ead -##lated -##wer -##hardt -fleming -firearms -ducked -circuits -blows -waterloo -titans -##lina -atom -fireplace -cheshire -financed -activation -algorithms -##zzi -constituent -catcher -cherokee -partnerships -sexuality -platoon -tragic -vivian -guarded -whiskey 
-meditation -poetic -##late -##nga -##ake -porto -listeners -dominance -kendra -mona -chandler -factions -22nd -salisbury -attitudes -derivative -##ido -##haus -intake -paced -javier -illustrator -barrels -bias -cockpit -burnett -dreamed -ensuing -##anda -receptors -someday -hawkins -mattered -##lal -slavic -1799 -jesuit -cameroon -wasted -tai -wax -lowering -victorious -freaking -outright -hancock -librarian -sensing -bald -calcium -myers -tablet -announcing -barack -shipyard -pharmaceutical -##uan -greenwich -flush -medley -patches -wolfgang -pt -speeches -acquiring -exams -nikolai -##gg -hayden -kannada -##type -reilly -##pt -waitress -abdomen -devastated -capped -pseudonym -pharmacy -fulfill -paraguay -1796 -clicked -##trom -archipelago -syndicated -##hman -lumber -orgasm -rejection -clifford -lorraine -advent -mafia -rodney -brock -##ght -##used -##elia -cassette -chamberlain -despair -mongolia -sensors -developmental -upstream -##eg -##alis -spanning -165 -trombone -basque -seeded -interred -renewable -rhys -leapt -revision -molecule -##ages -chord -vicious -nord -shivered -23rd -arlington -debts -corpus -sunrise -bays -blackburn -centimetres -##uded -shuddered -gm -strangely -gripping -cartoons -isabelle -orbital -##ppa -seals -proving -##lton -refusal -strengthened -bust -assisting -baghdad -batsman -portrayal -mara -pushes -spears -og -##cock -reside -nathaniel -brennan -1776 -confirmation -caucus -##worthy -markings -yemen -nobles -ku -lazy -viewer -catalan -encompasses -sawyer -##fall -sparked -substances -patents -braves -arranger -evacuation -sergio -persuade -dover -tolerance -penguin -cum -jockey -insufficient -townships -occupying -declining -plural -processed -projection -puppet -flanders -introduces -liability -##yon -gymnastics -antwerp -taipei -hobart -candles -jeep -wes -observers -126 -chaplain -bundle -glorious -##hine -hazel -flung -sol -excavations -dumped -stares -sh -bangalore -triangular -icelandic -intervals -expressing -turbine -##vers -songwriting -crafts -##igo -jasmine -ditch -rite -##ways -entertaining -comply -sorrow -wrestlers -basel -emirates -marian -rivera -helpful -##some -caution -downward -networking -##atory -##tered -darted -genocide -emergence -replies -specializing -spokesman -convenient -unlocked -fading -augustine -concentrations -resemblance -elijah -investigator -andhra -##uda -promotes -bean -##rrell -fleeing -wan -simone -announcer -##ame -##bby -lydia -weaver -132 -residency -modification -##fest -stretches -##ast -alternatively -nat -lowe -lacks -##ented -pam -tile -concealed -inferior -abdullah -residences -tissues -vengeance -##ided -moisture -peculiar -groove -zip -bologna -jennings -ninja -oversaw -zombies -pumping -batch -livingston -emerald -installations -1797 -peel -nitrogen -rama -##fying -##star -schooling -strands -responding -werner -##ost -lime -casa -accurately -targeting -##rod -underway -##uru -hemisphere -lester -##yard -occupies -2d -griffith -angrily -reorganized -##owing -courtney -deposited -##dd -##30 -estadio -##ifies -dunn -exiled -##ying -checks -##combe -##о -##fly -successes -unexpectedly -blu -assessed -##flower -##ه -observing -sacked -spiders -kn -##tail -mu -nodes -prosperity -audrey -divisional -155 -broncos -tangled -adjust -feeds -erosion -paolo -surf -directory -snatched -humid -admiralty -screwed -gt -reddish -##nese -modules -trench -lamps -bind -leah -bucks -competes -##nz -##form -transcription -##uc -isles -violently -clutching -pga -cyclist -inflation -flats -ragged -unnecessary -##hian -stubborn 
-coordinated -harriet -baba -disqualified -330 -insect -wolfe -##fies -reinforcements -rocked -duel -winked -embraced -bricks -##raj -hiatus -defeats -pending -brightly -jealousy -##xton -##hm -##uki -lena -gdp -colorful -##dley -stein -kidney -##shu -underwear -wanderers -##haw -##icus -guardians -m³ -roared -habits -##wise -permits -gp -uranium -punished -disguise -bundesliga -elise -dundee -erotic -partisan -pi -collectors -float -individually -rendering -behavioral -bucharest -ser -hare -valerie -corporal -nutrition -proportional -##isa -immense -##kis -pavement -##zie -##eld -sutherland -crouched -1775 -##lp -suzuki -trades -endurance -operas -crosby -prayed -priory -rory -socially -##urn -gujarat -##pu -walton -cube -pasha -privilege -lennon -floods -thorne -waterfall -nipple -scouting -approve -##lov -minorities -voter -dwight -extensions -assure -ballroom -slap -dripping -privileges -rejoined -confessed -demonstrating -patriotic -yell -investor -##uth -pagan -slumped -squares -##cle -##kins -confront -bert -embarrassment -##aid -aston -urging -sweater -starr -yuri -brains -williamson -commuter -mortar -structured -selfish -exports -##jon -cds -##him -unfinished -##rre -mortgage -destinations -##nagar -canoe -solitary -buchanan -delays -magistrate -fk -##pling -motivation -##lier -##vier -recruiting -assess -##mouth -malik -antique -1791 -pius -rahman -reich -tub -zhou -smashed -airs -galway -xii -conditioning -honduras -discharged -dexter -##pf -lionel -129 -debates -lemon -tiffany -volunteered -dom -dioxide -procession -devi -sic -tremendous -advertisements -colts -transferring -verdict -hanover -decommissioned -utter -relate -pac -racism -##top -beacon -limp -similarity -terra -occurrence -ant -##how -becky -capt -updates -armament -richie -pal -##graph -halloween -mayo -##ssen -##bone -cara -serena -fcc -dolls -obligations -##dling -violated -lafayette -jakarta -exploitation -##ime -infamous -iconic -##lah -##park -kitty -moody -reginald -dread -spill -crystals -olivier -modeled -bluff -equilibrium -separating -notices -ordnance -extinction -onset -cosmic -attachment -sammy -expose -privy -anchored -##bil -abbott -admits -bending -baritone -emmanuel -policeman -vaughan -winged -climax -dresses -denny -polytechnic -mohamed -burmese -authentic -nikki -genetics -grandparents -homestead -gaza -postponed -metacritic -una -##sby -##bat -unstable -dissertation -##rial -##cian -curls -obscure -uncovered -bronx -praying -disappearing -##hoe -prehistoric -coke -turret -mutations -nonprofit -pits -monaco -##ي -##usion -prominently -dispatched -podium -##mir -uci -##uation -133 -fortifications -birthplace -kendall -##lby -##oll -preacher -rack -goodman -##rman -persistent -##ott -countless -jaime -recorder -lexington -persecution -jumps -renewal -wagons -##11 -crushing -##holder -decorations -##lake -abundance -wrath -laundry -£1 -garde -##rp -jeanne -beetles -peasant -##sl -splitting -caste -sergei -##rer -##ema -scripts -##ively -rub -satellites -##vor -inscribed -verlag -scrapped -gale -packages -chick -potato -slogan -kathleen -arabs -##culture -counterparts -reminiscent -choral -##tead -rand -retains -bushes -dane -accomplish -courtesy -closes -##oth -slaughter -hague -krakow -lawson -tailed -elias -ginger -##ttes -canopy -betrayal -rebuilding -turf -##hof -frowning -allegiance -brigades -kicks -rebuild -polls -alias -nationalism -td -rowan -audition -bowie -fortunately -recognizes -harp -dillon -horrified -##oro -renault -##tics -ropes -##α -presumed -rewarded -infrared -wiping 
-accelerated -illustration -##rid -presses -practitioners -badminton -##iard -detained -##tera -recognizing -relates -misery -##sies -##tly -reproduction -piercing -potatoes -thornton -esther -manners -hbo -##aan -ours -bullshit -ernie -perennial -sensitivity -illuminated -rupert -##jin -##iss -##ear -rfc -nassau -##dock -staggered -socialism -##haven -appointments -nonsense -prestige -sharma -haul -##tical -solidarity -gps -##ook -##rata -igor -pedestrian -##uit -baxter -tenants -wires -medication -unlimited -guiding -impacts -diabetes -##rama -sasha -pas -clive -extraction -131 -continually -constraints -##bilities -sonata -hunted -sixteenth -chu -planting -quote -mayer -pretended -abs -spat -##hua -ceramic -##cci -curtains -pigs -pitching -##dad -latvian -sore -dayton -##sted -##qi -patrols -slice -playground -##nted -shone -stool -apparatus -inadequate -mates -treason -##ija -desires -##liga -##croft -somalia -laurent -mir -leonardo -oracle -grape -obliged -chevrolet -thirteenth -stunning -enthusiastic -##ede -accounted -concludes -currents -basil -##kovic -drought -##rica -mai -##aire -shove -posting -##shed -pilgrimage -humorous -packing -fry -pencil -wines -smells -144 -marilyn -aching -newest -clung -bon -neighbours -sanctioned -##pie -mug -##stock -drowning -##mma -hydraulic -##vil -hiring -reminder -lilly -investigators -##ncies -sour -##eous -compulsory -packet -##rion -##graphic -##elle -cannes -##inate -depressed -##rit -heroic -importantly -theresa -##tled -conway -saturn -marginal -rae -##xia -corresponds -royce -pact -jasper -explosives -packaging -aluminium -##ttered -denotes -rhythmic -spans -assignments -hereditary -outlined -originating -sundays -lad -reissued -greeting -beatrice -##dic -pillar -marcos -plots -handbook -alcoholic -judiciary -avant -slides -extract -masculine -blur -##eum -##force -homage -trembled -owens -hymn -trey -omega -signaling -socks -accumulated -reacted -attic -theo -lining -angie -distraction -primera -talbot -##key -1200 -ti -creativity -billed -##hey -deacon -eduardo -identifies -proposition -dizzy -gunner -hogan -##yam -##pping -##hol -ja -##chan -jensen -reconstructed -##berger -clearance -darius -##nier -abe -harlem -plea -dei -circled -emotionally -notation -fascist -neville -exceeded -upwards -viable -ducks -##fo -workforce -racer -limiting -shri -##lson -possesses -1600 -kerr -moths -devastating -laden -disturbing -locking -##cture -gal -fearing -accreditation -flavor -aide -1870s -mountainous -##baum -melt -##ures -motel -texture -servers -soda -##mb -herd -##nium -erect -puzzled -hum -peggy -examinations -gould -testified -geoff -ren -devised -sacks -##law -denial -posters -grunted -cesar -tutor -ec -gerry -offerings -byrne -falcons -combinations -ct -incoming -pardon -rocking -26th -avengers -flared -mankind -seller -uttar -loch -nadia -stroking -exposing -##hd -fertile -ancestral -instituted -##has -noises -prophecy -taxation -eminent -vivid -pol -##bol -dart -indirect -multimedia -notebook -upside -displaying -adrenaline -referenced -geometric -##iving -progression -##ddy -blunt -announce -##far -implementing -##lav -aggression -liaison -cooler -cares -headache -plantations -gorge -dots -impulse -thickness -ashamed -averaging -kathy -obligation -precursor -137 -fowler -symmetry -thee -225 -hears -##rai -undergoing -ads -butcher -bowler -##lip -cigarettes -subscription -goodness -##ically -browne -##hos -##tech -kyoto -donor -##erty -damaging -friction -drifting -expeditions -hardened -prostitution -152 -fauna -blankets -claw 
-tossing -snarled -butterflies -recruits -investigative -coated -healed -138 -communal -hai -xiii -academics -boone -psychologist -restless -lahore -stephens -mba -brendan -foreigners -printer -##pc -ached -explode -27th -deed -scratched -dared -##pole -cardiac -1780 -okinawa -proto -commando -compelled -oddly -electrons -##base -replica -thanksgiving -##rist -sheila -deliberate -stafford -tidal -representations -hercules -ou -##path -##iated -kidnapping -lenses -##tling -deficit -samoa -mouths -consuming -computational -maze -granting -smirk -razor -fixture -ideals -inviting -aiden -nominal -##vs -issuing -julio -pitt -ramsey -docks -##oss -exhaust -##owed -bavarian -draped -anterior -mating -ethiopian -explores -noticing -##nton -discarded -convenience -hoffman -endowment -beasts -cartridge -mormon -paternal -probe -sleeves -interfere -lump -deadline -##rail -jenks -bulldogs -scrap -alternating -justified -reproductive -nam -seize -descending -secretariat -kirby -coupe -grouped -smash -panther -sedan -tapping -##18 -lola -cheer -germanic -unfortunate -##eter -unrelated -##fan -subordinate -##sdale -suzanne -advertisement -##ility -horsepower -##lda -cautiously -discourse -luigi -##mans -##fields -noun -prevalent -mao -schneider -everett -surround -governorate -kira -##avia -westward -##take -misty -rails -sustainability -134 -unused -##rating -packs -toast -unwilling -regulate -thy -suffrage -nile -awe -assam -definitions -travelers -affordable -##rb -conferred -sells -undefeated -beneficial -torso -basal -repeating -remixes -##pass -bahrain -cables -fang -##itated -excavated -numbering -statutory -##rey -deluxe -##lian -forested -ramirez -derbyshire -zeus -slamming -transfers -astronomer -banana -lottery -berg -histories -bamboo -##uchi -resurrection -posterior -bowls -vaguely -##thi -thou -preserving -tensed -offence -##inas -meyrick -callum -ridden -watt -langdon -tying -lowland -snorted -daring -truman -##hale -##girl -aura -overly -filing -weighing -goa -infections -philanthropist -saunders -eponymous -##owski -latitude -perspectives -reviewing -mets -commandant -radial -##kha -flashlight -reliability -koch -vowels -amazed -ada -elaine -supper -##rth -##encies -predator -debated -soviets -cola -##boards -##nah -compartment -crooked -arbitrary -fourteenth -##ctive -havana -majors -steelers -clips -profitable -ambush -exited -packers -##tile -nude -cracks -fungi -##е -limb -trousers -josie -shelby -tens -frederic -##ος -definite -smoothly -constellation -insult -baton -discs -lingering -##nco -conclusions -lent -staging -becker -grandpa -shaky -##tron -einstein -obstacles -sk -adverse -elle -economically -##moto -mccartney -thor -dismissal -motions -readings -nostrils -treatise -##pace -squeezing -evidently -prolonged -1783 -venezuelan -je -marguerite -beirut -takeover -shareholders -##vent -denise -digit -airplay -norse -##bbling -imaginary -pills -hubert -blaze -vacated -eliminating -##ello -vine -mansfield -##tty -retrospective -barrow -borne -clutch -bail -forensic -weaving -##nett -##witz -desktop -citadel -promotions -worrying -dorset -ieee -subdivided -##iating -manned -expeditionary -pickup -synod -chuckle -185 -barney -##rz -##ffin -functionality -karachi -litigation -meanings -uc -lick -turbo -anders -##ffed -execute -curl -oppose -ankles -typhoon -##د -##ache -##asia -linguistics -compassion -pressures -grazing -perfection -##iting -immunity -monopoly -muddy -backgrounds -136 -namibia -francesca -monitors -attracting -stunt -tuition -##ии -vegetable -##mates -##quent -mgm 
-[BERT WordPiece vocabulary tokens elided: this hunk removes the English vocabulary file entries, one token per line in the original diff (e.g. "##ond", "cellar", "beethoven", "##uz", "macquarie")]
-bearded -denton -heaving -802 -##ocation -winery -assign -dortmund -##lkirk -everest -invariant -charismatic -susie -##elling -bled -lesley -telegram -sumner -bk -##ogen -##к -wilcox -needy -colbert -duval -##iferous -##mbled -allotted -attends -imperative -##hita -replacements -hawker -##inda -insurgency -##zee -##eke -casts -##yla -680 -ives -transitioned -##pack -##powering -authoritative -baylor -flex -cringed -plaintiffs -woodrow -##skie -drastic -ape -aroma -unfolded -commotion -nt -preoccupied -theta -routines -lasers -privatization -wand -domino -ek -clenching -nsa -strategically -showered -bile -handkerchief -pere -storing -christophe -insulting -316 -nakamura -romani -asiatic -magdalena -palma -cruises -stripping -405 -konstantin -soaring -##berman -colloquially -forerunner -havilland -incarcerated -parasites -sincerity -##utus -disks -plank -saigon -##ining -corbin -homo -ornaments -powerhouse -##tlement -chong -fastened -feasibility -idf -morphological -usable -##nish -##zuki -aqueduct -jaguars -keepers -##flies -aleksandr -faust -assigns -ewing -bacterium -hurled -tricky -hungarians -integers -wallis -321 -yamaha -##isha -hushed -oblivion -aviator -evangelist -friars -##eller -monograph -ode -##nary -airplanes -labourers -charms -##nee -1661 -hagen -tnt -rudder -fiesta -transcript -dorothea -ska -inhibitor -maccabi -retorted -raining -encompassed -clauses -menacing -1642 -lineman -##gist -vamps -##ape -##dick -gloom -##rera -dealings -easing -seekers -##nut -##pment -helens -unmanned -##anu -##isson -basics -##amy -##ckman -adjustments -1688 -brutality -horne -##zell -sui -##55 -##mable -aggregator -##thal -rhino -##drick -##vira -counters -zoom -##01 -##rting -mn -montenegrin -packard -##unciation -##♭ -##kki -reclaim -scholastic -thugs -pulsed -##icia -syriac -quan -saddam -banda -kobe -blaming -buddies -dissent -##lusion -##usia -corbett -jaya -delle -erratic -lexie -##hesis -435 -amiga -hermes -##pressing -##leen -chapels -gospels -jamal -##uating -compute -revolving -warp -##sso -##thes -armory -##eras -##gol -antrim -loki -##kow -##asian -##good -##zano -braid -handwriting -subdistrict -funky -pantheon -##iculate -concurrency -estimation -improper -juliana -##his -newcomers -johnstone -staten -communicated -##oco -##alle -sausage -stormy -##stered -##tters -superfamily -##grade -acidic -collateral -tabloid -##oped -##rza -bladder -austen -##ellant -mcgraw -##hay -hannibal -mein -aquino -lucifer -wo -badger -boar -cher -christensen -greenberg -interruption -##kken -jem -244 -mocked -bottoms -cambridgeshire -##lide -sprawling -##bbly -eastwood -ghent -synth -##buck -advisers -##bah -nominally -hapoel -qu -daggers -estranged -fabricated -towels -vinnie -wcw -misunderstanding -anglia -nothin -unmistakable -##dust -##lova -chilly -marquette -truss -##edge -##erine -reece -##lty -##chemist -##connected -272 -308 -41st -bash -raion -waterfalls -##ump -##main -labyrinth -queue -theorist -##istle -bharatiya -flexed -soundtracks -rooney -leftist -patrolling -wharton -plainly -alleviate -eastman -schuster -topographic -engages -immensely -unbearable -fairchild -1620 -dona -lurking -parisian -oliveira -ia -indictment -hahn -bangladeshi -##aster -vivo -##uming -##ential -antonia -expects -indoors -kildare -harlan -##logue -##ogenic -##sities -forgiven -##wat -childish -tavi -##mide -##orra -plausible -grimm -successively -scooted -##bola -##dget -##rith -spartans -emery -flatly -azure -epilogue -##wark -flourish -##iny -##tracted -##overs -##oshi -bestseller -distressed -receipt 
-spitting -hermit -topological -##cot -drilled -subunit -francs -##layer -eel -##fk -##itas -octopus -footprint -petitions -ufo -##say -##foil -interfering -leaking -palo -##metry -thistle -valiant -##pic -narayan -mcpherson -##fast -gonzales -##ym -##enne -dustin -novgorod -solos -##zman -doin -##raph -##patient -##meyer -soluble -ashland -cuffs -carole -pendleton -whistling -vassal -##river -deviation -revisited -constituents -rallied -rotate -loomed -##eil -##nting -amateurs -augsburg -auschwitz -crowns -skeletons -##cona -bonnet -257 -dummy -globalization -simeon -sleeper -mandal -differentiated -##crow -##mare -milne -bundled -exasperated -talmud -owes -segregated -##feng -##uary -dentist -piracy -props -##rang -devlin -##torium -malicious -paws -##laid -dependency -##ergy -##fers -##enna -258 -pistons -rourke -jed -grammatical -tres -maha -wig -512 -ghostly -jayne -##achal -##creen -##ilis -##lins -##rence -designate -##with -arrogance -cambodian -clones -showdown -throttle -twain -##ception -lobes -metz -nagoya -335 -braking -##furt -385 -roaming -##minster -amin -crippled -##37 -##llary -indifferent -hoffmann -idols -intimidating -1751 -261 -influenza -memo -onions -1748 -bandage -consciously -##landa -##rage -clandestine -observes -swiped -tangle -##ener -##jected -##trum -##bill -##lta -hugs -congresses -josiah -spirited -##dek -humanist -managerial -filmmaking -inmate -rhymes -debuting -grimsby -ur -##laze -duplicate -vigor -##tf -republished -bolshevik -refurbishment -antibiotics -martini -methane -newscasts -royale -horizons -levant -iain -visas -##ischen -paler -##around -manifestation -snuck -alf -chop -futile -pedestal -rehab -##kat -bmg -kerman -res -fairbanks -jarrett -abstraction -saharan -##zek -1746 -procedural -clearer -kincaid -sash -luciano -##ffey -crunch -helmut -##vara -revolutionaries -##tute -creamy -leach -##mmon -1747 -permitting -nes -plight -wendell -##lese -contra -ts -clancy -ipa -mach -staples -autopsy -disturbances -nueva -karin -pontiac -##uding -proxy -venerable -haunt -leto -bergman -expands -##helm -wal -##pipe -canning -celine -cords -obesity -##enary -intrusion -planner -##phate -reasoned -sequencing -307 -harrow -##chon -##dora -marred -mcintyre -repay -tarzan -darting -248 -harrisburg -margarita -repulsed -##hur -##lding -belinda -hamburger -novo -compliant -runways -bingham -registrar -skyscraper -ic -cuthbert -improvisation -livelihood -##corp -##elial -admiring -##dened -sporadic -believer -casablanca -popcorn -##29 -asha -shovel -##bek -##dice -coiled -tangible -##dez -casper -elsie -resin -tenderness -rectory -##ivision -avail -sonar -##mori -boutique -##dier -guerre -bathed -upbringing -vaulted -sandals -blessings -##naut -##utnant -1680 -306 -foxes -pia -corrosion -hesitantly -confederates -crystalline -footprints -shapiro -tirana -valentin -drones -45th -microscope -shipments -texted -inquisition -wry -guernsey -unauthorized -resigning -760 -ripple -schubert -stu -reassure -felony -##ardo -brittle -koreans -##havan -##ives -dun -implicit -tyres -##aldi -##lth -magnolia -##ehan -##puri -##poulos -aggressively -fei -gr -familiarity -##poo -indicative -##trust -fundamentally -jimmie -overrun -395 -anchors -moans -##opus -britannia -armagh -##ggle -purposely -seizing -##vao -bewildered -mundane -avoidance -cosmopolitan -geometridae -quartermaster -caf -415 -chatter -engulfed -gleam -purge -##icate -juliette -jurisprudence -guerra -revisions -##bn -casimir -brew -##jm -1749 -clapton -cloudy -conde -hermitage -278 -simulations -torches 
-vincenzo -matteo -##rill -hidalgo -booming -westbound -accomplishment -tentacles -unaffected -##sius -annabelle -flopped -sloping -##litz -dreamer -interceptor -vu -##loh -consecration -copying -messaging -breaker -climates -hospitalized -1752 -torino -afternoons -winfield -witnessing -##teacher -breakers -choirs -sawmill -coldly -##ege -sipping -haste -uninhabited -conical -bibliography -pamphlets -severn -edict -##oca -deux -illnesses -grips -##pl -rehearsals -sis -thinkers -tame -##keepers -1690 -acacia -reformer -##osed -##rys -shuffling -##iring -##shima -eastbound -ionic -rhea -flees -littered -##oum -rocker -vomiting -groaning -champ -overwhelmingly -civilizations -paces -sloop -adoptive -##tish -skaters -##vres -aiding -mango -##joy -nikola -shriek -##ignon -pharmaceuticals -##mg -tuna -calvert -gustavo -stocked -yearbook -##urai -##mana -computed -subsp -riff -hanoi -kelvin -hamid -moors -pastures -summons -jihad -nectar -##ctors -bayou -untitled -pleasing -vastly -republics -intellect -##η -##ulio -##tou -crumbling -stylistic -sb -##ی -consolation -frequented -h₂o -walden -widows -##iens -404 -##ignment -chunks -improves -288 -grit -recited -##dev -snarl -sociological -##arte -##gul -inquired -##held -bruise -clube -consultancy -homogeneous -hornets -multiplication -pasta -prick -savior -##grin -##kou -##phile -yoon -##gara -grimes -vanishing -cheering -reacting -bn -distillery -##quisite -##vity -coe -dockyard -massif -##jord -escorts -voss -##valent -byte -chopped -hawke -illusions -workings -floats -##koto -##vac -kv -annapolis -madden -##onus -alvaro -noctuidae -##cum -##scopic -avenge -steamboat -forte -illustrates -erika -##trip -570 -dew -nationalities -bran -manifested -thirsty -diversified -muscled -reborn -##standing -arson -##lessness -##dran -##logram -##boys -##kushima -##vious -willoughby -##phobia -286 -alsace -dashboard -yuki -##chai -granville -myspace -publicized -tricked -##gang -adjective -##ater -relic -reorganisation -enthusiastically -indications -saxe -##lassified -consolidate -iec -padua -helplessly -ramps -renaming -regulars -pedestrians -accents -convicts -inaccurate -lowers -mana -##pati -barrie -bjp -outta -someplace -berwick -flanking -invoked -marrow -sparsely -excerpts -clothed -rei -##ginal -wept -##straße -##vish -alexa -excel -##ptive -membranes -aquitaine -creeks -cutler -sheppard -implementations -ns -##dur -fragrance -budge -concordia -magnesium -marcelo -##antes -gladly -vibrating -##rral -##ggles -montrose -##omba -lew -seamus -1630 -cocky -##ament -##uen -bjorn -##rrick -fielder -fluttering -##lase -methyl -kimberley -mcdowell -reductions -barbed -##jic -##tonic -aeronautical -condensed -distracting -##promising -huffed -##cala -##sle -claudius -invincible -missy -pious -balthazar -ci -##lang -butte -combo -orson -##dication -myriad -1707 -silenced -##fed -##rh -coco -netball -yourselves -##oza -clarify -heller -peg -durban -etudes -offender -roast -blackmail -curvature -##woods -vile -309 -illicit -suriname -##linson -overture -1685 -bubbling -gymnast -tucking -##mming -##ouin -maldives -##bala -gurney -##dda -##eased -##oides -backside -pinto -jars -racehorse -tending -##rdial -baronetcy -wiener -duly -##rke -barbarian -cupping -flawed -##thesis -bertha -pleistocene -puddle -swearing -##nob -##tically -fleeting -prostate -amulet -educating -##mined -##iti -##tler -75th -jens -respondents -analytics -cavaliers -papacy -raju -##iente -##ulum -##tip -funnel -271 -disneyland -##lley -sociologist -##iam -2500 -faulkner -louvre -menon -##dson 
-276 -##ower -afterlife -mannheim -peptide -referees -comedians -meaningless -##anger -##laise -fabrics -hurley -renal -sleeps -##bour -##icle -breakout -kristin -roadside -animator -clover -disdain -unsafe -redesign -##urity -firth -barnsley -portage -reset -narrows -268 -commandos -expansive -speechless -tubular -##lux -essendon -eyelashes -smashwords -##yad -##bang -##claim -craved -sprinted -chet -somme -astor -wrocław -orton -266 -bane -##erving -##uing -mischief -##amps -##sund -scaling -terre -##xious -impairment -offenses -undermine -moi -soy -contiguous -arcadia -inuit -seam -##tops -macbeth -rebelled -##icative -##iot -590 -elaborated -frs -uniformed -##dberg -259 -powerless -priscilla -stimulated -980 -qc -arboretum -frustrating -trieste -bullock -##nified -enriched -glistening -intern -##adia -locus -nouvelle -ollie -ike -lash -starboard -ee -tapestry -headlined -hove -rigged -##vite -pollock -##yme -thrive -clustered -cas -roi -gleamed -olympiad -##lino -pressured -regimes -##hosis -##lick -ripley -##ophone -kickoff -gallon -rockwell -##arable -crusader -glue -revolutions -scrambling -1714 -grover -##jure -englishman -aztec -263 -contemplating -coven -ipad -preach -triumphant -tufts -##esian -rotational -##phus -328 -falkland -##brates -strewn -clarissa -rejoin -environmentally -glint -banded -drenched -moat -albanians -johor -rr -maestro -malley -nouveau -shaded -taxonomy -v6 -adhere -bunk -airfields -##ritan -1741 -encompass -remington -tran -##erative -amelie -mazda -friar -morals -passions -##zai -breadth -vis -##hae -argus -burnham -caressing -insider -rudd -##imov -##mini -##rso -italianate -murderous -textual -wainwright -armada -bam -weave -timer -##taken -##nh -fra -##crest -ardent -salazar -taps -tunis -##ntino -allegro -gland -philanthropic -##chester -implication -##optera -esq -judas -noticeably -wynn -##dara -inched -indexed -crises -villiers -bandit -royalties -patterned -cupboard -interspersed -accessory -isla -kendrick -entourage -stitches -##esthesia -headwaters -##ior -interlude -distraught -draught -1727 -##basket -biased -sy -transient -triad -subgenus -adapting -kidd -shortstop -##umatic -dimly -spiked -mcleod -reprint -nellie -pretoria -windmill -##cek -singled -##mps -273 -reunite -##orous -747 -bankers -outlying -##omp -##ports -##tream -apologies -cosmetics -patsy -##deh -##ocks -##yson -bender -nantes -serene -##nad -lucha -mmm -323 -##cius -##gli -cmll -coinage -nestor -juarez -##rook -smeared -sprayed -twitching -sterile -irina -embodied -juveniles -enveloped -miscellaneous -cancers -dq -gulped -luisa -crested -swat -donegal -ref -##anov -##acker -hearst -mercantile -##lika -doorbell -ua -vicki -##alla -##som -bilbao -psychologists -stryker -sw -horsemen -turkmenistan -wits -##national -anson -mathew -screenings -##umb -rihanna -##agne -##nessy -aisles -##iani -##osphere -hines -kenton -saskatoon -tasha -truncated -##champ -##itan -mildred -advises -fredrik -interpreting -inhibitors -##athi -spectroscopy -##hab -##kong -karim -panda -##oia -##nail -##vc -conqueror -kgb -leukemia -##dity -arrivals -cheered -pisa -phosphorus -shielded -##riated -mammal -unitarian -urgently -chopin -sanitary -##mission -spicy -drugged -hinges -##tort -tipping -trier -impoverished -westchester -##caster -267 -epoch -nonstop -##gman -##khov -aromatic -centrally -cerro -##tively -##vio -billions -modulation -sedimentary -283 -facilitating -outrageous -goldstein -##eak -##kt -ld -maitland -penultimate -pollard -##dance -fleets -spaceship -vertebrae -##nig -alcoholism -als 
-recital -##bham -##ference -##omics -m2 -##bm -trois -##tropical -##в -commemorates -##meric -marge -##raction -1643 -670 -cosmetic -ravaged -##ige -catastrophe -eng -##shida -albrecht -arterial -bellamy -decor -harmon -##rde -bulbs -synchronized -vito -easiest -shetland -shielding -wnba -##glers -##ssar -##riam -brianna -cumbria -##aceous -##rard -cores -thayer -##nsk -brood -hilltop -luminous -carts -keynote -larkin -logos -##cta -##ا -##mund -##quay -lilith -tinted -277 -wrestle -mobilization -##uses -sequential -siam -bloomfield -takahashi -274 -##ieving -presenters -ringo -blazed -witty -##oven -##ignant -devastation -haydn -harmed -newt -therese -##peed -gershwin -molina -rabbis -sudanese -001 -innate -restarted -##sack -##fus -slices -wb -##shah -enroll -hypothetical -hysterical -1743 -fabio -indefinite -warped -##hg -exchanging -525 -unsuitable -##sboro -gallo -1603 -bret -cobalt -homemade -##hunter -mx -operatives -##dhar -terraces -durable -latch -pens -whorls -##ctuated -##eaux -billing -ligament -succumbed -##gly -regulators -spawn -##brick -##stead -filmfare -rochelle -##nzo -1725 -circumstance -saber -supplements -##nsky -##tson -crowe -wellesley -carrot -##9th -##movable -primate -drury -sincerely -topical -##mad -##rao -callahan -kyiv -smarter -tits -undo -##yeh -announcements -anthologies -barrio -nebula -##islaus -##shaft -##tyn -bodyguards -2021 -assassinate -barns -emmett -scully -##mah -##yd -##eland -##tino -##itarian -demoted -gorman -lashed -prized -adventist -writ -##gui -alla -invertebrates -##ausen -1641 -amman -1742 -align -healy -redistribution -##gf -##rize -insulation -##drop -adherents -hezbollah -vitro -ferns -yanking -269 -php -registering -uppsala -cheerleading -confines -mischievous -tully -##ross -49th -docked -roam -stipulated -pumpkin -##bry -prompt -##ezer -blindly -shuddering -craftsmen -frail -scented -katharine -scramble -shaggy -sponge -helix -zaragoza -279 -##52 -43rd -backlash -fontaine -seizures -posse -cowan -nonfiction -telenovela -wwii -hammered -undone -##gpur -encircled -irs -##ivation -artefacts -oneself -searing -smallpox -##belle -##osaurus -shandong -breached -upland -blushing -rankin -infinitely -psyche -tolerated -docking -evicted -##col -unmarked -##lving -gnome -lettering -litres -musique -##oint -benevolent -##jal -blackened -##anna -mccall -racers -tingle -##ocene -##orestation -introductions -radically -292 -##hiff -##باد -1610 -1739 -munchen -plead -##nka -condo -scissors -##sight -##tens -apprehension -##cey -##yin -hallmark -watering -formulas -sequels -##llas -aggravated -bae -commencing -##building -enfield -prohibits -marne -vedic -civilized -euclidean -jagger -beforehand -blasts -dumont -##arney -##nem -740 -conversions -hierarchical -rios -simulator -##dya -##lellan -hedges -oleg -thrusts -shadowed -darby -maximize -1744 -gregorian -##nded -##routed -sham -unspecified -##hog -emory -factual -##smo -##tp -fooled -##rger -ortega -wellness -marlon -##oton -##urance -casket -keating -ley -enclave -##ayan -char -influencing -jia -##chenko -412 -ammonia -erebidae -incompatible -violins -cornered -##arat -grooves -astronauts -columbian -rampant -fabrication -kyushu -mahmud -vanish -##dern -mesopotamia -##lete -ict -##rgen -caspian -kenji -pitted -##vered -999 -grimace -roanoke -tchaikovsky -twinned -##analysis -##awan -xinjiang -arias -clemson -kazakh -sizable -1662 -##khand -##vard -plunge -tatum -vittorio -##nden -cholera -##dana -##oper -bracing -indifference -projectile -superliga -##chee -realises -upgrading -299 -porte 
-retribution -##vies -nk -stil -##resses -ama -bureaucracy -blackberry -bosch -testosterone -collapses -greer -##pathic -ioc -fifties -malls -##erved -bao -baskets -adolescents -siegfried -##osity -##tosis -mantra -detecting -existent -fledgling -##cchi -dissatisfied -gan -telecommunication -mingled -sobbed -6000 -controversies -outdated -taxis -##raus -fright -slams -##lham -##fect -##tten -detectors -fetal -tanned -##uw -fray -goth -olympian -skipping -mandates -scratches -sheng -unspoken -hyundai -tracey -hotspur -restrictive -##buch -americana -mundo -##bari -burroughs -diva -vulcan -##6th -distinctions -thumping -##ngen -mikey -sheds -fide -rescues -springsteen -vested -valuation -##ece -##ely -pinnacle -rake -sylvie -##edo -almond -quivering -##irus -alteration -faltered -##wad -51st -hydra -ticked -##kato -recommends -##dicated -antigua -arjun -stagecoach -wilfred -trickle -pronouns -##pon -aryan -nighttime -##anian -gall -pea -stitch -##hei -leung -milos -##dini -eritrea -nexus -starved -snowfall -kant -parasitic -cot -discus -hana -strikers -appleton -kitchens -##erina -##partisan -##itha -##vius -disclose -metis -##channel -1701 -tesla -##vera -fitch -1735 -blooded -##tila -decimal -##tang -##bai -cyclones -eun -bottled -peas -pensacola -basha -bolivian -crabs -boil -lanterns -partridge -roofed -1645 -necks -##phila -opined -patting -##kla -##lland -chuckles -volta -whereupon -##nche -devout -euroleague -suicidal -##dee -inherently -involuntary -knitting -nasser -##hide -puppets -colourful -courageous -southend -stills -miraculous -hodgson -richer -rochdale -ethernet -greta -uniting -prism -umm -##haya -##itical -##utation -deterioration -pointe -prowess -##ropriation -lids -scranton -billings -subcontinent -##koff -##scope -brute -kellogg -psalms -degraded -##vez -stanisław -##ructured -ferreira -pun -astonishing -gunnar -##yat -arya -prc -gottfried -##tight -excursion -##ographer -dina -##quil -##nare -huffington -illustrious -wilbur -gundam -verandah -##zard -naacp -##odle -constructive -fjord -kade -##naud -generosity -thrilling -baseline -cayman -frankish -plastics -accommodations -zoological -##fting -cedric -qb -motorized -##dome -##otted -squealed -tackled -canucks -budgets -situ -asthma -dail -gabled -grasslands -whimpered -writhing -judgments -##65 -minnie -pv -##carbon -bananas -grille -domes -monique -odin -maguire -markham -tierney -##estra -##chua -libel -poke -speedy -atrium -laval -notwithstanding -##edly -fai -kala -##sur -robb -##sma -listings -luz -supplementary -tianjin -##acing -enzo -jd -ric -scanner -croats -transcribed -##49 -arden -cv -##hair -##raphy -##lver -##uy -357 -seventies -staggering -alam -horticultural -hs -regression -timbers -blasting -##ounded -montagu -manipulating -##cit -catalytic -1550 -troopers -##meo -condemnation -fitzpatrick -##oire -##roved -inexperienced -1670 -castes -##lative -outing -314 -dubois -flicking -quarrel -ste -learners -1625 -iq -whistled -##class -282 -classify -tariffs -temperament -355 -folly -liszt -##yles -immersed -jordanian -ceasefire -apparel -extras -maru -fished -##bio -harta -stockport -assortment -craftsman -paralysis -transmitters -##cola -blindness -##wk -fatally -proficiency -solemnly -##orno -repairing -amore -groceries -ultraviolet -##chase -schoolhouse -##tua -resurgence -nailed -##otype -##× -ruse -saliva -diagrams -##tructing -albans -rann -thirties -1b -antennas -hilarious -cougars -paddington -stats -##eger -breakaway -ipod -reza -authorship -prohibiting -scoffed -##etz -##ttle -conscription 
-defected -trondheim -##fires -ivanov -keenan -##adan -##ciful -##fb -##slow -locating -##ials -##tford -cadiz -basalt -blankly -interned -rags -rattling -##tick -carpathian -reassured -sync -bum -guildford -iss -staunch -##onga -astronomers -sera -sofie -emergencies -susquehanna -##heard -duc -mastery -vh1 -williamsburg -bayer -buckled -craving -##khan -##rdes -bloomington -##write -alton -barbecue -##bians -justine -##hri -##ndt -delightful -smartphone -newtown -photon -retrieval -peugeot -hissing -##monium -##orough -flavors -lighted -relaunched -tainted -##games -##lysis -anarchy -microscopic -hopping -adept -evade -evie -##beau -inhibit -sinn -adjustable -hurst -intuition -wilton -cisco -44th -lawful -lowlands -stockings -thierry -##dalen -##hila -##nai -fates -prank -tb -maison -lobbied -provocative -1724 -4a -utopia -##qual -carbonate -gujarati -purcell -##rford -curtiss -##mei -overgrown -arenas -mediation -swallows -##rnik -respectful -turnbull -##hedron -##hope -alyssa -ozone -##ʻi -ami -gestapo -johansson -snooker -canteen -cuff -declines -empathy -stigma -##ags -##iner -##raine -taxpayers -gui -volga -##wright -##copic -lifespan -overcame -tattooed -enactment -giggles -##ador -##camp -barrington -bribe -obligatory -orbiting -peng -##enas -elusive -sucker -##vating -cong -hardship -empowered -anticipating -estrada -cryptic -greasy -detainees -planck -sudbury -plaid -dod -marriott -kayla -##ears -##vb -##zd -mortally -##hein -cognition -radha -319 -liechtenstein -meade -richly -argyle -harpsichord -liberalism -trumpets -lauded -tyrant -salsa -tiled -lear -promoters -reused -slicing -trident -##chuk -##gami -##lka -cantor -checkpoint -##points -gaul -leger -mammalian -##tov -##aar -##schaft -doha -frenchman -nirvana -##vino -delgado -headlining -##eron -##iography -jug -tko -1649 -naga -intersections -##jia -benfica -nawab -##suka -ashford -gulp -##deck -##vill -##rug -brentford -frazier -pleasures -dunne -potsdam -shenzhen -dentistry -##tec -flanagan -##dorff -##hear -chorale -dinah -prem -quezon -##rogated -relinquished -sutra -terri -##pani -flaps -##rissa -poly -##rnet -homme -aback -##eki -linger -womb -##kson -##lewood -doorstep -orthodoxy -threaded -westfield -##rval -dioceses -fridays -subsided -##gata -loyalists -##biotic -##ettes -letterman -lunatic -prelate -tenderly -invariably -souza -thug -winslow -##otide -furlongs -gogh -jeopardy -##runa -pegasus -##umble -humiliated -standalone -tagged -##roller -freshmen -klan -##bright -attaining -initiating -transatlantic -logged -viz -##uance -1723 -combatants -intervening -stephane -chieftain -despised -grazed -317 -cdc -galveston -godzilla -macro -simulate -##planes -parades -##esses -960 -##ductive -##unes -equator -overdose -##cans -##hosh -##lifting -joshi -epstein -sonora -treacherous -aquatics -manchu -responsive -##sation -supervisory -##christ -##llins -##ibar -##balance -##uso -kimball -karlsruhe -mab -##emy -ignores -phonetic -reuters -spaghetti -820 -almighty -danzig -rumbling -tombstone -designations -lured -outset -##felt -supermarkets -##wt -grupo -kei -kraft -susanna -##blood -comprehension -genealogy -##aghan -##verted -redding -##ythe -1722 -bowing -##pore -##roi -lest -sharpened -fulbright -valkyrie -sikhs -##unds -swans -bouquet -merritt -##tage -##venting -commuted -redhead -clerks -leasing -cesare -dea -hazy -##vances -fledged -greenfield -servicemen -##gical -armando -blackout -dt -sagged -downloadable -intra -potion -pods -##4th -##mism -xp -attendants -gambia -stale -##ntine -plump -asteroids 
-rediscovered -buds -flea -hive -##neas -1737 -classifications -debuts -##eles -olympus -scala -##eurs -##gno -##mute -hummed -sigismund -visuals -wiggled -await -pilasters -clench -sulfate -##ances -bellevue -enigma -trainee -snort -##sw -clouded -denim -##rank -##rder -churning -hartman -lodges -riches -sima -##missible -accountable -socrates -regulates -mueller -##cr -1702 -avoids -solids -himalayas -nutrient -pup -##jevic -squat -fades -nec -##lates -##pina -##rona -##ου -privateer -tequila -##gative -##mpton -apt -hornet -immortals -##dou -asturias -cleansing -dario -##rries -##anta -etymology -servicing -zhejiang -##venor -##nx -horned -erasmus -rayon -relocating -£10 -##bags -escalated -promenade -stubble -2010s -artisans -axial -liquids -mora -sho -yoo -##tsky -bundles -oldies -##nally -notification -bastion -##ths -sparkle -##lved -1728 -leash -pathogen -highs -##hmi -immature -880 -gonzaga -ignatius -mansions -monterrey -sweets -bryson -##loe -polled -regatta -brightest -pei -rosy -squid -hatfield -payroll -addict -meath -cornerback -heaviest -lodging -##mage -capcom -rippled -##sily -barnet -mayhem -ymca -snuggled -rousseau -##cute -blanchard -284 -fragmented -leighton -chromosomes -risking -##md -##strel -##utter -corinne -coyotes -cynical -hiroshi -yeomanry -##ractive -ebook -grading -mandela -plume -agustin -magdalene -##rkin -bea -femme -trafford -##coll -##lun -##tance -52nd -fourier -upton -##mental -camilla -gust -iihf -islamabad -longevity -##kala -feldman -netting -##rization -endeavour -foraging -mfa -orr -##open -greyish -contradiction -graz -##ruff -handicapped -marlene -tweed -oaxaca -spp -campos -miocene -pri -configured -cooks -pluto -cozy -pornographic -##entes -70th -fairness -glided -jonny -lynne -rounding -sired -##emon -##nist -remade -uncover -##mack -complied -lei -newsweek -##jured -##parts -##enting -##pg -293 -finer -guerrillas -athenian -deng -disused -stepmother -accuse -gingerly -seduction -521 -confronting -##walker -##going -gora -nostalgia -sabres -virginity -wrenched -##minated -syndication -wielding -eyre -##56 -##gnon -##igny -behaved -taxpayer -sweeps -##growth -childless -gallant -##ywood -amplified -geraldine -scrape -##ffi -babylonian -fresco -##rdan -##kney -##position -1718 -restricting -tack -fukuoka -osborn -selector -partnering -##dlow -318 -gnu -kia -tak -whitley -gables -##54 -##mania -mri -softness -immersion -##bots -##evsky -1713 -chilling -insignificant -pcs -##uis -elites -lina -purported -supplemental -teaming -##americana -##dding -##inton -proficient -rouen -##nage -##rret -niccolo -selects -##bread -fluffy -1621 -gruff -knotted -mukherjee -polgara -thrash -nicholls -secluded -smoothing -thru -corsica -loaf -whitaker -inquiries -##rrier -##kam -indochina -289 -marlins -myles -peking -##tea -extracts -pastry -superhuman -connacht -vogel -##ditional -##het -##udged -##lash -gloss -quarries -refit -teaser -##alic -##gaon -20s -materialized -sling -camped -pickering -tung -tracker -pursuant -##cide -cranes -soc -##cini -##typical -##viere -anhalt -overboard -workout -chores -fares -orphaned -stains -##logie -fenton -surpassing -joyah -triggers -##itte -grandmaster -##lass -##lists -clapping -fraudulent -ledger -nagasaki -##cor -##nosis -##tsa -eucalyptus -tun -##icio -##rney -##tara -dax -heroism -ina -wrexham -onboard -unsigned -##dates -moshe -galley -winnie -droplets -exiles -praises -watered -noodles -##aia -fein -adi -leland -multicultural -stink -bingo -comets -erskine -modernized -canned -constraint -domestically 
-chemotherapy -featherweight -stifled -##mum -darkly -irresistible -refreshing -hasty -isolate -##oys -kitchener -planners -##wehr -cages -yarn -implant -toulon -elects -childbirth -yue -##lind -##lone -cn -rightful -sportsman -junctions -remodeled -specifies -##rgh -291 -##oons -complimented -##urgent -lister -ot -##logic -bequeathed -cheekbones -fontana -gabby -##dial -amadeus -corrugated -maverick -resented -triangles -##hered -##usly -nazareth -tyrol -1675 -assent -poorer -sectional -aegean -##cous -296 -nylon -ghanaian -##egorical -##weig -cushions -forbid -fusiliers -obstruction -somerville -##scia -dime -earrings -elliptical -leyte -oder -polymers -timmy -atm -midtown -piloted -settles -continual -externally -mayfield -##uh -enrichment -henson -keane -persians -1733 -benji -braden -pep -324 -##efe -contenders -pepsi -valet -##isches -298 -##asse -##earing -goofy -stroll -##amen -authoritarian -occurrences -adversary -ahmedabad -tangent -toppled -dorchester -1672 -modernism -marxism -islamist -charlemagne -exponential -racks -unicode -brunette -mbc -pic -skirmish -##bund -##lad -##powered -##yst -hoisted -messina -shatter -##ctum -jedi -vantage -##music -##neil -clemens -mahmoud -corrupted -authentication -lowry -nils -##washed -omnibus -wounding -jillian -##itors -##opped -serialized -narcotics -handheld -##arm -##plicity -intersecting -stimulating -##onis -crate -fellowships -hemingway -casinos -climatic -fordham -copeland -drip -beatty -leaflets -robber -brothel -madeira -##hedral -sphinx -ultrasound -##vana -valor -forbade -leonid -villas -##aldo -duane -marquez -##cytes -disadvantaged -forearms -kawasaki -reacts -consular -lax -uncles -uphold -##hopper -concepcion -dorsey -lass -##izan -arching -passageway -1708 -researches -tia -internationals -##graphs -##opers -distinguishes -javanese -divert -##uven -plotted -##listic -##rwin -##erik -##tify -affirmative -signifies -validation -##bson -kari -felicity -georgina -zulu -##eros -##rained -##rath -overcoming -##dot -argyll -##rbin -1734 -chiba -ratification -windy -earls -parapet -##marks -hunan -pristine -astrid -punta -##gart -brodie -##kota -##oder -malaga -minerva -rouse -##phonic -bellowed -pagoda -portals -reclamation -##gur -##odies -##⁄₄ -parentheses -quoting -allergic -palette -showcases -benefactor -heartland -nonlinear -##tness -bladed -cheerfully -scans -##ety -##hone -1666 -girlfriends -pedersen -hiram -sous -##liche -##nator -1683 -##nery -##orio -##umen -bobo -primaries -smiley -##cb -unearthed -uniformly -fis -metadata -1635 -ind -##oted -recoil -##titles -##tura -##ια -406 -hilbert -jamestown -mcmillan -tulane -seychelles -##frid -antics -coli -fated -stucco -##grants -1654 -bulky -accolades -arrays -caledonian -carnage -optimism -puebla -##tative -##cave -enforcing -rotherham -seo -dunlop -aeronautics -chimed -incline -zoning -archduke -hellenistic -##oses -##sions -candi -thong -##ople -magnate -rustic -##rsk -projective -slant -##offs -danes -hollis -vocalists -##ammed -congenital -contend -gesellschaft -##ocating -##pressive -douglass -quieter -##cm -##kshi -howled -salim -spontaneously -townsville -buena -southport -##bold -kato -1638 -faerie -stiffly -##vus -##rled -297 -flawless -realising -taboo -##7th -bytes -straightening -356 -jena -##hid -##rmin -cartwright -berber -bertram -soloists -411 -noses -417 -coping -fission -hardin -inca -##cen -1717 -mobilized -vhf -##raf -biscuits -curate -##85 -##anial -331 -gaunt -neighbourhoods -1540 -##abas -blanca -bypassed -sockets -behold -coincidentally -##bane 
-nara -shave -splinter -terrific -##arion -##erian -commonplace -juris -redwood -waistband -boxed -caitlin -fingerprints -jennie -naturalized -##ired -balfour -craters -jody -bungalow -hugely -quilt -glitter -pigeons -undertaker -bulging -constrained -goo -##sil -##akh -assimilation -reworked -##person -persuasion -##pants -felicia -##cliff -##ulent -1732 -explodes -##dun -##inium -##zic -lyman -vulture -hog -overlook -begs -northwards -ow -spoil -##urer -fatima -favorably -accumulate -sargent -sorority -corresponded -dispersal -kochi -toned -##imi -##lita -internacional -newfound -##agger -##lynn -##rigue -booths -peanuts -##eborg -medicare -muriel -nur -##uram -crates -millennia -pajamas -worsened -##breakers -jimi -vanuatu -yawned -##udeau -carousel -##hony -hurdle -##ccus -##mounted -##pod -rv -##eche -airship -ambiguity -compulsion -recapture -##claiming -arthritis -##osomal -1667 -asserting -ngc -sniffing -dade -discontent -glendale -ported -##amina -defamation -rammed -##scent -fling -livingstone -##fleet -875 -##ppy -apocalyptic -comrade -lcd -##lowe -cessna -eine -persecuted -subsistence -demi -hoop -reliefs -710 -coptic -progressing -stemmed -perpetrators -1665 -priestess -##nio -dobson -ebony -rooster -itf -tortricidae -##bbon -##jian -cleanup -##jean -##øy -1721 -eighties -taxonomic -holiness -##hearted -##spar -antilles -showcasing -stabilized -##nb -gia -mascara -michelangelo -dawned -##uria -##vinsky -extinguished -fitz -grotesque -£100 -##fera -##loid -##mous -barges -neue -throbbed -cipher -johnnie -##a1 -##mpt -outburst -##swick -spearheaded -administrations -c1 -heartbreak -pixels -pleasantly -##enay -lombardy -plush -##nsed -bobbie -##hly -reapers -tremor -xiang -minogue -substantive -hitch -barak -##wyl -kwan -##encia -910 -obscene -elegance -indus -surfer -bribery -conserve -##hyllum -##masters -horatio -##fat -apes -rebound -psychotic -##pour -iteration -##mium -##vani -botanic -horribly -antiques -dispose -paxton -##hli -##wg -timeless -1704 -disregard -engraver -hounds -##bau -##version -looted -uno -facilitates -groans -masjid -rutland -antibody -disqualification -decatur -footballers -quake -slacks -48th -rein -scribe -stabilize -commits -exemplary -tho -##hort -##chison -pantry -traversed -##hiti -disrepair -identifiable -vibrated -baccalaureate -##nnis -csa -interviewing -##iensis -##raße -greaves -wealthiest -343 -classed -jogged -£5 -##58 -##atal -illuminating -knicks -respecting -##uno -scrubbed -##iji -##dles -kruger -moods -growls -raider -silvia -chefs -kam -vr -cree -percival -##terol -gunter -counterattack -defiant -henan -ze -##rasia -##riety -equivalence -submissions -##fra -##thor -bautista -mechanically -##heater -cornice -herbal -templar -##mering -outputs -ruining -ligand -renumbered -extravagant -mika -blockbuster -eta -insurrection -##ilia -darkening -ferocious -pianos -strife -kinship -##aer -melee -##anor -##iste -##may -##oue -decidedly -weep -##jad -##missive -##ppel -354 -puget -unease -##gnant -1629 -hammering -kassel -ob -wessex -##lga -bromwich -egan -paranoia -utilization -##atable -##idad -contradictory -provoke -##ols -##ouring -##tangled -knesset -##very -##lette -plumbing -##sden -##¹ -greensboro -occult -sniff -338 -zev -beaming -gamer -haggard -mahal -##olt -##pins -mendes -utmost -briefing -gunnery -##gut -##pher -##zh -##rok -1679 -khalifa -sonya -##boot -principals -urbana -wiring -##liffe -##minating -##rrado -dahl -nyu -skepticism -np -townspeople -ithaca -lobster -somethin -##fur -##arina -##−1 -freighter -zimmerman -biceps 
-contractual -##herton -amend -hurrying -subconscious -##anal -336 -meng -clermont -spawning -##eia -##lub -dignitaries -impetus -snacks -spotting -twigs -##bilis -##cz -##ouk -libertadores -nic -skylar -##aina -##firm -gustave -asean -##anum -dieter -legislatures -flirt -bromley -trolls -umar -##bbies -##tyle -blah -parc -bridgeport -crank -negligence -##nction -46th -constantin -molded -bandages -seriousness -00pm -siegel -carpets -compartments -upbeat -statehood -##dner -##edging -marko -730 -platt -##hane -paving -##iy -1738 -abbess -impatience -limousine -nbl -##talk -441 -lucille -mojo -nightfall -robbers -##nais -karel -brisk -calves -replicate -ascribed -telescopes -##olf -intimidated -##reen -ballast -specialization -##sit -aerodynamic -caliphate -rainer -visionary -##arded -epsilon -##aday -##onte -aggregation -auditory -boosted -reunification -kathmandu -loco -robyn -402 -acknowledges -appointing -humanoid -newell -redeveloped -restraints -##tained -barbarians -chopper -1609 -italiana -##lez -##lho -investigates -wrestlemania -##anies -##bib -690 -##falls -creaked -dragoons -gravely -minions -stupidity -volley -##harat -##week -musik -##eries -##uously -fungal -massimo -semantics -malvern -##ahl -##pee -discourage -embryo -imperialism -1910s -profoundly -##ddled -jiangsu -sparkled -stat -##holz -sweatshirt -tobin -##iction -sneered -##cheon -##oit -brit -causal -smyth -##neuve -diffuse -perrin -silvio -##ipes -##recht -detonated -iqbal -selma -##nism -##zumi -roasted -##riders -tay -##ados -##mament -##mut -##rud -840 -completes -nipples -cfa -flavour -hirsch -##laus -calderon -sneakers -moravian -##ksha -1622 -rq -294 -##imeters -bodo -##isance -##pre -##ronia -anatomical -excerpt -##lke -dh -kunst -##tablished -##scoe -biomass -panted -unharmed -gael -housemates -montpellier -##59 -coa -rodents -tonic -hickory -singleton -##taro -451 -1719 -aldo -breaststroke -dempsey -och -rocco -##cuit -merton -dissemination -midsummer -serials -##idi -haji -polynomials -##rdon -gs -enoch -prematurely -shutter -taunton -£3 -##grating -##inates -archangel -harassed -##asco -326 -archway -dazzling -##ecin -1736 -sumo -wat -##kovich -1086 -honneur -##ently -##nostic -##ttal -##idon -1605 -403 -1716 -blogger -rents -##gnan -hires -##ikh -##dant -howie -##rons -handler -retracted -shocks -1632 -arun -duluth -kepler -trumpeter -##lary -peeking -seasoned -trooper -##mara -laszlo -##iciencies -##rti -heterosexual -##inatory -##ssion -indira -jogging -##inga -##lism -beit -dissatisfaction -malice -##ately -nedra -peeling -##rgeon -47th -stadiums -475 -vertigo -##ains -iced -restroom -##plify -##tub -illustrating -pear -##chner -##sibility -inorganic -rappers -receipts -watery -##kura -lucinda -##oulos -reintroduced -##8th -##tched -gracefully -saxons -nutritional -wastewater -rained -favourites -bedrock -fisted -hallways -likeness -upscale -##lateral -1580 -blinds -prequel -##pps -##tama -deter -humiliating -restraining -tn -vents -1659 -laundering -recess -rosary -tractors -coulter -federer -##ifiers -##plin -persistence -##quitable -geschichte -pendulum -quakers -##beam -bassett -pictorial -buffet -koln -##sitor -drills -reciprocal -shooters -##57 -##cton -##tees -converge -pip -dmitri -donnelly -yamamoto -aqua -azores -demographics -hypnotic -spitfire -suspend -wryly -roderick -##rran -sebastien -##asurable -mavericks -##fles -##200 -himalayan -prodigy -##iance -transvaal -demonstrators -handcuffs -dodged -mcnamara -sublime -1726 -crazed -##efined -##till -ivo -pondered -reconciled -shrill -sava 
-##duk -bal -cad -heresy -jaipur -goran -##nished -341 -lux -shelly -whitehall -##hre -israelis -peacekeeping -##wled -1703 -demetrius -ousted -##arians -##zos -beale -anwar -backstroke -raged -shrinking -cremated -##yck -benign -towing -wadi -darmstadt -landfill -parana -soothe -colleen -sidewalks -mayfair -tumble -hepatitis -ferrer -superstructure -##gingly -##urse -##wee -anthropological -translators -##mies -closeness -hooves -##pw -mondays -##roll -##vita -landscaping -##urized -purification -sock -thorns -thwarted -jalan -tiberius -##taka -saline -##rito -confidently -khyber -sculptors -##ij -brahms -hammersmith -inspectors -battista -fivb -fragmentation -hackney -##uls -arresting -exercising -antoinette -bedfordshire -##zily -dyed -##hema -1656 -racetrack -variability -##tique -1655 -austrians -deteriorating -madman -theorists -aix -lehman -weathered -1731 -decreed -eruptions -1729 -flaw -quinlan -sorbonne -flutes -nunez -1711 -adored -downwards -fable -rasped -1712 -moritz -mouthful -renegade -shivers -stunts -dysfunction -restrain -translit -327 -pancakes -##avio -##cision -##tray -351 -vial -##lden -bain -##maid -##oxide -chihuahua -malacca -vimes -##rba -##rnier -1664 -donnie -plaques -##ually -337 -bangs -floppy -huntsville -loretta -nikolay -##otte -eater -handgun -ubiquitous -##hett -eras -zodiac -1634 -##omorphic -1820s -##zog -cochran -##bula -##lithic -warring -##rada -dalai -excused -blazers -mcconnell -reeling -bot -este -##abi -geese -hoax -taxon -##bla -guitarists -##icon -condemning -hunts -inversion -moffat -taekwondo -##lvis -1624 -stammered -##rest -##rzy -sousa -fundraiser -marylebone -navigable -uptown -cabbage -daniela -salman -shitty -whimper -##kian -##utive -programmers -protections -rm -##rmi -##rued -forceful -##enes -fuss -##tao -##wash -brat -oppressive -reykjavik -spartak -ticking -##inkles -##kiewicz -adolph -horst -maui -protege -straighten -cpc -landau -concourse -clements -resultant -##ando -imaginative -joo -reactivated -##rem -##ffled -##uising -consultative -##guide -flop -kaitlyn -mergers -parenting -somber -##vron -supervise -vidhan -##imum -courtship -exemplified -harmonies -medallist -refining -##rrow -##ка -amara -##hum -780 -goalscorer -sited -overshadowed -rohan -displeasure -secretive -multiplied -osman -##orth -engravings -padre -##kali -##veda -miniatures -mis -##yala -clap -pali -rook -##cana -1692 -57th -antennae -astro -oskar -1628 -bulldog -crotch -hackett -yucatan -##sure -amplifiers -brno -ferrara -migrating -##gree -thanking -turing -##eza -mccann -ting -andersson -onslaught -gaines -ganga -incense -standardization -##mation -sentai -scuba -stuffing -turquoise -waivers -alloys -##vitt -regaining -vaults -##clops -##gizing -digger -furry -memorabilia -probing -##iad -payton -rec -deutschland -filippo -opaque -seamen -zenith -afrikaans -##filtration -disciplined -inspirational -##merie -banco -confuse -grafton -tod -##dgets -championed -simi -anomaly -biplane -##ceptive -electrode -##para -1697 -cleavage -crossbow -swirl -informant -##lars -##osta -afi -bonfire -spec -##oux -lakeside -slump -##culus -##lais -##qvist -##rrigan -1016 -facades -borg -inwardly -cervical -xl -pointedly -050 -stabilization -##odon -chests -1699 -hacked -ctv -orthogonal -suzy -##lastic -gaulle -jacobite -rearview -##cam -##erted -ashby -##drik -##igate -##mise -##zbek -affectionately -canine -disperse -latham -##istles -##ivar -spielberg -##orin -##idium -ezekiel -cid -##sg -durga -middletown -##cina -customized -frontiers -harden -##etano -##zzy -1604 
-bolsheviks -##66 -coloration -yoko -##bedo -briefs -slabs -debra -liquidation -plumage -##oin -blossoms -dementia -subsidy -1611 -proctor -relational -jerseys -parochial -ter -##ici -esa -peshawar -cavalier -loren -cpi -idiots -shamrock -1646 -dutton -malabar -mustache -##endez -##ocytes -referencing -terminates -marche -yarmouth -##sop -acton -mated -seton -subtly -baptised -beige -extremes -jolted -kristina -telecast -##actic -safeguard -waldo -##baldi -##bular -endeavors -sloppy -subterranean -##ensburg -##itung -delicately -pigment -tq -##scu -1626 -##ound -collisions -coveted -herds -##personal -##meister -##nberger -chopra -##ricting -abnormalities -defective -galician -lucie -##dilly -alligator -likened -##genase -burundi -clears -complexion -derelict -deafening -diablo -fingered -champaign -dogg -enlist -isotope -labeling -mrna -##erre -brilliance -marvelous -##ayo -1652 -crawley -ether -footed -dwellers -deserts -hamish -rubs -warlock -skimmed -##lizer -870 -buick -embark -heraldic -irregularities -##ajan -kiara -##kulam -##ieg -antigen -kowalski -##lge -oakley -visitation -##mbit -vt -##suit -1570 -murderers -##miento -##rites -chimneys -##sling -condemn -custer -exchequer -havre -##ghi -fluctuations -##rations -dfb -hendricks -vaccines -##tarian -nietzsche -biking -juicy -##duced -brooding -scrolling -selangor -##ragan -352 -annum -boomed -seminole -sugarcane -##dna -departmental -dismissing -innsbruck -arteries -ashok -batavia -daze -kun -overtook -##rga -##tlan -beheaded -gaddafi -holm -electronically -faulty -galilee -fractures -kobayashi -##lized -gunmen -magma -aramaic -mala -eastenders -inference -messengers -bf -##qu -407 -bathrooms -##vere -1658 -flashbacks -ideally -misunderstood -##jali -##weather -mendez -##grounds -505 -uncanny -##iii -1709 -friendships -##nbc -sacrament -accommodated -reiterated -logistical -pebbles -thumped -##escence -administering -decrees -drafts -##flight -##cased -##tula -futuristic -picket -intimidation -winthrop -##fahan -interfered -339 -afar -francoise -morally -uta -cochin -croft -dwarfs -##bruck -##dents -##nami -biker -##hner -##meral -nano -##isen -##ometric -##pres -##ан -brightened -meek -parcels -securely -gunners -##jhl -##zko -agile -hysteria -##lten -##rcus -bukit -champs -chevy -cuckoo -leith -sadler -theologians -welded -##section -1663 -jj -plurality -xander -##rooms -##formed -shredded -temps -intimately -pau -tormented -##lok -##stellar -1618 -charred -ems -essen -##mmel -alarms -spraying -ascot -blooms -twinkle -##abia -##apes -internment -obsidian -##chaft -snoop -##dav -##ooping -malibu -##tension -quiver -##itia -hays -mcintosh -travers -walsall -##ffie -1623 -beverley -schwarz -plunging -structurally -m3 -rosenthal -vikram -##tsk -770 -ghz -##onda -##tiv -chalmers -groningen -pew -reckon -unicef -##rvis -55th -##gni -1651 -sulawesi -avila -cai -metaphysical -screwing -turbulence -##mberg -augusto -samba -56th -baffled -momentary -toxin -##urian -##wani -aachen -condoms -dali -steppe -##3d -##app -##oed -##year -adolescence -dauphin -electrically -inaccessible -microscopy -nikita -##ega -atv -##cel -##enter -##oles -##oteric -##ы -accountants -punishments -wrongly -bribes -adventurous -clinch -flinders -southland -##hem -##kata -gough -##ciency -lads -soared -##ה -undergoes -deformation -outlawed -rubbish -##arus -##mussen -##nidae -##rzburg -arcs -##ingdon -##tituted -1695 -wheelbase -wheeling -bombardier -campground -zebra -##lices -##oj -##bain -lullaby -##ecure -donetsk -wylie -grenada -##arding -##ης -squinting 
-eireann -opposes -##andra -maximal -runes -##broken -##cuting -##iface -##ror -##rosis -additive -britney -adultery -triggering -##drome -detrimental -aarhus -containment -jc -swapped -vichy -##ioms -madly -##oric -##rag -brant -##ckey -##trix -1560 -1612 -broughton -rustling -##stems -##uder -asbestos -mentoring -##nivorous -finley -leaps -##isan -apical -pry -slits -substitutes -##dict -intuitive -fantasia -insistent -unreasonable -##igen -##vna -domed -hannover -margot -ponder -##zziness -impromptu -jian -lc -rampage -stemming -##eft -andrey -gerais -whichever -amnesia -appropriated -anzac -clicks -modifying -ultimatum -cambrian -maids -verve -yellowstone -##mbs -conservatoire -##scribe -adherence -dinners -spectra -imperfect -mysteriously -sidekick -tatar -tuba -##aks -##ifolia -distrust -##athan -##zle -c2 -ronin -zac -##pse -celaena -instrumentalist -scents -skopje -##mbling -comical -compensated -vidal -condor -intersect -jingle -wavelengths -##urrent -mcqueen -##izzly -carp -weasel -422 -kanye -militias -postdoctoral -eugen -gunslinger -##ɛ -faux -hospice -##for -appalled -derivation -dwarves -##elis -dilapidated -##folk -astoria -philology -##lwyn -##otho -##saka -inducing -philanthropy -##bf -##itative -geek -markedly -sql -##yce -bessie -indices -rn -##flict -495 -frowns -resolving -weightlifting -tugs -cleric -contentious -1653 -mania -rms -##miya -##reate -##ruck -##tucket -bien -eels -marek -##ayton -##cence -discreet -unofficially -##ife -leaks -##bber -1705 -332 -dung -compressor -hillsborough -pandit -shillings -distal -##skin -381 -##tat -##you -nosed -##nir -mangrove -undeveloped -##idia -textures -##inho -##500 -##rise -ae -irritating -nay -amazingly -bancroft -apologetic -compassionate -kata -symphonies -##lovic -airspace -##lch -930 -gifford -precautions -fulfillment -sevilla -vulgar -martinique -##urities -looting -piccolo -tidy -##dermott -quadrant -armchair -incomes -mathematicians -stampede -nilsson -##inking -##scan -foo -quarterfinal -##ostal -shang -shouldered -squirrels -##owe -344 -vinegar -##bner -##rchy -##systems -delaying -##trics -ars -dwyer -rhapsody -sponsoring -##gration -bipolar -cinder -starters -##olio -##urst -421 -signage -##nty -aground -figurative -mons -acquaintances -duets -erroneously -soyuz -elliptic -recreated -##cultural -##quette -##ssed -##tma -##zcz -moderator -scares -##itaire -##stones -##udence -juniper -sighting -##just -##nsen -britten -calabria -ry -bop -cramer -forsyth -stillness -##л -airmen -gathers -unfit -##umber -##upt -taunting -##rip -seeker -streamlined -##bution -holster -schumann -tread -vox -##gano -##onzo -strive -dil -reforming -covent -newbury -predicting -##orro -decorate -tre -##puted -andover -ie -asahi -dept -dunkirk -gills -##tori -buren -huskies -##stis -##stov -abstracts -bets -loosen -##opa -1682 -yearning -##glio -##sir -berman -effortlessly -enamel -napoli -persist -##peration -##uez -attache -elisa -b1 -invitations -##kic -accelerating -reindeer -boardwalk -clutches -nelly -polka -starbucks -##kei -adamant -huey -lough -unbroken -adventurer -embroidery -inspecting -stanza -##ducted -naia -taluka -##pone -##roids -chases -deprivation -florian -##jing -##ppet -earthly -##lib -##ssee -colossal -foreigner -vet -freaks -patrice -rosewood -triassic -upstate -##pkins -dominates -ata -chants -ks -vo -##400 -##bley -##raya -##rmed -555 -agra -infiltrate -##ailing -##ilation -##tzer -##uppe -##werk -binoculars -enthusiast -fujian -squeak -##avs -abolitionist -almeida -boredom -hampstead -marsden -rations -##ands 
-inflated -334 -bonuses -rosalie -patna -##rco -329 -detachments -penitentiary -54th -flourishing -woolf -##dion -##etched -papyrus -##lster -##nsor -##toy -bobbed -dismounted -endelle -inhuman -motorola -tbs -wince -wreath -##ticus -hideout -inspections -sanjay -disgrace -infused -pudding -stalks -##urbed -arsenic -leases -##hyl -##rrard -collarbone -##waite -##wil -dowry -##bant -##edance -genealogical -nitrate -salamanca -scandals -thyroid -necessitated -##! -##" -### -##$ -##% -##& -##' -##( -##) -##* -##+ -##, -##- -##. -##/ -##: -##; -##< -##= -##> -##? -##@ -##[ -##\ -##] -##^ -##_ -##` -##{ -##| -##} -##~ -##¡ -##¢ -##£ -##¤ -##¥ -##¦ -##§ -##¨ -##© -##ª -##« -##¬ -##® -##± -##´ -##µ -##¶ -##· -##º -##» -##¼ -##¾ -##¿ -##æ -##ð -##÷ -##þ -##đ -##ħ -##ŋ -##œ -##ƒ -##ɐ -##ɑ -##ɒ -##ɔ -##ɕ -##ə -##ɡ -##ɣ -##ɨ -##ɪ -##ɫ -##ɬ -##ɯ -##ɲ -##ɴ -##ɹ -##ɾ -##ʀ -##ʁ -##ʂ -##ʃ -##ʉ -##ʊ -##ʋ -##ʌ -##ʎ -##ʐ -##ʑ -##ʒ -##ʔ -##ʰ -##ʲ -##ʳ -##ʷ -##ʸ -##ʻ -##ʼ -##ʾ -##ʿ -##ˈ -##ˡ -##ˢ -##ˣ -##ˤ -##β -##γ -##δ -##ε -##ζ -##θ -##κ -##λ -##μ -##ξ -##ο -##π -##ρ -##σ -##τ -##υ -##φ -##χ -##ψ -##ω -##б -##г -##д -##ж -##з -##м -##п -##с -##у -##ф -##х -##ц -##ч -##ш -##щ -##ъ -##э -##ю -##ђ -##є -##і -##ј -##љ -##њ -##ћ -##ӏ -##ա -##բ -##գ -##դ -##ե -##թ -##ի -##լ -##կ -##հ -##մ -##յ -##ն -##ո -##պ -##ս -##վ -##տ -##ր -##ւ -##ք -##־ -##א -##ב -##ג -##ד -##ו -##ז -##ח -##ט -##י -##ך -##כ -##ל -##ם -##מ -##ן -##נ -##ס -##ע -##ף -##פ -##ץ -##צ -##ק -##ר -##ש -##ת -##، -##ء -##ب -##ت -##ث -##ج -##ح -##خ -##ذ -##ز -##س -##ش -##ص -##ض -##ط -##ظ -##ع -##غ -##ـ -##ف -##ق -##ك -##و -##ى -##ٹ -##پ -##چ -##ک -##گ -##ں -##ھ -##ہ -##ے -##अ -##आ -##उ -##ए -##क -##ख -##ग -##च -##ज -##ट -##ड -##ण -##त -##थ -##द -##ध -##न -##प -##ब -##भ -##म -##य -##र -##ल -##व -##श -##ष -##स -##ह -##ा -##ि -##ी -##ो -##। -##॥ -##ং -##অ -##আ -##ই -##উ -##এ -##ও -##ক -##খ -##গ -##চ -##ছ -##জ -##ট -##ড -##ণ -##ত -##থ -##দ -##ধ -##ন -##প -##ব -##ভ -##ম -##য -##র -##ল -##শ -##ষ -##স -##হ -##া -##ি -##ী -##ে -##க -##ச -##ட -##த -##ந -##ன -##ப -##ம -##ய -##ர -##ல -##ள -##வ -##ா -##ி -##ு -##ே -##ை -##ನ -##ರ -##ಾ -##ක -##ය -##ර -##ල -##ව -##ා -##ก -##ง -##ต -##ท -##น -##พ -##ม -##ย -##ร -##ล -##ว -##ส -##อ -##า -##เ -##་ -##། -##ག -##ང -##ད -##ན -##པ -##བ -##མ -##འ -##ར -##ལ -##ས -##မ -##ა -##ბ -##გ -##დ -##ე -##ვ -##თ -##ი -##კ -##ლ -##მ -##ნ -##ო -##რ -##ს -##ტ -##უ -##ᄀ -##ᄂ -##ᄃ -##ᄅ -##ᄆ -##ᄇ -##ᄉ -##ᄊ -##ᄋ -##ᄌ -##ᄎ -##ᄏ -##ᄐ -##ᄑ -##ᄒ -##ᅡ -##ᅢ -##ᅥ -##ᅦ -##ᅧ -##ᅩ -##ᅪ -##ᅭ -##ᅮ -##ᅯ -##ᅲ -##ᅳ -##ᅴ -##ᅵ -##ᆨ -##ᆫ -##ᆯ -##ᆷ -##ᆸ -##ᆼ -##ᴬ -##ᴮ -##ᴰ -##ᴵ -##ᴺ -##ᵀ -##ᵃ -##ᵇ -##ᵈ -##ᵉ -##ᵍ -##ᵏ -##ᵐ -##ᵒ -##ᵖ -##ᵗ -##ᵘ -##ᵣ -##ᵤ -##ᵥ -##ᶜ -##ᶠ -##‐ -##‑ -##‒ -##– -##— -##― -##‖ -##‘ -##’ -##‚ -##“ -##” -##„ -##† -##‡ -##• -##… -##‰ -##′ -##″ -##› -##‿ -##⁄ -##⁰ -##ⁱ -##⁴ -##⁵ -##⁶ -##⁷ -##⁸ -##⁹ -##⁻ -##ⁿ -##₅ -##₆ -##₇ -##₈ -##₉ -##₊ -##₍ -##₎ -##ₐ -##ₑ -##ₒ -##ₓ -##ₕ -##ₖ -##ₗ -##ₘ -##ₚ -##ₛ -##ₜ -##₤ -##₩ -##€ -##₱ -##₹ -##ℓ -##№ -##ℝ -##™ -##⅓ -##⅔ -##← -##↑ -##→ -##↓ -##↔ -##↦ -##⇄ -##⇌ -##⇒ -##∂ -##∅ -##∆ -##∇ -##∈ -##∗ -##∘ -##√ -##∞ -##∧ -##∨ -##∩ -##∪ -##≈ -##≡ -##≤ -##≥ -##⊂ -##⊆ -##⊕ -##⊗ -##⋅ -##─ -##│ -##■ -##▪ -##● -##★ -##☆ -##☉ -##♠ -##♣ -##♥ -##♦ -##♯ -##⟨ -##⟩ -##ⱼ -##⺩ -##⺼ -##⽥ -##、 -##。 -##〈 -##〉 -##《 -##》 -##「 -##」 -##『 -##』 -##〜 -##あ -##い -##う -##え -##お -##か -##き -##く -##け -##こ -##さ -##し -##す -##せ -##そ -##た -##ち -##っ -##つ -##て -##と -##な -##に -##ぬ -##ね -##の -##は -##ひ -##ふ -##へ -##ほ -##ま -##み -##む -##め -##も -##や -##ゆ -##よ -##ら -##り -##る -##れ -##ろ -##を -##ん -##ァ -##ア -##ィ -##イ -##ウ -##ェ -##エ -##オ -##カ -##キ -##ク -##ケ 
-##コ -##サ -##シ -##ス -##セ -##タ -##チ -##ッ -##ツ -##テ -##ト -##ナ -##ニ -##ノ -##ハ -##ヒ -##フ -##ヘ -##ホ -##マ -##ミ -##ム -##メ -##モ -##ャ -##ュ -##ョ -##ラ -##リ -##ル -##レ -##ロ -##ワ -##ン -##・ -##ー -##一 -##三 -##上 -##下 -##不 -##世 -##中 -##主 -##久 -##之 -##也 -##事 -##二 -##五 -##井 -##京 -##人 -##亻 -##仁 -##介 -##代 -##仮 -##伊 -##会 -##佐 -##侍 -##保 -##信 -##健 -##元 -##光 -##八 -##公 -##内 -##出 -##分 -##前 -##劉 -##力 -##加 -##勝 -##北 -##区 -##十 -##千 -##南 -##博 -##原 -##口 -##古 -##史 -##司 -##合 -##吉 -##同 -##名 -##和 -##囗 -##四 -##国 -##國 -##土 -##地 -##坂 -##城 -##堂 -##場 -##士 -##夏 -##外 -##大 -##天 -##太 -##夫 -##奈 -##女 -##子 -##学 -##宀 -##宇 -##安 -##宗 -##定 -##宣 -##宮 -##家 -##宿 -##寺 -##將 -##小 -##尚 -##山 -##岡 -##島 -##崎 -##川 -##州 -##巿 -##帝 -##平 -##年 -##幸 -##广 -##弘 -##張 -##彳 -##後 -##御 -##德 -##心 -##忄 -##志 -##忠 -##愛 -##成 -##我 -##戦 -##戸 -##手 -##扌 -##政 -##文 -##新 -##方 -##日 -##明 -##星 -##春 -##昭 -##智 -##曲 -##書 -##月 -##有 -##朝 -##木 -##本 -##李 -##村 -##東 -##松 -##林 -##森 -##楊 -##樹 -##橋 -##歌 -##止 -##正 -##武 -##比 -##氏 -##民 -##水 -##氵 -##氷 -##永 -##江 -##沢 -##河 -##治 -##法 -##海 -##清 -##漢 -##瀬 -##火 -##版 -##犬 -##王 -##生 -##田 -##男 -##疒 -##発 -##白 -##的 -##皇 -##目 -##相 -##省 -##真 -##石 -##示 -##社 -##神 -##福 -##禾 -##秀 -##秋 -##空 -##立 -##章 -##竹 -##糹 -##美 -##義 -##耳 -##良 -##艹 -##花 -##英 -##華 -##葉 -##藤 -##行 -##街 -##西 -##見 -##訁 -##語 -##谷 -##貝 -##貴 -##車 -##軍 -##辶 -##道 -##郎 -##郡 -##部 -##都 -##里 -##野 -##金 -##鈴 -##镇 -##長 -##門 -##間 -##阝 -##阿 -##陳 -##陽 -##雄 -##青 -##面 -##風 -##食 -##香 -##馬 -##高 -##龍 -##龸 -##fi -##fl -##! -##( -##) -##, -##- -##. -##/ -##: -##? -##~ From 837cd6f890a518ac5bd3355900a0d7f479fe5328 Mon Sep 17 00:00:00 2001 From: ChenXin Date: Tue, 14 May 2019 23:15:55 +0800 Subject: [PATCH 18/19] =?UTF-8?q?cache=5Fresults=20=E5=8F=82=E6=95=B0?= =?UTF-8?q?=E7=9A=84=E4=BF=AE=E6=94=B9?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- test/core/test_utils.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/test/core/test_utils.py b/test/core/test_utils.py index b846a32d..6d70d97c 100644 --- a/test/core/test_utils.py +++ b/test/core/test_utils.py @@ -140,7 +140,7 @@ class TestCache(unittest.TestCase): try: start_time = time.time() embed, vocab, d = process_data_1('test/data_for_tests/word2vec_test.txt', 'test/data_for_tests/cws_train', - cache_filepath='test/demo_overwrite.pkl') + _cache_fp='test/demo_overwrite.pkl') end_time = time.time() pre_time = end_time - start_time with open('test/demo_overwrite.pkl', 'rb') as f: @@ -150,7 +150,7 @@ class TestCache(unittest.TestCase): self.assertListEqual(embed[i].tolist(), _embed[i].tolist()) start_time = time.time() embed, vocab, d = process_data_1('test/data_for_tests/word2vec_test.txt', 'test/data_for_tests/cws_train', - cache_filepath='test/demo_overwrite.pkl') + _cache_fp='test/demo_overwrite.pkl') end_time = time.time() read_time = end_time - start_time print("Read using {:.3f}, while prepare using:{:.3f}".format(read_time, pre_time)) @@ -162,7 +162,7 @@ class TestCache(unittest.TestCase): try: start_time = time.time() embed, vocab, d = process_data_1('test/data_for_tests/word2vec_test.txt', 'test/data_for_tests/cws_train', - refresh=True) + _refresh=True) end_time = time.time() pre_time = end_time - start_time with open('test/demo1.pkl', 'rb') as f: @@ -172,7 +172,7 @@ class TestCache(unittest.TestCase): self.assertListEqual(embed[i].tolist(), _embed[i].tolist()) start_time = time.time() embed, vocab, d = process_data_1('test/data_for_tests/word2vec_test.txt', 'test/data_for_tests/cws_train', - refresh=True) + _refresh=True) end_time = time.time() read_time = end_time - start_time print("Read 
using {:.3f}, while prepare using:{:.3f}".format(read_time, pre_time)) From 51b493d7165cdb2045c2c8198217d23afb75d5eb Mon Sep 17 00:00:00 2001 From: ChenXin Date: Tue, 14 May 2019 23:21:21 +0800 Subject: [PATCH 19/19] =?UTF-8?q?=E4=BF=AE=E6=94=B9=20io=20=E7=9A=84?= =?UTF-8?q?=E6=B5=8B=E8=AF=95=E6=96=87=E4=BB=B6;=20=E5=88=A0=E9=99=A4?= =?UTF-8?q?=E4=BA=86=E4=B8=80=E4=BA=9B=E8=BF=87=E6=97=B6=E7=9A=84=E6=B5=8B?= =?UTF-8?q?=E8=AF=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- test/io/config | 62 ---------------- test/io/test_config_saver.py | 112 ----------------------------- test/io/test_dataset_loader.py | 10 +-- test/io/test_embed_loader.py | 17 +++-- test/modules/test_other_modules.py | 46 ------------ 5 files changed, 13 insertions(+), 234 deletions(-) delete mode 100644 test/io/config delete mode 100644 test/io/test_config_saver.py diff --git a/test/io/config b/test/io/config deleted file mode 100644 index 5ff9eacf..00000000 --- a/test/io/config +++ /dev/null @@ -1,62 +0,0 @@ -[test] -x = 1 - -y = 2 - -z = 3 -#this is an example -input = [1,2,3] - -text = "this is text" - -doubles = 0.8 - -tt = 0.5 - -test = 105 - -str = "this is a str" - -double = 0.5 - - -[t] -x = "this is an test section" - - - -[test-case-2] -double = 0.5 - -doubles = 0.8 - -tt = 0.5 - -test = 105 - -str = "this is a str" - -[another-test] -doubles = 0.8 - -tt = 0.5 - -test = 105 - -str = "this is a str" - -double = 0.5 - - -[one-another-test] -doubles = 0.8 - -tt = 0.5 - -test = 105 - -str = "this is a str" - -double = 0.5 - - diff --git a/test/io/test_config_saver.py b/test/io/test_config_saver.py deleted file mode 100644 index e5341d63..00000000 --- a/test/io/test_config_saver.py +++ /dev/null @@ -1,112 +0,0 @@ -import os -import unittest - -# from fastNLP.io import ConfigSection, ConfigLoader, ConfigSaver - - -class TestConfigSaver(unittest.TestCase): - def test_case_1(self): - config_file_dir = "." 
- config_file_name = "config" - config_file_path = os.path.join(config_file_dir, config_file_name) - - tmp_config_file_path = os.path.join(config_file_dir, "tmp_config") - - with open(config_file_path, "r") as f: - lines = f.readlines() - - standard_section = ConfigSection() - t_section = ConfigSection() - ConfigLoader().load_config(config_file_path, {"test": standard_section, "t": t_section}) - - config_saver = ConfigSaver(config_file_path) - - section = ConfigSection() - section["doubles"] = 0.8 - section["tt"] = 0.5 - section["test"] = 105 - section["str"] = "this is a str" - - test_case_2_section = section - test_case_2_section["double"] = 0.5 - - for k in section.__dict__.keys(): - standard_section[k] = section[k] - - config_saver.save_config_file("test", section) - config_saver.save_config_file("another-test", section) - config_saver.save_config_file("one-another-test", section) - config_saver.save_config_file("test-case-2", section) - - test_section = ConfigSection() - at_section = ConfigSection() - another_test_section = ConfigSection() - one_another_test_section = ConfigSection() - a_test_case_2_section = ConfigSection() - - ConfigLoader().load_config(config_file_path, {"test": test_section, - "another-test": another_test_section, - "t": at_section, - "one-another-test": one_another_test_section, - "test-case-2": a_test_case_2_section}) - - assert test_section == standard_section - assert at_section == t_section - assert another_test_section == section - assert one_another_test_section == section - assert a_test_case_2_section == test_case_2_section - - config_saver.save_config_file("test", section) - - with open(config_file_path, "w") as f: - f.writelines(lines) - - with open(tmp_config_file_path, "w") as f: - f.write('[test]\n') - f.write('this is an fault example\n') - - tmp_config_saver = ConfigSaver(tmp_config_file_path) - try: - tmp_config_saver._read_section() - except Exception as e: - pass - os.remove(tmp_config_file_path) - - try: - tmp_config_saver = ConfigSaver("file-NOT-exist") - except Exception as e: - pass - - def test_case_2(self): - config = "[section_A]\n[section_B]\n" - - with open("./test.cfg", "w", encoding="utf-8") as f: - f.write(config) - saver = ConfigSaver("./test.cfg") - - section = ConfigSection() - section["doubles"] = 0.8 - section["tt"] = [1, 2, 3] - section["test"] = 105 - section["str"] = "this is a str" - - saver.save_config_file("section_A", section) - - os.system("rm ./test.cfg") - - def test_case_3(self): - config = "[section_A]\ndoubles = 0.9\ntt = [1, 2, 3]\n[section_B]\n" - - with open("./test.cfg", "w", encoding="utf-8") as f: - f.write(config) - saver = ConfigSaver("./test.cfg") - - section = ConfigSection() - section["doubles"] = 0.8 - section["tt"] = [1, 2, 3] - section["test"] = 105 - section["str"] = "this is a str" - - saver.save_config_file("section_A", section) - - os.system("rm ./test.cfg") diff --git a/test/io/test_dataset_loader.py b/test/io/test_dataset_loader.py index 3c9d7c07..12d352b1 100644 --- a/test/io/test_dataset_loader.py +++ b/test/io/test_dataset_loader.py @@ -9,22 +9,22 @@ class TestDatasetLoader(unittest.TestCase): """ Test the the loader of Conll2003 dataset """ - dataset_path = "../data_for_tests/conll_2003_example.txt" + dataset_path = "test/data_for_tests/conll_2003_example.txt" loader = Conll2003Loader() dataset_2003 = loader.load(dataset_path) def test_PeopleDailyCorpusLoader(self): - data_set = PeopleDailyCorpusLoader().load("../data_for_tests/people_daily_raw.txt") + data_set = 
PeopleDailyCorpusLoader().load("test/data_for_tests/people_daily_raw.txt") def test_CSVLoader(self): ds = CSVLoader(sep='\t', headers=['words', 'label']) \ - .load('../data_for_tests/tutorial_sample_dataset.csv') + .load('test/data_for_tests/tutorial_sample_dataset.csv') assert len(ds) > 0 def test_SNLILoader(self): - ds = SNLILoader().load('../data_for_tests/sample_snli.jsonl') + ds = SNLILoader().load('test/data_for_tests/sample_snli.jsonl') assert len(ds) == 3 def test_JsonLoader(self): - ds = JsonLoader().load('../data_for_tests/sample_snli.jsonl') + ds = JsonLoader().load('test/data_for_tests/sample_snli.jsonl') assert len(ds) == 3 diff --git a/test/io/test_embed_loader.py b/test/io/test_embed_loader.py index d43a00fe..ff8ecfcf 100644 --- a/test/io/test_embed_loader.py +++ b/test/io/test_embed_loader.py @@ -3,15 +3,13 @@ import numpy as np from fastNLP import Vocabulary from fastNLP.io import EmbedLoader -import os -from fastNLP.io.dataset_loader import SSTLoader -from fastNLP.core.const import Const as C + class TestEmbedLoader(unittest.TestCase): def test_load_with_vocab(self): vocab = Vocabulary() - glove = "../data_for_tests/glove.6B.50d_test.txt" - word2vec = "../data_for_tests/word2vec_test.txt" + glove = "test/data_for_tests/glove.6B.50d_test.txt" + word2vec = "test/data_for_tests/word2vec_test.txt" vocab.add_word('the') vocab.add_word('none') g_m = EmbedLoader.load_with_vocab(glove, vocab) @@ -19,11 +17,11 @@ class TestEmbedLoader(unittest.TestCase): w_m = EmbedLoader.load_with_vocab(word2vec, vocab, normalize=True) self.assertEqual(w_m.shape, (4, 50)) self.assertAlmostEqual(np.linalg.norm(w_m, axis=1).sum(), 4) - + def test_load_without_vocab(self): words = ['the', 'of', 'in', 'a', 'to', 'and'] - glove = "../data_for_tests/glove.6B.50d_test.txt" - word2vec = "../data_for_tests/word2vec_test.txt" + glove = "test/data_for_tests/glove.6B.50d_test.txt" + word2vec = "test/data_for_tests/word2vec_test.txt" g_m, vocab = EmbedLoader.load_without_vocab(glove) self.assertEqual(g_m.shape, (8, 50)) for word in words: @@ -39,9 +37,10 @@ class TestEmbedLoader(unittest.TestCase): self.assertAlmostEqual(np.linalg.norm(w_m, axis=1).sum(), 7) for word in words: self.assertIn(word, vocab) - + def test_read_all_glove(self): pass + # TODO # 这是可以运行的,但是总数少于行数,应该是由于glove有重复的word # path = '/where/to/read/full/glove' # init_embed, vocab = EmbedLoader.load_without_vocab(path, error='strict') diff --git a/test/modules/test_other_modules.py b/test/modules/test_other_modules.py index ef5020c1..c5462623 100644 --- a/test/modules/test_other_modules.py +++ b/test/modules/test_other_modules.py @@ -2,55 +2,9 @@ import unittest import torch -# from fastNLP.modules.other_modules import GroupNorm, LayerNormalization, BiLinear, BiAffine from fastNLP.modules.encoder.star_transformer import StarTransformer -class TestGroupNorm(unittest.TestCase): - def test_case_1(self): - gn = GroupNorm(num_features=1, num_groups=10, eps=1.5e-5) - x = torch.randn((20, 50, 10)) - y = gn(x) - - -class TestLayerNormalization(unittest.TestCase): - def test_case_1(self): - ln = LayerNormalization(layer_size=5, eps=2e-3) - x = torch.randn((20, 50, 5)) - y = ln(x) - - -class TestBiLinear(unittest.TestCase): - def test_case_1(self): - bl = BiLinear(n_left=5, n_right=5, n_out=10, bias=True) - x_left = torch.randn((7, 10, 20, 5)) - x_right = torch.randn((7, 10, 20, 5)) - y = bl(x_left, x_right) - print(bl) - bl2 = BiLinear(n_left=15, n_right=15, n_out=10, bias=True) - - -class TestBiAffine(unittest.TestCase): - def test_case_1(self): - 
batch_size = 16 - encoder_length = 21 - decoder_length = 32 - layer = BiAffine(10, 10, 25, biaffine=True) - decoder_input = torch.randn((batch_size, encoder_length, 10)) - encoder_input = torch.randn((batch_size, decoder_length, 10)) - y = layer(decoder_input, encoder_input) - self.assertEqual(tuple(y.shape), (batch_size, 25, encoder_length, decoder_length)) - - def test_case_2(self): - batch_size = 16 - encoder_length = 21 - decoder_length = 32 - layer = BiAffine(10, 10, 25, biaffine=False) - decoder_input = torch.randn((batch_size, encoder_length, 10)) - encoder_input = torch.randn((batch_size, decoder_length, 10)) - y = layer(decoder_input, encoder_input) - self.assertEqual(tuple(y.shape), (batch_size, 25, encoder_length, 1)) - class TestStarTransformer(unittest.TestCase): def test_1(self): model = StarTransformer(num_layers=6, hidden_size=100, num_head=8, head_dim=20, max_len=100)