diff --git a/docs/source/user/task1.rst b/docs/source/user/task1.rst new file mode 100644 index 00000000..0c346999 --- /dev/null +++ b/docs/source/user/task1.rst @@ -0,0 +1,3 @@ +===================== +用 fastNLP 分类 +===================== \ No newline at end of file diff --git a/tutorials/fastnlp_10min_tutorial.ipynb b/tutorials/fastnlp_10min_tutorial.ipynb index 534c4e49..526fd49f 100644 --- a/tutorials/fastnlp_10min_tutorial.ipynb +++ b/tutorials/fastnlp_10min_tutorial.ipynb @@ -41,7 +41,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 1, "metadata": {}, "outputs": [ { @@ -63,7 +63,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 2, "metadata": {}, "outputs": [ { @@ -97,7 +97,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 3, "metadata": {}, "outputs": [ { @@ -107,7 +107,7 @@ "'label': 0 type=str}" ] }, - "execution_count": 8, + "execution_count": 3, "metadata": {}, "output_type": "execute_result" } @@ -128,7 +128,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 4, "metadata": {}, "outputs": [ { @@ -148,7 +148,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 5, "metadata": {}, "outputs": [ { @@ -168,7 +168,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 6, "metadata": {}, "outputs": [ { @@ -191,7 +191,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 7, "metadata": {}, "outputs": [ { @@ -221,7 +221,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 8, "metadata": {}, "outputs": [ { @@ -249,7 +249,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 9, "metadata": {}, "outputs": [], "source": [ @@ -263,7 +263,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 10, "metadata": {}, "outputs": [ { @@ -295,17 +295,17 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 11, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "{'raw_sentence': a welcome relief from baseball movies that try too hard to be mythic , this one is a sweet and modest and ultimately winning story . type=str,\n", - "'label': 3 type=int,\n", - "'words': [4, 1, 1, 18, 1, 1, 13, 1, 1, 1, 8, 26, 1, 5, 35, 1, 11, 4, 1, 10, 1, 10, 1, 1, 1, 2] type=list,\n", - "'seq_len': 26 type=int}\n" + "{'raw_sentence': the performances are an absolute joy . 
type=str,\n", + "'label': 4 type=int,\n", + "'words': [3, 1, 1, 26, 1, 1, 2] type=list,\n", + "'seq_len': 7 type=int}\n" ] } ], @@ -327,9 +327,21 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 12, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "batch_x has: {'words': tensor([[ 15, 72, 15, 73, 74, 7, 3, 75, 6, 3, 16, 16,\n", + " 76, 2],\n", + " [ 15, 72, 15, 73, 74, 7, 3, 75, 6, 3, 16, 16,\n", + " 76, 2]])}\n", + "batch_y has: {'label': tensor([ 1, 1])}\n" + ] + } + ], "source": [ "# 如果你们需要做强化学习或者GAN之类的项目,你们也可以使用这些数据预处理的工具\n", "from fastNLP.core.batch import Batch\n", @@ -352,7 +364,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 15, "metadata": {}, "outputs": [ { @@ -360,7 +372,7 @@ "text/plain": [ "CNNText(\n", " (embed): Embedding(\n", - " (embed): Embedding(59, 50, padding_idx=0)\n", + " 77, 50\n", " (dropout): Dropout(p=0.0)\n", " )\n", " (conv_pool): ConvMaxpool(\n", @@ -377,14 +389,14 @@ ")" ] }, - "execution_count": 17, + "execution_count": 15, "metadata": {}, "output_type": "execute_result" } ], "source": [ "from fastNLP.models import CNNText\n", - "model = CNNText(embed_num=len(vocab), embed_dim=50, num_classes=5, padding=2, dropout=0.1)\n", + "model = CNNText((len(vocab), 50), num_classes=5, padding=2, dropout=0.1)\n", "model" ] }, @@ -448,7 +460,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 16, "metadata": {}, "outputs": [], "source": [ @@ -485,7 +497,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 17, "metadata": {}, "outputs": [], "source": [ @@ -508,7 +520,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 18, "metadata": {}, "outputs": [], "source": [ @@ -517,7 +529,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 19, "metadata": {}, "outputs": [ { @@ -525,48 +537,25 @@ "output_type": "stream", "text": [ "input fields after batch(if batch size is 2):\n", - "\tword_seq: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 26]) \n", + "\tword_seq: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 11]) \n", "target fields after batch(if batch size is 2):\n", "\tlabel_seq: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\n", - "training epochs started 2019-01-12 17-07-51\n" + "\n" ] }, { - "data": { - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=10), HTML(value='')), layout=Layout(display='…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluation at Epoch 1/5. Step:2/10. AccuracyMetric: acc=0.425926\n", - "Evaluation at Epoch 2/5. Step:4/10. AccuracyMetric: acc=0.425926\n", - "Evaluation at Epoch 3/5. Step:6/10. AccuracyMetric: acc=0.611111\n", - "Evaluation at Epoch 4/5. Step:8/10. AccuracyMetric: acc=0.648148\n", - "Evaluation at Epoch 5/5. Step:10/10. AccuracyMetric: acc=0.703704\n", - "\n", - "In Epoch:5/Step:10, got best dev performance:AccuracyMetric: acc=0.703704\n", - "Reloaded the best model.\n" + "ename": "NameError", + "evalue": "\nProblems occurred when calling CNNText.forward(self, words, seq_len=None)\n\tmissing param: ['words']\n\tunused field: ['word_seq']\n\tSuggestion: You need to provide ['words'] in DataSet and set it as input. 
", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 7\u001b[0m \u001b[0msave_path\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 8\u001b[0m \u001b[0mbatch_size\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m32\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 9\u001b[0;31m n_epochs=5)\n\u001b[0m\u001b[1;32m 10\u001b[0m \u001b[0moverfit_trainer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/core/trainer.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, train_data, model, optimizer, loss, batch_size, sampler, update_every, n_epochs, print_every, dev_data, metrics, metric_key, validate_every, save_path, prefetch, use_tqdm, device, callbacks, check_code_level)\u001b[0m\n\u001b[1;32m 447\u001b[0m _check_code(dataset=train_data, model=model, losser=losser, metrics=metrics, dev_data=dev_data,\n\u001b[1;32m 448\u001b[0m \u001b[0mmetric_key\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mmetric_key\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcheck_level\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcheck_code_level\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 449\u001b[0;31m batch_size=min(batch_size, DEFAULT_CHECK_BATCH_SIZE))\n\u001b[0m\u001b[1;32m 450\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 451\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtrain_data\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/core/trainer.py\u001b[0m in \u001b[0;36m_check_code\u001b[0;34m(dataset, model, losser, metrics, batch_size, dev_data, metric_key, check_level)\u001b[0m\n\u001b[1;32m 808\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minfo_str\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 809\u001b[0m _check_forward_error(forward_func=model.forward, dataset=dataset,\n\u001b[0;32m--> 810\u001b[0;31m batch_x=batch_x, check_level=check_level)\n\u001b[0m\u001b[1;32m 811\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 812\u001b[0m \u001b[0mrefined_batch_x\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_build_args\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mbatch_x\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/core/utils.py\u001b[0m in \u001b[0;36m_check_forward_error\u001b[0;34m(forward_func, batch_x, dataset, check_level)\u001b[0m\n\u001b[1;32m 594\u001b[0m \u001b[0msugg_str\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0msuggestions\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 595\u001b[0m \u001b[0merr_str\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'\\n'\u001b[0m 
\u001b[0;34m+\u001b[0m \u001b[0;34m'\\n'\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjoin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0merrs\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;34m'\\n\\tSuggestion: '\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0msugg_str\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 596\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mNameError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0merr_str\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 597\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0m_unused\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 598\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcheck_level\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0mWARNING_CHECK_LEVEL\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: \nProblems occurred when calling CNNText.forward(self, words, seq_len=None)\n\tmissing param: ['words']\n\tunused field: ['word_seq']\n\tSuggestion: You need to provide ['words'] in DataSet and set it as input. " ] - }, - { - "data": { - "text/plain": [ - "{'best_eval': {'AccuracyMetric': {'acc': 0.703704}},\n", - " 'best_epoch': 5,\n", - " 'best_step': 10,\n", - " 'seconds': 0.62}" - ] - }, - "execution_count": 21, - "metadata": {}, - "output_type": "execute_result" } ], "source": [ diff --git a/tutorials/fastnlp_1min_tutorial.ipynb b/tutorials/fastnlp_1min_tutorial.ipynb index 7a35d992..64d57bc4 100644 --- a/tutorials/fastnlp_1min_tutorial.ipynb +++ b/tutorials/fastnlp_1min_tutorial.ipynb @@ -21,22 +21,10 @@ "cell_type": "code", "execution_count": 1, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "c:\\users\\zyfeng\\miniconda3\\envs\\fastnlp\\lib\\site-packages\\tqdm\\autonotebook\\__init__.py:14: TqdmExperimentalWarning: Using `tqdm.autonotebook.tqdm` in notebook mode. Use `tqdm.tqdm` instead to force console mode (e.g. in jupyter console)\n", - " \" (e.g. 
in jupyter console)\", TqdmExperimentalWarning)\n" - ] - } - ], + "outputs": [], "source": [ - "import sys\n", - "sys.path.append(\"../\")\n", - "\n", "from fastNLP import DataSet\n", - "\n", + " \n", "data_path = \"./sample_data/tutorial_sample_dataset.csv\"\n", "ds = DataSet.read_csv(data_path, headers=('raw_sentence', 'label'), sep='\\t')" ] @@ -77,7 +65,1370 @@ "cell_type": "code", "execution_count": 3, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "[['a',\n", + " 'series',\n", + " 'of',\n", + " 'escapades',\n", + " 'demonstrating',\n", + " 'the',\n", + " 'adage',\n", + " 'that',\n", + " 'what',\n", + " 'is',\n", + " 'good',\n", + " 'for',\n", + " 'the',\n", + " 'goose',\n", + " 'is',\n", + " 'also',\n", + " 'good',\n", + " 'for',\n", + " 'the',\n", + " 'gander',\n", + " ',',\n", + " 'some',\n", + " 'of',\n", + " 'which',\n", + " 'occasionally',\n", + " 'amuses',\n", + " 'but',\n", + " 'none',\n", + " 'of',\n", + " 'which',\n", + " 'amounts',\n", + " 'to',\n", + " 'much',\n", + " 'of',\n", + " 'a',\n", + " 'story',\n", + " '.'],\n", + " ['this',\n", + " 'quiet',\n", + " ',',\n", + " 'introspective',\n", + " 'and',\n", + " 'entertaining',\n", + " 'independent',\n", + " 'is',\n", + " 'worth',\n", + " 'seeking',\n", + " '.'],\n", + " ['even',\n", + " 'fans',\n", + " 'of',\n", + " 'ismail',\n", + " 'merchant',\n", + " \"'s\",\n", + " 'work',\n", + " ',',\n", + " 'i',\n", + " 'suspect',\n", + " ',',\n", + " 'would',\n", + " 'have',\n", + " 'a',\n", + " 'hard',\n", + " 'time',\n", + " 'sitting',\n", + " 'through',\n", + " 'this',\n", + " 'one',\n", + " '.'],\n", + " ['a',\n", + " 'positively',\n", + " 'thrilling',\n", + " 'combination',\n", + " 'of',\n", + " 'ethnography',\n", + " 'and',\n", + " 'all',\n", + " 'the',\n", + " 'intrigue',\n", + " ',',\n", + " 'betrayal',\n", + " ',',\n", + " 'deceit',\n", + " 'and',\n", + " 'murder',\n", + " 'of',\n", + " 'a',\n", + " 'shakespearean',\n", + " 'tragedy',\n", + " 'or',\n", + " 'a',\n", + " 'juicy',\n", + " 'soap',\n", + " 'opera',\n", + " '.'],\n", + " ['aggressive',\n", + " 'self-glorification',\n", + " 'and',\n", + " 'a',\n", + " 'manipulative',\n", + " 'whitewash',\n", + " '.'],\n", + " ['a',\n", + " 'comedy-drama',\n", + " 'of',\n", + " 'nearly',\n", + " 'epic',\n", + " 'proportions',\n", + " 'rooted',\n", + " 'in',\n", + " 'a',\n", + " 'sincere',\n", + " 'performance',\n", + " 'by',\n", + " 'the',\n", + " 'title',\n", + " 'character',\n", + " 'undergoing',\n", + " 'midlife',\n", + " 'crisis',\n", + " '.'],\n", + " ['narratively',\n", + " ',',\n", + " 'trouble',\n", + " 'every',\n", + " 'day',\n", + " 'is',\n", + " 'a',\n", + " 'plodding',\n", + " 'mess',\n", + " '.'],\n", + " ['the',\n", + " 'importance',\n", + " 'of',\n", + " 'being',\n", + " 'earnest',\n", + " ',',\n", + " 'so',\n", + " 'thick',\n", + " 'with',\n", + " 'wit',\n", + " 'it',\n", + " 'plays',\n", + " 'like',\n", + " 'a',\n", + " 'reading',\n", + " 'from',\n", + " 'bartlett',\n", + " \"'s\",\n", + " 'familiar',\n", + " 'quotations'],\n", + " ['but', 'it', 'does', \"n't\", 'leave', 'you', 'with', 'much', '.'],\n", + " ['you', 'could', 'hate', 'it', 'for', 'the', 'same', 'reason', '.'],\n", + " ['there',\n", + " \"'s\",\n", + " 'little',\n", + " 'to',\n", + " 'recommend',\n", + " 'snow',\n", + " 'dogs',\n", + " ',',\n", + " 'unless',\n", + " 'one',\n", + " 'considers',\n", + " 'cliched',\n", + " 'dialogue',\n", + " 'and',\n", + " 'perverse',\n", + " 'escapism',\n", + " 'a',\n", + " 'source',\n", + " 'of',\n", + " 'high',\n", + " 
'hilarity',\n", + " '.'],\n", + " ['kung',\n", + " 'pow',\n", + " 'is',\n", + " 'oedekerk',\n", + " \"'s\",\n", + " 'realization',\n", + " 'of',\n", + " 'his',\n", + " 'childhood',\n", + " 'dream',\n", + " 'to',\n", + " 'be',\n", + " 'in',\n", + " 'a',\n", + " 'martial-arts',\n", + " 'flick',\n", + " ',',\n", + " 'and',\n", + " 'proves',\n", + " 'that',\n", + " 'sometimes',\n", + " 'the',\n", + " 'dreams',\n", + " 'of',\n", + " 'youth',\n", + " 'should',\n", + " 'remain',\n", + " 'just',\n", + " 'that',\n", + " '.'],\n", + " ['the', 'performances', 'are', 'an', 'absolute', 'joy', '.'],\n", + " ['fresnadillo',\n", + " 'has',\n", + " 'something',\n", + " 'serious',\n", + " 'to',\n", + " 'say',\n", + " 'about',\n", + " 'the',\n", + " 'ways',\n", + " 'in',\n", + " 'which',\n", + " 'extravagant',\n", + " 'chance',\n", + " 'can',\n", + " 'distort',\n", + " 'our',\n", + " 'perspective',\n", + " 'and',\n", + " 'throw',\n", + " 'us',\n", + " 'off',\n", + " 'the',\n", + " 'path',\n", + " 'of',\n", + " 'good',\n", + " 'sense',\n", + " '.'],\n", + " ['i',\n", + " 'still',\n", + " 'like',\n", + " 'moonlight',\n", + " 'mile',\n", + " ',',\n", + " 'better',\n", + " 'judgment',\n", + " 'be',\n", + " 'damned',\n", + " '.'],\n", + " ['a',\n", + " 'welcome',\n", + " 'relief',\n", + " 'from',\n", + " 'baseball',\n", + " 'movies',\n", + " 'that',\n", + " 'try',\n", + " 'too',\n", + " 'hard',\n", + " 'to',\n", + " 'be',\n", + " 'mythic',\n", + " ',',\n", + " 'this',\n", + " 'one',\n", + " 'is',\n", + " 'a',\n", + " 'sweet',\n", + " 'and',\n", + " 'modest',\n", + " 'and',\n", + " 'ultimately',\n", + " 'winning',\n", + " 'story',\n", + " '.'],\n", + " ['a',\n", + " 'bilingual',\n", + " 'charmer',\n", + " ',',\n", + " 'just',\n", + " 'like',\n", + " 'the',\n", + " 'woman',\n", + " 'who',\n", + " 'inspired',\n", + " 'it'],\n", + " ['like',\n", + " 'a',\n", + " 'less',\n", + " 'dizzily',\n", + " 'gorgeous',\n", + " 'companion',\n", + " 'to',\n", + " 'mr.',\n", + " 'wong',\n", + " \"'s\",\n", + " 'in',\n", + " 'the',\n", + " 'mood',\n", + " 'for',\n", + " 'love',\n", + " '--',\n", + " 'very',\n", + " 'much',\n", + " 'a',\n", + " 'hong',\n", + " 'kong',\n", + " 'movie',\n", + " 'despite',\n", + " 'its',\n", + " 'mainland',\n", + " 'setting',\n", + " '.'],\n", + " ['as',\n", + " 'inept',\n", + " 'as',\n", + " 'big-screen',\n", + " 'remakes',\n", + " 'of',\n", + " 'the',\n", + " 'avengers',\n", + " 'and',\n", + " 'the',\n", + " 'wild',\n", + " 'wild',\n", + " 'west',\n", + " '.'],\n", + " ['it',\n", + " \"'s\",\n", + " 'everything',\n", + " 'you',\n", + " \"'d\",\n", + " 'expect',\n", + " '--',\n", + " 'but',\n", + " 'nothing',\n", + " 'more',\n", + " '.'],\n", + " ['best', 'indie', 'of', 'the', 'year', ',', 'so', 'far', '.'],\n", + " ['hatfield',\n", + " 'and',\n", + " 'hicks',\n", + " 'make',\n", + " 'the',\n", + " 'oddest',\n", + " 'of',\n", + " 'couples',\n", + " ',',\n", + " 'and',\n", + " 'in',\n", + " 'this',\n", + " 'sense',\n", + " 'the',\n", + " 'movie',\n", + " 'becomes',\n", + " 'a',\n", + " 'study',\n", + " 'of',\n", + " 'the',\n", + " 'gambles',\n", + " 'of',\n", + " 'the',\n", + " 'publishing',\n", + " 'world',\n", + " ',',\n", + " 'offering',\n", + " 'a',\n", + " 'case',\n", + " 'study',\n", + " 'that',\n", + " 'exists',\n", + " 'apart',\n", + " 'from',\n", + " 'all',\n", + " 'the',\n", + " 'movie',\n", + " \"'s\",\n", + " 'political',\n", + " 'ramifications',\n", + " '.'],\n", + " ['it',\n", + " \"'s\",\n", + " 'like',\n", + " 'going',\n", + " 'to',\n", + " 'a',\n", + " 'house',\n", + " 'party',\n", + " 
'and',\n", + " 'watching',\n", + " 'the',\n", + " 'host',\n", + " 'defend',\n", + " 'himself',\n", + " 'against',\n", + " 'a',\n", + " 'frothing',\n", + " 'ex-girlfriend',\n", + " '.'],\n", + " ['that',\n", + " 'the',\n", + " 'chuck',\n", + " 'norris',\n", + " '``',\n", + " 'grenade',\n", + " 'gag',\n", + " \"''\",\n", + " 'occurs',\n", + " 'about',\n", + " '7',\n", + " 'times',\n", + " 'during',\n", + " 'windtalkers',\n", + " 'is',\n", + " 'a',\n", + " 'good',\n", + " 'indication',\n", + " 'of',\n", + " 'how',\n", + " 'serious-minded',\n", + " 'the',\n", + " 'film',\n", + " 'is',\n", + " '.'],\n", + " ['the',\n", + " 'plot',\n", + " 'is',\n", + " 'romantic',\n", + " 'comedy',\n", + " 'boilerplate',\n", + " 'from',\n", + " 'start',\n", + " 'to',\n", + " 'finish',\n", + " '.'],\n", + " ['it',\n", + " 'arrives',\n", + " 'with',\n", + " 'an',\n", + " 'impeccable',\n", + " 'pedigree',\n", + " ',',\n", + " 'mongrel',\n", + " 'pep',\n", + " ',',\n", + " 'and',\n", + " 'almost',\n", + " 'indecipherable',\n", + " 'plot',\n", + " 'complications',\n", + " '.'],\n", + " ['a',\n", + " 'film',\n", + " 'that',\n", + " 'clearly',\n", + " 'means',\n", + " 'to',\n", + " 'preach',\n", + " 'exclusively',\n", + " 'to',\n", + " 'the',\n", + " 'converted',\n", + " '.'],\n", + " ['while',\n", + " 'the',\n", + " 'importance',\n", + " 'of',\n", + " 'being',\n", + " 'earnest',\n", + " 'offers',\n", + " 'opportunities',\n", + " 'for',\n", + " 'occasional',\n", + " 'smiles',\n", + " 'and',\n", + " 'chuckles',\n", + " ',',\n", + " 'it',\n", + " 'does',\n", + " \"n't\",\n", + " 'give',\n", + " 'us',\n", + " 'a',\n", + " 'reason',\n", + " 'to',\n", + " 'be',\n", + " 'in',\n", + " 'the',\n", + " 'theater',\n", + " 'beyond',\n", + " 'wilde',\n", + " \"'s\",\n", + " 'wit',\n", + " 'and',\n", + " 'the',\n", + " 'actors',\n", + " \"'\",\n", + " 'performances',\n", + " '.'],\n", + " ['the',\n", + " 'latest',\n", + " 'vapid',\n", + " 'actor',\n", + " \"'s\",\n", + " 'exercise',\n", + " 'to',\n", + " 'appropriate',\n", + " 'the',\n", + " 'structure',\n", + " 'of',\n", + " 'arthur',\n", + " 'schnitzler',\n", + " \"'s\",\n", + " 'reigen',\n", + " '.'],\n", + " ['more',\n", + " 'vaudeville',\n", + " 'show',\n", + " 'than',\n", + " 'well-constructed',\n", + " 'narrative',\n", + " ',',\n", + " 'but',\n", + " 'on',\n", + " 'those',\n", + " 'terms',\n", + " 'it',\n", + " \"'s\",\n", + " 'inoffensive',\n", + " 'and',\n", + " 'actually',\n", + " 'rather',\n", + " 'sweet',\n", + " '.'],\n", + " ['nothing', 'more', 'than', 'a', 'run-of-the-mill', 'action', 'flick', '.'],\n", + " ['hampered',\n", + " '--',\n", + " 'no',\n", + " ',',\n", + " 'paralyzed',\n", + " '--',\n", + " 'by',\n", + " 'a',\n", + " 'self-indulgent',\n", + " 'script',\n", + " '...',\n", + " 'that',\n", + " 'aims',\n", + " 'for',\n", + " 'poetry',\n", + " 'and',\n", + " 'ends',\n", + " 'up',\n", + " 'sounding',\n", + " 'like',\n", + " 'satire',\n", + " '.'],\n", + " ['ice',\n", + " 'age',\n", + " 'is',\n", + " 'the',\n", + " 'first',\n", + " 'computer-generated',\n", + " 'feature',\n", + " 'cartoon',\n", + " 'to',\n", + " 'feel',\n", + " 'like',\n", + " 'other',\n", + " 'movies',\n", + " ',',\n", + " 'and',\n", + " 'that',\n", + " 'makes',\n", + " 'for',\n", + " 'some',\n", + " 'glacial',\n", + " 'pacing',\n", + " 'early',\n", + " 'on',\n", + " '.'],\n", + " ['there',\n", + " \"'s\",\n", + " 'very',\n", + " 'little',\n", + " 'sense',\n", + " 'to',\n", + " 'what',\n", + " \"'s\",\n", + " 'going',\n", + " 'on',\n", + " 'here',\n", + " ',',\n", + " 'but',\n", + " 'the',\n", 
+ " 'makers',\n", + " 'serve',\n", + " 'up',\n", + " 'the',\n", + " 'cliches',\n", + " 'with',\n", + " 'considerable',\n", + " 'dash',\n", + " '.'],\n", + " ['cattaneo',\n", + " 'should',\n", + " 'have',\n", + " 'followed',\n", + " 'the',\n", + " 'runaway',\n", + " 'success',\n", + " 'of',\n", + " 'his',\n", + " 'first',\n", + " 'film',\n", + " ',',\n", + " 'the',\n", + " 'full',\n", + " 'monty',\n", + " ',',\n", + " 'with',\n", + " 'something',\n", + " 'different',\n", + " '.'],\n", + " ['they',\n", + " \"'re\",\n", + " 'the',\n", + " 'unnamed',\n", + " ',',\n", + " 'easily',\n", + " 'substitutable',\n", + " 'forces',\n", + " 'that',\n", + " 'serve',\n", + " 'as',\n", + " 'whatever',\n", + " 'terror',\n", + " 'the',\n", + " 'heroes',\n", + " 'of',\n", + " 'horror',\n", + " 'movies',\n", + " 'try',\n", + " 'to',\n", + " 'avoid',\n", + " '.'],\n", + " ['it',\n", + " 'almost',\n", + " 'feels',\n", + " 'as',\n", + " 'if',\n", + " 'the',\n", + " 'movie',\n", + " 'is',\n", + " 'more',\n", + " 'interested',\n", + " 'in',\n", + " 'entertaining',\n", + " 'itself',\n", + " 'than',\n", + " 'in',\n", + " 'amusing',\n", + " 'us',\n", + " '.'],\n", + " ['the',\n", + " 'movie',\n", + " \"'s\",\n", + " 'progression',\n", + " 'into',\n", + " 'rambling',\n", + " 'incoherence',\n", + " 'gives',\n", + " 'new',\n", + " 'meaning',\n", + " 'to',\n", + " 'the',\n", + " 'phrase',\n", + " '`',\n", + " 'fatal',\n", + " 'script',\n", + " 'error',\n", + " '.',\n", + " \"'\"],\n", + " ['i',\n", + " 'still',\n", + " 'like',\n", + " 'moonlight',\n", + " 'mile',\n", + " ',',\n", + " 'better',\n", + " 'judgment',\n", + " 'be',\n", + " 'damned',\n", + " '.'],\n", + " ['a',\n", + " 'welcome',\n", + " 'relief',\n", + " 'from',\n", + " 'baseball',\n", + " 'movies',\n", + " 'that',\n", + " 'try',\n", + " 'too',\n", + " 'hard',\n", + " 'to',\n", + " 'be',\n", + " 'mythic',\n", + " ',',\n", + " 'this',\n", + " 'one',\n", + " 'is',\n", + " 'a',\n", + " 'sweet',\n", + " 'and',\n", + " 'modest',\n", + " 'and',\n", + " 'ultimately',\n", + " 'winning',\n", + " 'story',\n", + " '.'],\n", + " ['a',\n", + " 'bilingual',\n", + " 'charmer',\n", + " ',',\n", + " 'just',\n", + " 'like',\n", + " 'the',\n", + " 'woman',\n", + " 'who',\n", + " 'inspired',\n", + " 'it'],\n", + " ['like',\n", + " 'a',\n", + " 'less',\n", + " 'dizzily',\n", + " 'gorgeous',\n", + " 'companion',\n", + " 'to',\n", + " 'mr.',\n", + " 'wong',\n", + " \"'s\",\n", + " 'in',\n", + " 'the',\n", + " 'mood',\n", + " 'for',\n", + " 'love',\n", + " '--',\n", + " 'very',\n", + " 'much',\n", + " 'a',\n", + " 'hong',\n", + " 'kong',\n", + " 'movie',\n", + " 'despite',\n", + " 'its',\n", + " 'mainland',\n", + " 'setting',\n", + " '.'],\n", + " ['as',\n", + " 'inept',\n", + " 'as',\n", + " 'big-screen',\n", + " 'remakes',\n", + " 'of',\n", + " 'the',\n", + " 'avengers',\n", + " 'and',\n", + " 'the',\n", + " 'wild',\n", + " 'wild',\n", + " 'west',\n", + " '.'],\n", + " ['it',\n", + " \"'s\",\n", + " 'everything',\n", + " 'you',\n", + " \"'d\",\n", + " 'expect',\n", + " '--',\n", + " 'but',\n", + " 'nothing',\n", + " 'more',\n", + " '.'],\n", + " ['best', 'indie', 'of', 'the', 'year', ',', 'so', 'far', '.'],\n", + " ['hatfield',\n", + " 'and',\n", + " 'hicks',\n", + " 'make',\n", + " 'the',\n", + " 'oddest',\n", + " 'of',\n", + " 'couples',\n", + " ',',\n", + " 'and',\n", + " 'in',\n", + " 'this',\n", + " 'sense',\n", + " 'the',\n", + " 'movie',\n", + " 'becomes',\n", + " 'a',\n", + " 'study',\n", + " 'of',\n", + " 'the',\n", + " 'gambles',\n", + " 'of',\n", + " 'the',\n", + " 
'publishing',\n", + " 'world',\n", + " ',',\n", + " 'offering',\n", + " 'a',\n", + " 'case',\n", + " 'study',\n", + " 'that',\n", + " 'exists',\n", + " 'apart',\n", + " 'from',\n", + " 'all',\n", + " 'the',\n", + " 'movie',\n", + " \"'s\",\n", + " 'political',\n", + " 'ramifications',\n", + " '.'],\n", + " ['it',\n", + " \"'s\",\n", + " 'like',\n", + " 'going',\n", + " 'to',\n", + " 'a',\n", + " 'house',\n", + " 'party',\n", + " 'and',\n", + " 'watching',\n", + " 'the',\n", + " 'host',\n", + " 'defend',\n", + " 'himself',\n", + " 'against',\n", + " 'a',\n", + " 'frothing',\n", + " 'ex-girlfriend',\n", + " '.'],\n", + " ['that',\n", + " 'the',\n", + " 'chuck',\n", + " 'norris',\n", + " '``',\n", + " 'grenade',\n", + " 'gag',\n", + " \"''\",\n", + " 'occurs',\n", + " 'about',\n", + " '7',\n", + " 'times',\n", + " 'during',\n", + " 'windtalkers',\n", + " 'is',\n", + " 'a',\n", + " 'good',\n", + " 'indication',\n", + " 'of',\n", + " 'how',\n", + " 'serious-minded',\n", + " 'the',\n", + " 'film',\n", + " 'is',\n", + " '.'],\n", + " ['the',\n", + " 'plot',\n", + " 'is',\n", + " 'romantic',\n", + " 'comedy',\n", + " 'boilerplate',\n", + " 'from',\n", + " 'start',\n", + " 'to',\n", + " 'finish',\n", + " '.'],\n", + " ['it',\n", + " 'arrives',\n", + " 'with',\n", + " 'an',\n", + " 'impeccable',\n", + " 'pedigree',\n", + " ',',\n", + " 'mongrel',\n", + " 'pep',\n", + " ',',\n", + " 'and',\n", + " 'almost',\n", + " 'indecipherable',\n", + " 'plot',\n", + " 'complications',\n", + " '.'],\n", + " ['a',\n", + " 'film',\n", + " 'that',\n", + " 'clearly',\n", + " 'means',\n", + " 'to',\n", + " 'preach',\n", + " 'exclusively',\n", + " 'to',\n", + " 'the',\n", + " 'converted',\n", + " '.'],\n", + " ['i',\n", + " 'still',\n", + " 'like',\n", + " 'moonlight',\n", + " 'mile',\n", + " ',',\n", + " 'better',\n", + " 'judgment',\n", + " 'be',\n", + " 'damned',\n", + " '.'],\n", + " ['a',\n", + " 'welcome',\n", + " 'relief',\n", + " 'from',\n", + " 'baseball',\n", + " 'movies',\n", + " 'that',\n", + " 'try',\n", + " 'too',\n", + " 'hard',\n", + " 'to',\n", + " 'be',\n", + " 'mythic',\n", + " ',',\n", + " 'this',\n", + " 'one',\n", + " 'is',\n", + " 'a',\n", + " 'sweet',\n", + " 'and',\n", + " 'modest',\n", + " 'and',\n", + " 'ultimately',\n", + " 'winning',\n", + " 'story',\n", + " '.'],\n", + " ['a',\n", + " 'bilingual',\n", + " 'charmer',\n", + " ',',\n", + " 'just',\n", + " 'like',\n", + " 'the',\n", + " 'woman',\n", + " 'who',\n", + " 'inspired',\n", + " 'it'],\n", + " ['like',\n", + " 'a',\n", + " 'less',\n", + " 'dizzily',\n", + " 'gorgeous',\n", + " 'companion',\n", + " 'to',\n", + " 'mr.',\n", + " 'wong',\n", + " \"'s\",\n", + " 'in',\n", + " 'the',\n", + " 'mood',\n", + " 'for',\n", + " 'love',\n", + " '--',\n", + " 'very',\n", + " 'much',\n", + " 'a',\n", + " 'hong',\n", + " 'kong',\n", + " 'movie',\n", + " 'despite',\n", + " 'its',\n", + " 'mainland',\n", + " 'setting',\n", + " '.'],\n", + " ['as',\n", + " 'inept',\n", + " 'as',\n", + " 'big-screen',\n", + " 'remakes',\n", + " 'of',\n", + " 'the',\n", + " 'avengers',\n", + " 'and',\n", + " 'the',\n", + " 'wild',\n", + " 'wild',\n", + " 'west',\n", + " '.'],\n", + " ['it',\n", + " \"'s\",\n", + " 'everything',\n", + " 'you',\n", + " \"'d\",\n", + " 'expect',\n", + " '--',\n", + " 'but',\n", + " 'nothing',\n", + " 'more',\n", + " '.'],\n", + " ['best', 'indie', 'of', 'the', 'year', ',', 'so', 'far', '.'],\n", + " ['hatfield',\n", + " 'and',\n", + " 'hicks',\n", + " 'make',\n", + " 'the',\n", + " 'oddest',\n", + " 'of',\n", + " 'couples',\n", + " ',',\n", 
+ " 'and',\n", + " 'in',\n", + " 'this',\n", + " 'sense',\n", + " 'the',\n", + " 'movie',\n", + " 'becomes',\n", + " 'a',\n", + " 'study',\n", + " 'of',\n", + " 'the',\n", + " 'gambles',\n", + " 'of',\n", + " 'the',\n", + " 'publishing',\n", + " 'world',\n", + " ',',\n", + " 'offering',\n", + " 'a',\n", + " 'case',\n", + " 'study',\n", + " 'that',\n", + " 'exists',\n", + " 'apart',\n", + " 'from',\n", + " 'all',\n", + " 'the',\n", + " 'movie',\n", + " \"'s\",\n", + " 'political',\n", + " 'ramifications',\n", + " '.'],\n", + " ['it',\n", + " \"'s\",\n", + " 'like',\n", + " 'going',\n", + " 'to',\n", + " 'a',\n", + " 'house',\n", + " 'party',\n", + " 'and',\n", + " 'watching',\n", + " 'the',\n", + " 'host',\n", + " 'defend',\n", + " 'himself',\n", + " 'against',\n", + " 'a',\n", + " 'frothing',\n", + " 'ex-girlfriend',\n", + " '.'],\n", + " ['that',\n", + " 'the',\n", + " 'chuck',\n", + " 'norris',\n", + " '``',\n", + " 'grenade',\n", + " 'gag',\n", + " \"''\",\n", + " 'occurs',\n", + " 'about',\n", + " '7',\n", + " 'times',\n", + " 'during',\n", + " 'windtalkers',\n", + " 'is',\n", + " 'a',\n", + " 'good',\n", + " 'indication',\n", + " 'of',\n", + " 'how',\n", + " 'serious-minded',\n", + " 'the',\n", + " 'film',\n", + " 'is',\n", + " '.'],\n", + " ['the',\n", + " 'plot',\n", + " 'is',\n", + " 'romantic',\n", + " 'comedy',\n", + " 'boilerplate',\n", + " 'from',\n", + " 'start',\n", + " 'to',\n", + " 'finish',\n", + " '.'],\n", + " ['it',\n", + " 'arrives',\n", + " 'with',\n", + " 'an',\n", + " 'impeccable',\n", + " 'pedigree',\n", + " ',',\n", + " 'mongrel',\n", + " 'pep',\n", + " ',',\n", + " 'and',\n", + " 'almost',\n", + " 'indecipherable',\n", + " 'plot',\n", + " 'complications',\n", + " '.'],\n", + " ['a',\n", + " 'film',\n", + " 'that',\n", + " 'clearly',\n", + " 'means',\n", + " 'to',\n", + " 'preach',\n", + " 'exclusively',\n", + " 'to',\n", + " 'the',\n", + " 'converted',\n", + " '.'],\n", + " ['i',\n", + " 'still',\n", + " 'like',\n", + " 'moonlight',\n", + " 'mile',\n", + " ',',\n", + " 'better',\n", + " 'judgment',\n", + " 'be',\n", + " 'damned',\n", + " '.'],\n", + " ['a',\n", + " 'welcome',\n", + " 'relief',\n", + " 'from',\n", + " 'baseball',\n", + " 'movies',\n", + " 'that',\n", + " 'try',\n", + " 'too',\n", + " 'hard',\n", + " 'to',\n", + " 'be',\n", + " 'mythic',\n", + " ',',\n", + " 'this',\n", + " 'one',\n", + " 'is',\n", + " 'a',\n", + " 'sweet',\n", + " 'and',\n", + " 'modest',\n", + " 'and',\n", + " 'ultimately',\n", + " 'winning',\n", + " 'story',\n", + " '.'],\n", + " ['a',\n", + " 'bilingual',\n", + " 'charmer',\n", + " ',',\n", + " 'just',\n", + " 'like',\n", + " 'the',\n", + " 'woman',\n", + " 'who',\n", + " 'inspired',\n", + " 'it'],\n", + " ['like',\n", + " 'a',\n", + " 'less',\n", + " 'dizzily',\n", + " 'gorgeous',\n", + " 'companion',\n", + " 'to',\n", + " 'mr.',\n", + " 'wong',\n", + " \"'s\",\n", + " 'in',\n", + " 'the',\n", + " 'mood',\n", + " 'for',\n", + " 'love',\n", + " '--',\n", + " 'very',\n", + " 'much',\n", + " 'a',\n", + " 'hong',\n", + " 'kong',\n", + " 'movie',\n", + " 'despite',\n", + " 'its',\n", + " 'mainland',\n", + " 'setting',\n", + " '.'],\n", + " ['as',\n", + " 'inept',\n", + " 'as',\n", + " 'big-screen',\n", + " 'remakes',\n", + " 'of',\n", + " 'the',\n", + " 'avengers',\n", + " 'and',\n", + " 'the',\n", + " 'wild',\n", + " 'wild',\n", + " 'west',\n", + " '.'],\n", + " ['it',\n", + " \"'s\",\n", + " 'everything',\n", + " 'you',\n", + " \"'d\",\n", + " 'expect',\n", + " '--',\n", + " 'but',\n", + " 'nothing',\n", + " 'more',\n", + " 
'.'],\n", + " ['best', 'indie', 'of', 'the', 'year', ',', 'so', 'far', '.'],\n", + " ['hatfield',\n", + " 'and',\n", + " 'hicks',\n", + " 'make',\n", + " 'the',\n", + " 'oddest',\n", + " 'of',\n", + " 'couples',\n", + " ',',\n", + " 'and',\n", + " 'in',\n", + " 'this',\n", + " 'sense',\n", + " 'the',\n", + " 'movie',\n", + " 'becomes',\n", + " 'a',\n", + " 'study',\n", + " 'of',\n", + " 'the',\n", + " 'gambles',\n", + " 'of',\n", + " 'the',\n", + " 'publishing',\n", + " 'world',\n", + " ',',\n", + " 'offering',\n", + " 'a',\n", + " 'case',\n", + " 'study',\n", + " 'that',\n", + " 'exists',\n", + " 'apart',\n", + " 'from',\n", + " 'all',\n", + " 'the',\n", + " 'movie',\n", + " \"'s\",\n", + " 'political',\n", + " 'ramifications',\n", + " '.'],\n", + " ['it',\n", + " \"'s\",\n", + " 'like',\n", + " 'going',\n", + " 'to',\n", + " 'a',\n", + " 'house',\n", + " 'party',\n", + " 'and',\n", + " 'watching',\n", + " 'the',\n", + " 'host',\n", + " 'defend',\n", + " 'himself',\n", + " 'against',\n", + " 'a',\n", + " 'frothing',\n", + " 'ex-girlfriend',\n", + " '.'],\n", + " ['that',\n", + " 'the',\n", + " 'chuck',\n", + " 'norris',\n", + " '``',\n", + " 'grenade',\n", + " 'gag',\n", + " \"''\",\n", + " 'occurs',\n", + " 'about',\n", + " '7',\n", + " 'times',\n", + " 'during',\n", + " 'windtalkers',\n", + " 'is',\n", + " 'a',\n", + " 'good',\n", + " 'indication',\n", + " 'of',\n", + " 'how',\n", + " 'serious-minded',\n", + " 'the',\n", + " 'film',\n", + " 'is',\n", + " '.'],\n", + " ['the',\n", + " 'plot',\n", + " 'is',\n", + " 'romantic',\n", + " 'comedy',\n", + " 'boilerplate',\n", + " 'from',\n", + " 'start',\n", + " 'to',\n", + " 'finish',\n", + " '.'],\n", + " ['it',\n", + " 'arrives',\n", + " 'with',\n", + " 'an',\n", + " 'impeccable',\n", + " 'pedigree',\n", + " ',',\n", + " 'mongrel',\n", + " 'pep',\n", + " ',',\n", + " 'and',\n", + " 'almost',\n", + " 'indecipherable',\n", + " 'plot',\n", + " 'complications',\n", + " '.'],\n", + " ['a',\n", + " 'film',\n", + " 'that',\n", + " 'clearly',\n", + " 'means',\n", + " 'to',\n", + " 'preach',\n", + " 'exclusively',\n", + " 'to',\n", + " 'the',\n", + " 'converted',\n", + " '.']]" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# 将所有数字转为小写\n", "ds.apply(lambda x: x['raw_sentence'].lower(), new_field_name='raw_sentence')\n", @@ -114,7 +1465,192 @@ "cell_type": "code", "execution_count": 5, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "[[120, 121, 6, 2, 122, 5, 72, 123, 3],\n", + " [14,\n", + " 4,\n", + " 152,\n", + " 153,\n", + " 154,\n", + " 155,\n", + " 8,\n", + " 156,\n", + " 157,\n", + " 9,\n", + " 16,\n", + " 2,\n", + " 158,\n", + " 21,\n", + " 159,\n", + " 30,\n", + " 98,\n", + " 57,\n", + " 4,\n", + " 160,\n", + " 161,\n", + " 13,\n", + " 162,\n", + " 163,\n", + " 164,\n", + " 165,\n", + " 3],\n", + " [4,\n", + " 112,\n", + " 113,\n", + " 15,\n", + " 114,\n", + " 35,\n", + " 10,\n", + " 68,\n", + " 115,\n", + " 69,\n", + " 8,\n", + " 23,\n", + " 116,\n", + " 5,\n", + " 18,\n", + " 36,\n", + " 11,\n", + " 4,\n", + " 70,\n", + " 7,\n", + " 117,\n", + " 7,\n", + " 118,\n", + " 119,\n", + " 71,\n", + " 3],\n", + " [4, 1, 1, 5, 138, 14, 2, 1, 1, 1, 12],\n", + " [2, 27, 11, 139, 140, 141, 15, 142, 8, 143, 3],\n", + " [12, 9, 14, 32, 8, 4, 59, 60, 7, 61, 2, 62, 63, 64, 65, 4, 66, 67, 3],\n", + " [97, 145, 14, 146, 147, 5, 148, 149, 23, 150, 3],\n", + " [4, 1, 1, 5, 138, 14, 2, 1, 1, 1, 12],\n", + " [4, 1, 1, 5, 138, 14, 2, 1, 1, 1, 12],\n", + " [14,\n", + " 
4,\n", + " 152,\n", + " 153,\n", + " 154,\n", + " 155,\n", + " 8,\n", + " 156,\n", + " 157,\n", + " 9,\n", + " 16,\n", + " 2,\n", + " 158,\n", + " 21,\n", + " 159,\n", + " 30,\n", + " 98,\n", + " 57,\n", + " 4,\n", + " 160,\n", + " 161,\n", + " 13,\n", + " 162,\n", + " 163,\n", + " 164,\n", + " 165,\n", + " 3],\n", + " [10,\n", + " 2,\n", + " 82,\n", + " 83,\n", + " 84,\n", + " 85,\n", + " 86,\n", + " 87,\n", + " 88,\n", + " 89,\n", + " 90,\n", + " 91,\n", + " 92,\n", + " 93,\n", + " 11,\n", + " 4,\n", + " 28,\n", + " 94,\n", + " 6,\n", + " 95,\n", + " 96,\n", + " 2,\n", + " 17,\n", + " 11,\n", + " 3],\n", + " [12, 73, 20, 33, 74, 75, 5, 76, 77, 5, 7, 78, 79, 27, 80, 3],\n", + " [12, 78, 1, 24, 1, 2, 13, 11, 31, 1, 16, 1, 1, 133, 16, 1, 1, 3],\n", + " [24, 107, 24, 108, 109, 6, 2, 110, 7, 2, 34, 34, 111, 3],\n", + " [2, 27, 11, 139, 140, 141, 15, 142, 8, 143, 3],\n", + " [24, 107, 24, 108, 109, 6, 2, 110, 7, 2, 34, 34, 111, 3],\n", + " [97, 145, 14, 146, 147, 5, 148, 149, 23, 150, 3],\n", + " [4,\n", + " 112,\n", + " 113,\n", + " 15,\n", + " 114,\n", + " 35,\n", + " 10,\n", + " 68,\n", + " 115,\n", + " 69,\n", + " 8,\n", + " 23,\n", + " 116,\n", + " 5,\n", + " 18,\n", + " 36,\n", + " 11,\n", + " 4,\n", + " 70,\n", + " 7,\n", + " 117,\n", + " 7,\n", + " 118,\n", + " 119,\n", + " 71,\n", + " 3],\n", + " [12, 9, 99, 29, 100, 101, 30, 22, 58, 31, 3],\n", + " [12, 9, 99, 29, 100, 101, 30, 22, 58, 31, 3],\n", + " [120, 121, 6, 2, 122, 5, 72, 123, 3],\n", + " [1, 30, 1, 5, 1, 30, 1, 4, 1, 1, 1, 10, 1, 21, 1, 7, 1, 1, 1, 14, 1, 3],\n", + " [1,\n", + " 1,\n", + " 1,\n", + " 1,\n", + " 8,\n", + " 1,\n", + " 89,\n", + " 2,\n", + " 1,\n", + " 16,\n", + " 151,\n", + " 1,\n", + " 1,\n", + " 1,\n", + " 1,\n", + " 1,\n", + " 1,\n", + " 7,\n", + " 1,\n", + " 1,\n", + " 1,\n", + " 2,\n", + " 1,\n", + " 6,\n", + " 28,\n", + " 25,\n", + " 3]]" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "from fastNLP import Vocabulary\n", "vocab = Vocabulary(min_freq=2)\n", @@ -140,7 +1676,7 @@ "outputs": [], "source": [ "from fastNLP.models import CNNText\n", - "model = CNNText(embed_num=len(vocab), embed_dim=50, num_classes=5, padding=2, dropout=0.1)\n" + "model = CNNText((len(vocab),50), num_classes=5, padding=2, dropout=0.1)\n" ] }, { @@ -162,38 +1698,29 @@ "text": [ "input fields after batch(if batch size is 2):\n", "\twords: (1)type:numpy.ndarray (2)dtype:object, (3)shape:(2,) \n", - "\tword_seq: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 25]) \n", + "\tword_seq: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2, 11]) \n", "target fields after batch(if batch size is 2):\n", "\tlabel_seq: (1)type:torch.Tensor (2)dtype:torch.int64, (3)shape:torch.Size([2]) \n", - "\n", - "training epochs started 2019-01-12 17-00-48\n" + "\n" ] }, { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "23979df0f63e446fbb0406b919b91dd3", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(IntProgress(value=0, layout=Layout(flex='2'), max=6), HTML(value='')), layout=Layout(display='i…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Evaluation at Epoch 1/3. Step:2/6. AccuracyMetric: acc=0.173913\n", - "Evaluation at Epoch 2/3. Step:4/6. AccuracyMetric: acc=0.26087\n", - "Evaluation at Epoch 3/3. Step:6/6. 
AccuracyMetric: acc=0.304348\n", - "\n", - "In Epoch:3/Step:6, got best dev performance:AccuracyMetric: acc=0.304348\n", - "Reloaded the best model.\n", - "Train finished!\n" + "ename": "AttributeError", + "evalue": "'numpy.ndarray' object has no attribute 'contiguous'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mdev_data\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mdev_data\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mloss\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mCrossEntropyLoss\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 6\u001b[0;31m \u001b[0mmetrics\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mAccuracyMetric\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 7\u001b[0m )\n\u001b[1;32m 8\u001b[0m \u001b[0mtrainer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/core/trainer.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, train_data, model, optimizer, loss, batch_size, sampler, update_every, n_epochs, print_every, dev_data, metrics, metric_key, validate_every, save_path, prefetch, use_tqdm, device, callbacks, check_code_level)\u001b[0m\n\u001b[1;32m 447\u001b[0m _check_code(dataset=train_data, model=model, losser=losser, metrics=metrics, dev_data=dev_data,\n\u001b[1;32m 448\u001b[0m \u001b[0mmetric_key\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mmetric_key\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcheck_level\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcheck_code_level\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 449\u001b[0;31m batch_size=min(batch_size, DEFAULT_CHECK_BATCH_SIZE))\n\u001b[0m\u001b[1;32m 450\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 451\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtrain_data\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/core/trainer.py\u001b[0m in \u001b[0;36m_check_code\u001b[0;34m(dataset, model, losser, metrics, batch_size, dev_data, metric_key, check_level)\u001b[0m\n\u001b[1;32m 811\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 812\u001b[0m \u001b[0mrefined_batch_x\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_build_args\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mbatch_x\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 813\u001b[0;31m \u001b[0mpred_dict\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mrefined_batch_x\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 814\u001b[0m \u001b[0mfunc_signature\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0m_get_func_signature\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 815\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpred_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m 489\u001b[0m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_slow_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 490\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 491\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 492\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mhook\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_forward_hooks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 493\u001b[0m \u001b[0mhook_result\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mhook\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/models/cnn_text_classification.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, words, seq_len)\u001b[0m\n\u001b[1;32m 58\u001b[0m \u001b[0;34m:\u001b[0m\u001b[0;32mreturn\u001b[0m \u001b[0moutput\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mdict\u001b[0m \u001b[0mof\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mLongTensor\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mbatch_size\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnum_classes\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 59\u001b[0m \"\"\"\n\u001b[0;32m---> 60\u001b[0;31m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0membed\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mwords\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# [N,L] -> [N,L,C]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 61\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconv_pool\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# [N,L,C] -> [N,C]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 62\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdropout\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m 489\u001b[0m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_slow_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 490\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 491\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 492\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mhook\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_forward_hooks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 493\u001b[0m \u001b[0mhook_result\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mhook\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/fastNLP/modules/encoder/embedding.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m 33\u001b[0m \u001b[0;34m:\u001b[0m\u001b[0;32mreturn\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTensor\u001b[0m \u001b[0;34m:\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mbatch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mseq_len\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0membed_dim\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 34\u001b[0m \"\"\"\n\u001b[0;32m---> 35\u001b[0;31m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msuper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 36\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdropout\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/torch/nn/modules/sparse.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 106\u001b[0m return F.embedding(\n\u001b[1;32m 107\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mweight\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpadding_idx\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmax_norm\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 108\u001b[0;31m self.norm_type, self.scale_grad_by_freq, self.sparse)\n\u001b[0m\u001b[1;32m 109\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 110\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mextra_repr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/Users/fdujyn/anaconda3/lib/python3.6/site-packages/torch/nn/functional.py\u001b[0m in \u001b[0;36membedding\u001b[0;34m(input, weight, padding_idx, max_norm, norm_type, scale_grad_by_freq, sparse)\u001b[0m\n\u001b[1;32m 1062\u001b[0m [ 0.6262, 0.2438, 0.7471]]])\n\u001b[1;32m 1063\u001b[0m \"\"\"\n\u001b[0;32m-> 1064\u001b[0;31m \u001b[0minput\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcontiguous\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1065\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mpadding_idx\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1066\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mpadding_idx\u001b[0m \u001b[0;34m>\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mAttributeError\u001b[0m: 'numpy.ndarray' object has no attribute 'contiguous'" ] } ], diff --git a/tutorials/fastnlp_test_tutorial.ipynb b/tutorials/fastnlp_test_tutorial.ipynb index 9b0c1b2e..fb87606e 100644 --- a/tutorials/fastnlp_test_tutorial.ipynb +++ b/tutorials/fastnlp_test_tutorial.ipynb @@ -89,7 +89,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.4" + "version": "3.6.7" } }, "nbformat": 4,
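
Both tracebacks captured above point at the same interface: CNNText.forward(self, words, seq_len=None) wants the indexed word ids as a LongTensor under an input field named words, while the re-run notebooks still feed word_seq (the NameError in the 10min tutorial) or an un-indexed object array (the "'numpy.ndarray' object has no attribute 'contiguous'" failure in the 1min tutorial); the constructor likewise changed from CNNText(embed_num=..., embed_dim=...) to a (vocab_size, embed_dim) tuple. The sketch below is not part of the changeset: it rebuilds that flow on a hypothetical two-row stand-in for sample_data/tutorial_sample_dataset.csv, assuming the fastNLP 0.4-era DataSet/Vocabulary/Batch calls already used in these notebooks behave as shown there.

    # Sketch only -- not taken from this diff. Toy rows stand in for the sample CSV.
    from fastNLP import DataSet, Vocabulary
    from fastNLP.core.batch import Batch
    from fastNLP.core.sampler import SequentialSampler
    from fastNLP.models import CNNText

    ds = DataSet({'raw_sentence': ['the performances are an absolute joy .',
                                   'narratively , trouble every day is a plodding mess .'],
                  'label': [4, 1]})

    # Tokenize into a field literally named 'words' -- the first traceback's own
    # suggestion ("You need to provide ['words'] in DataSet and set it as input").
    ds.apply(lambda x: x['raw_sentence'].lower().split(), new_field_name='words')

    # Index the tokens so the input field holds ints; leaving raw strings in an
    # object array is what triggers the numpy/contiguous failure above.
    vocab = Vocabulary()
    ds.apply(lambda x: [vocab.add(w) for w in x['words']])
    vocab.build_vocab()
    ds.apply(lambda x: [vocab.to_index(w) for w in x['words']], new_field_name='words')
    ds.set_input('words')
    ds.set_target('label')

    # Old call removed by this diff:
    #   CNNText(embed_num=len(vocab), embed_dim=50, num_classes=5, padding=2, dropout=0.1)
    # New call: the first argument is a (vocab_size, embed_dim) tuple.
    model = CNNText((len(vocab), 50), num_classes=5, padding=2, dropout=0.1)

    # Batch collates the integer lists into the LongTensor the embedding expects,
    # matching the "batch_x has: {'words': tensor(...)}" output shown above.
    for batch_x, batch_y in Batch(dataset=ds, batch_size=2, sampler=SequentialSampler()):
        print(model(words=batch_x['words']))

With the only input field being an integer list named words, the batch tensor should satisfy the Trainer's pre-flight check (_check_code) that both notebooks currently fail on, so the training cells would run instead of recording these tracebacks.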